answer
stringlengths 17
10.2M
|
|---|
package cgeo.geocaching;
import cgeo.geocaching.activity.AbstractActionBarActivity;
import cgeo.geocaching.enumerations.LoadFlags;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.RxUtils;
import org.androidannotations.annotations.EActivity;
import org.androidannotations.annotations.Extra;
import org.androidannotations.annotations.OptionsItem;
import org.androidannotations.annotations.OptionsMenu;
import org.apache.commons.collections4.CollectionUtils;
import android.app.ProgressDialog;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.view.LayoutInflater;
import android.widget.ImageView;
import android.widget.LinearLayout;
import java.util.ArrayList;
import java.util.List;
@EActivity
@OptionsMenu(R.menu.static_maps_activity_options)
public class StaticMapsActivity extends AbstractActionBarActivity {

    // Intent extras injected by AndroidAnnotations before onCreate runs.
    @Extra(Intents.EXTRA_DOWNLOAD) boolean download = false;      // when true, try downloading maps if none are cached
    @Extra(Intents.EXTRA_WAYPOINT_ID) Integer waypointId = null;  // null => show maps of the cache itself, else of this waypoint
    @Extra(Intents.EXTRA_GEOCODE) String geocode = null;          // geocode of the cache whose static maps are shown

    private Geocache cache;
    // Bitmaps collected by LoadMapsThread and rendered by showStaticMaps().
    private final List<Bitmap> maps = new ArrayList<>();
    private LayoutInflater inflater = null;
    private ProgressDialog waitDialog = null;
    private LinearLayout smapsView = null;

    /**
     * Receives the single completion message sent by {@link LoadMapsThread}:
     * dismisses the wait dialog, then either renders the loaded maps or — if
     * nothing was loaded and {@code download} is set — downloads the maps and
     * relaunches this activity.
     */
    private final Handler loadMapsHandler = new Handler() {
        @Override
        public void handleMessage(final Message msg) {
            if (waitDialog != null) {
                waitDialog.dismiss();
            }
            try {
                if (CollectionUtils.isEmpty(maps)) {
                    if (download) {
                        // NOTE(review): downloadStaticMaps() waits for network/disk work
                        // to complete, but this handler runs on the UI thread — confirm
                        // this blocking call is acceptable here.
                        final boolean succeeded = downloadStaticMaps();
                        if (succeeded) {
                            // Relaunch with the same intent so the freshly stored maps get loaded.
                            startActivity(StaticMapsActivity.this.getIntent());
                        } else {
                            showToast(res.getString(R.string.err_detail_google_maps_limit_reached));
                        }
                    } else {
                        showToast(res.getString(R.string.err_detail_not_load_map_static));
                    }
                    finish();
                } else {
                    showStaticMaps();
                }
            } catch (final Exception e) {
                Log.e("StaticMapsActivity.loadMapsHandler", e);
            }
        }
    };

    /**
     * Shows the static maps.
     * Lazily resolves the inflater and the container view, clears any previous
     * content, then adds one ImageView per successfully loaded bitmap.
     */
    private void showStaticMaps() {
        if (inflater == null) {
            inflater = getLayoutInflater();
        }
        if (smapsView == null) {
            smapsView = (LinearLayout) findViewById(R.id.maps_list);
        }
        smapsView.removeAllViews();
        for (final Bitmap image : maps) {
            if (image != null) {
                final ImageView map = (ImageView) inflater.inflate(R.layout.staticmaps_activity_item, smapsView, false);
                map.setImageBitmap(image);
                smapsView.addView(map);
            }
        }
    }

    @Override
    public void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState, R.layout.staticmaps_activity);
        // Without a resolvable cache there is nothing to display; bail out early.
        cache = DataStore.loadCache(geocode, LoadFlags.LOAD_CACHE_OR_DB);
        if (cache == null) {
            showToast("Sorry, c:geo forgot for what cache you want to load static maps.");
            finish();
            return;
        }
        setCacheTitleBar(cache);
        // Show a progress dialog while the background thread loads the map images.
        waitDialog = ProgressDialog.show(this, null, res.getString(R.string.map_static_loading), true);
        waitDialog.setCancelable(true);
        (new LoadMapsThread()).start();
    }

    /**
     * Background thread that loads the static map bitmaps (all zoom levels) into
     * {@link #maps} and then notifies {@link #loadMapsHandler} exactly once.
     */
    private class LoadMapsThread extends Thread {
        @Override
        public void run() {
            try {
                // try downloading 2 times
                for (int trials = 0; trials < 2; trials++) {
                    // One image per zoom level, for either the waypoint or the cache.
                    for (int level = 1; level <= StaticMapsProvider.MAPS_LEVEL_MAX; level++) {
                        try {
                            if (waypointId != null) {
                                final Bitmap image = StaticMapsProvider.getWaypointMap(geocode, cache.getWaypointById(waypointId), level);
                                if (image != null) {
                                    maps.add(image);
                                }
                            } else {
                                final Bitmap image = StaticMapsProvider.getCacheMap(geocode, level);
                                if (image != null) {
                                    maps.add(image);
                                }
                            }
                        } catch (final Exception e) {
                            Log.e("StaticMapsActivity.LoadMapsThread.run", e);
                        }
                    }
                    // Stop retrying as soon as at least one map was loaded.
                    if (!maps.isEmpty()) {
                        break;
                    }
                }
                loadMapsHandler.sendMessage(Message.obtain());
            } catch (final Exception e) {
                Log.e("StaticMapsActivity.LoadMapsThread.run", e);
            }
        }
    }

    // Menu handler (AndroidAnnotations): re-download the maps and reload the activity.
    @OptionsItem(R.id.menu_refresh)
    void refreshMaps() {
        downloadStaticMaps();
        restartActivity();
    }

    /**
     * Stores the static maps for the cache (when no waypoint id was given) or for
     * the selected waypoint, blocking until the store operation completed.
     *
     * @return true if maps are available afterwards, false otherwise
     */
    private boolean downloadStaticMaps() {
        if (waypointId == null) {
            showToast(res.getString(R.string.info_storing_static_maps));
            RxUtils.waitForCompletion(StaticMapsProvider.storeCacheStaticMap(cache));
            return cache.hasStaticMap();
        }
        final Waypoint waypoint = cache.getWaypointById(waypointId);
        if (waypoint != null) {
            showToast(res.getString(R.string.info_storing_static_maps));
            // refresh always removes old waypoint files
            StaticMapsProvider.removeWpStaticMaps(waypoint, geocode);
            RxUtils.waitForCompletion(StaticMapsProvider.storeWaypointStaticMap(cache, waypoint));
            return StaticMapsProvider.hasStaticMapForWaypoint(geocode, waypoint);
        }
        showToast(res.getString(R.string.err_detail_not_load_map_static));
        return false;
    }
}
|
package cgeo.geocaching.files;
import cgeo.geocaching.cgeoapplication;
import cgeo.geocaching.utils.CryptUtils;
import cgeo.geocaching.utils.Log;
import ch.boye.httpclientandroidlib.Header;
import ch.boye.httpclientandroidlib.HttpResponse;
import org.apache.commons.lang3.StringUtils;
import android.os.Environment;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
* Handle local storage issues on phone and SD card.
*
*/
/**
 * Handle local storage issues on phone and SD card.
 */
public class LocalStorage {

    /** Name of the local private directory used to hold cached information */
    public final static String cache = ".cgeo";

    // Lazily resolved base of the internal (phone) storage; see getInternalStorageBase().
    private static File internalStorageBase;

    /**
     * Return the primary storage cache root (external media if mounted, phone otherwise).
     *
     * @return the root of the cache directory
     */
    public static File getStorage() {
        return getStorageSpecific(false);
    }

    /**
     * Return the secondary storage cache root (phone if external media is mounted, external media otherwise).
     *
     * @return the root of the cache directory
     */
    public static File getStorageSec() {
        return getStorageSpecific(true);
    }

    /**
     * Select the storage root. XOR-ing the mount state with {@code secondary}
     * swaps the roles: the primary root is the external media when mounted,
     * and the secondary root is whichever location the primary is not using.
     */
    private static File getStorageSpecific(final boolean secondary) {
        return Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED) ^ secondary ?
                getExternalStorageBase() :
                new File(getInternalStorageBase(), LocalStorage.cache);
    }

    /** @return the directory holding the database on external media */
    public static File getExternalDbDirectory() {
        return getExternalStorageBase();
    }

    /** @return the directory holding the database on internal (phone) storage */
    public static File getInternalDbDirectory() {
        return new File(getInternalStorageBase(), "databases");
    }

    private static File getExternalStorageBase() {
        return new File(Environment.getExternalStorageDirectory(), LocalStorage.cache);
    }

    private static File getInternalStorageBase() {
        if (internalStorageBase == null) {
            // A race condition will do no harm as the operation is idempotent. No need to synchronize.
            internalStorageBase = cgeoapplication.getInstance().getApplicationContext().getFilesDir().getParentFile();
        }
        return internalStorageBase;
    }

    /**
     * Get the guessed file extension of an URL. A file extension can contain up-to 4 characters in addition to the dot.
     *
     * @param url
     *            the relative or absolute URL
     * @return the file extension, including the leading dot, or the empty string if none could be determined
     */
    static String getExtension(final String url) {
        final String urlExt = StringUtils.substringAfterLast(url, ".");
        return urlExt.length() >= 1 && urlExt.length() <= 4 ? "." + urlExt : "";
    }

    /**
     * Get the primary storage cache directory for a geocode. A null or empty geocode will be replaced by a default
     * value.
     *
     * @param geocode
     *            the geocode
     * @return the cache directory
     */
    public static File getStorageDir(final String geocode) {
        return storageDir(getStorage(), geocode);
    }

    /**
     * Get the secondary storage cache directory for a geocode. A null or empty geocode will be replaced by a default
     * value.
     *
     * @param geocode
     *            the geocode
     * @return the cache directory
     */
    private static File getStorageSecDir(final String geocode) {
        return storageDir(getStorageSec(), geocode);
    }

    private static File storageDir(final File base, final String geocode) {
        return new File(base, StringUtils.defaultIfEmpty(geocode, "_others"));
    }

    /**
     * Get the primary file corresponding to a geocode and a file name or an url. If it is an url, an appropriate
     * filename will be built by hashing it. The directory structure will be created if needed.
     * A null or empty geocode will be replaced by a default value.
     *
     * @param geocode
     *            the geocode
     * @param fileNameOrUrl
     *            the file name or url
     * @param isUrl
     *            true if an url was given, false if a file name was given
     * @param createDirs
     *            true to create the directory structure if missing
     * @return the file
     */
    public static File getStorageFile(final String geocode, final String fileNameOrUrl, final boolean isUrl, final boolean createDirs) {
        return buildFile(getStorageDir(geocode), fileNameOrUrl, isUrl, createDirs);
    }

    /**
     * Get the secondary file corresponding to a geocode and a file name or an url. If it is an url, an appropriate
     * filename will be built by hashing it. The directory structure will not be created automatically.
     * A null or empty geocode will be replaced by a default value.
     *
     * @param geocode
     *            the geocode
     * @param fileNameOrUrl
     *            the file name or url
     * @param isUrl
     *            true if an url was given, false if a file name was given
     * @return the file
     */
    public static File getStorageSecFile(final String geocode, final String fileNameOrUrl, final boolean isUrl) {
        return buildFile(getStorageSecDir(geocode), fileNameOrUrl, isUrl, false);
    }

    private static File buildFile(final File base, final String fileName, final boolean isUrl, final boolean createDirs) {
        if (createDirs) {
            base.mkdirs();
        }
        // URLs are mapped to stable names by hashing; the guessed extension is kept
        // so image decoders can recognize the file type.
        return new File(base, isUrl ? CryptUtils.md5(fileName) + getExtension(fileName) : fileName);
    }

    /**
     * Save an HTTP response to a file.
     *
     * @param response
     *            the response whose entity content will be saved
     * @param targetFile
     *            the target file, which will be created if necessary
     * @return true if the operation was successful, false otherwise, in which case the file will not exist
     */
    public static boolean saveEntityToFile(final HttpResponse response, final File targetFile) {
        if (response == null) {
            return false;
        }
        try {
            final boolean saved = saveToFile(response.getEntity().getContent(), targetFile);
            // Cache validators alongside the payload; pass null on failure so any
            // stale header files from a previous download get removed.
            saveHeader("etag", saved ? response : null, targetFile);
            saveHeader("last-modified", saved ? response : null, targetFile);
            return saved;
        } catch (final IOException e) {
            Log.e("LocalStorage.saveEntityToFile", e);
        }
        return false;
    }

    private static void saveHeader(final String name, final HttpResponse response, final File baseFile) {
        final Header header = response != null ? response.getFirstHeader(name) : null;
        final File file = filenameForHeader(baseFile, name);
        if (header == null) {
            // No header (or failed download): remove any previously cached value.
            file.delete();
        } else {
            saveToFile(new ByteArrayInputStream(header.getValue().getBytes()), file);
        }
    }

    private static File filenameForHeader(final File baseFile, final String name) {
        return new File(baseFile.getAbsolutePath() + "-" + name);
    }

    /**
     * Get the saved header value for this file.
     *
     * @param baseFile
     *            the name of the cached resource
     * @param name
     *            the name of the header ("etag" or "last-modified")
     * @return null if no value has been cached, the value otherwise
     */
    public static String getSavedHeader(final File baseFile, final String name) {
        try {
            final File file = filenameForHeader(baseFile, name);
            final FileReader f = new FileReader(file);
            try {
                // No header will be more than 256 bytes
                final char[] value = new char[256];
                final int count = f.read(value);
                // An empty file yields count == -1; treat it like "not cached" instead
                // of letting new String(value, 0, -1) throw and log a spurious warning.
                return count > 0 ? new String(value, 0, count) : null;
            } finally {
                f.close();
            }
        } catch (final FileNotFoundException e) {
            // Do nothing, the file does not exist
        } catch (final Exception e) {
            Log.w("could not read saved header " + name + " for " + baseFile, e);
        }
        return null;
    }

    /**
     * Save a stream to a file.
     * <p/>
     * If the response could not be saved to the file due, for example, to a network error, the file will not exist when
     * this method returns.
     *
     * @param inputStream
     *            the stream whose content will be saved
     * @param targetFile
     *            the target file, which will be created if necessary
     * @return true if the operation was successful, false otherwise
     */
    public static boolean saveToFile(final InputStream inputStream, final File targetFile) {
        if (inputStream == null) {
            return false;
        }
        try {
            try {
                final FileOutputStream fos = new FileOutputStream(targetFile);
                final boolean written = copy(inputStream, fos);
                fos.close();
                if (!written) {
                    // Keep the contract: no partially written file survives a failure.
                    targetFile.delete();
                }
                return written;
            } finally {
                inputStream.close();
            }
        } catch (final IOException e) {
            Log.e("LocalStorage.saveToFile", e);
            targetFile.delete();
        }
        return false;
    }

    /**
     * Copy a file into another. The directory structure of target file will be created if needed.
     *
     * @param source
     *            the source file
     * @param destination
     *            the target file
     * @return true if the copy happened without error, false otherwise
     */
    public static boolean copy(final File source, final File destination) {
        destination.getParentFile().mkdirs();
        InputStream input = null;
        OutputStream output;
        try {
            input = new FileInputStream(source);
            output = new FileOutputStream(destination);
        } catch (final FileNotFoundException e) {
            Log.e("LocalStorage.copy: could not open file", e);
            // The destination may have failed to open after the source succeeded:
            // close the source so it does not leak.
            if (input != null) {
                try {
                    input.close();
                } catch (final IOException e1) {
                    // ignore
                }
            }
            return false;
        }
        final boolean copyDone = copy(input, output);
        try {
            input.close();
            output.close();
        } catch (final IOException e) {
            Log.e("LocalStorage.copy: could not close file", e);
            return false;
        }
        return copyDone;
    }

    private static boolean copy(final InputStream input, final OutputStream output) {
        final byte[] buffer = new byte[4096];
        int length;
        try {
            while ((length = input.read(buffer)) > 0) {
                output.write(buffer, 0, length);
            }
            // Flushing is only necessary if the stream is not immediately closed afterwards.
            // We rely on all callers to do that correctly outside of this method
        } catch (final IOException e) {
            Log.e("LocalStorage.copy: error when copying data", e);
            return false;
        }
        return true;
    }

    /**
     * Check if an external media (SD card) is available for use.
     *
     * @return true if the external media is properly mounted
     */
    public static boolean isExternalStorageAvailable() {
        return Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED);
    }

    /**
     * Recursively delete a directory (or a single file).
     *
     * @param path the directory or file to delete
     * @return true if the final delete of {@code path} itself succeeded
     */
    public static boolean deleteDirectory(final File path) {
        // listFiles() returns null when path does not exist, is not a directory,
        // or cannot be read — guard against it to avoid a NullPointerException.
        final File[] files = path.listFiles();
        if (files != null) {
            for (final File file : files) {
                if (file.isDirectory()) {
                    deleteDirectory(file);
                } else {
                    file.delete();
                }
            }
        }
        return path.delete();
    }

    /**
     * Deletes all files from directory geocode with the given prefix.
     *
     * @param geocode
     *            The geocode identifying the cache directory
     * @param prefix
     *            The filename prefix
     */
    public static void deleteFilesWithPrefix(final String geocode, final String prefix) {
        final File[] filesToDelete = getFilesWithPrefix(geocode, prefix);
        if (filesToDelete == null) {
            return;
        }
        for (final File file : filesToDelete) {
            try {
                if (!file.delete()) {
                    Log.w("LocalStorage.deleteFilesPrefix: Can't delete file " + file.getName());
                }
            } catch (final Exception e) {
                Log.e("LocalStorage.deleteFilesPrefix: " + e.toString());
            }
        }
    }

    /**
     * Get an array of all files of the geocode directory starting with
     * the given filenamePrefix.
     *
     * @param geocode
     *            The geocode identifying the cache data directory
     * @param filenamePrefix
     *            The prefix of the files
     * @return File[] the array of files starting with filenamePrefix in geocode directory
     */
    public static File[] getFilesWithPrefix(final String geocode, final String filenamePrefix) {
        final FilenameFilter filter = new FilenameFilter() {
            @Override
            public boolean accept(final File dir, final String filename) {
                return filename.startsWith(filenamePrefix);
            }
        };
        return LocalStorage.getStorageDir(geocode).listFiles(filter);
    }
}
|
package services.languageProcessor;
import com.fasterxml.jackson.databind.JsonNode;
import play.libs.Json;
import scala.util.parsing.json.JSONArray;
import services.IntentEntity;
import services.Response;
import services.queryHandler.Extractor;
import java.lang.reflect.*;
import java.util.ArrayList;
import java.util.List;
public class TaskMap {

    /**
     * This method calls appropriate method on run time based on the
     * parameters (methodName and argName) passed and returns a value
     * returned by that method.
     *
     * Note: the handler methods below are looked up by name via reflection,
     * so their (capitalized) names and signatures must not be changed.
     *
     * @param methodName   name of the public handler method to invoke
     * @param issueKey     issue identifier forwarded to the handler
     * @param responseBody raw response the handler extracts its answer from
     * @return the handler's JSON answer, or a "fail" JSON node on any error
     */
    public static JsonNode questionMapping(String methodName, String issueKey, JsonNode responseBody) {
        TaskMap taskMap = new TaskMap();
        try {
            //call the method at runtime according to the argument "methodName"
            Method method = TaskMap.class.getMethod(methodName, String.class, JsonNode.class);
            return (JsonNode) method.invoke(taskMap, issueKey, responseBody);
        } catch (InvocationTargetException e) {
            // The reflected method itself threw: report the underlying cause —
            // the wrapper's own message is typically null.
            final Throwable cause = e.getCause();
            return parseToJson("fail", cause != null ? cause.getMessage() : e.getMessage());
        } catch (ReflectiveOperationException | NullPointerException e) {
            // Unknown method name, inaccessible method, or null arguments.
            return parseToJson("fail", e.getMessage());
        }
    }

    /**
     * This method requests issue info and returns it to the calling method
     *
     * @param issueKey issue identifier used in the answer text
     * @param responseBody response to extract the description from
     * @return "success" JSON with the description, or "fail" if the issue does not exist
     */
    public JsonNode IssueDescription(String issueKey, JsonNode responseBody) {
        if (Extractor.getIssueDscription(responseBody, "description").equals("[\"Issue Does Not Exist\"]")) {
            return parseToJson("fail", "Cannot find issue");
        } else {
            String answer = "Description of " + issueKey + " is as follows: \n" +
                    Extractor.getIssueDscription(responseBody, "description");
            return parseToJson("success", answer);
        }
    }

    /**
     * This method requests assignee of issue and returns it to the calling method
     *
     * @param issueKey issue identifier used in the answer text
     * @param responseBody response to extract the assignee from
     * @return "success" JSON with the assignee, or "fail" if the issue does not exist
     */
    public JsonNode IssueAssignee(String issueKey, JsonNode responseBody) {
        if (Extractor.getIssueAssignee(responseBody, "assignee").equals("[\"Issue Does Not Exist\"]")) {
            return parseToJson("fail", "Cannot find issue");
        } else {
            String answer = Extractor.getIssueAssignee(responseBody, "assignee") + " is working on " + issueKey + ".";
            System.out.println(answer);
            return parseToJson("success", answer);
        }
    }

    /**
     * This method requests brief description of issue and returns it to the calling method
     *
     * NOTE(review): this queries the "assignee" field and mirrors IssueAssignee's
     * answer text — it looks copy-pasted; confirm whether a "brief"/"summary"
     * field was intended here.
     *
     * @param issueKey issue identifier used in the answer text
     * @param responseBody response to extract the brief from
     * @return "success" JSON with the brief, or "fail" if the issue does not exist
     */
    public JsonNode IssueBrief(String issueKey, JsonNode responseBody) {
        if (Extractor.getIssueBrief(responseBody, "assignee").equals("[\"Issue Does Not Exist\"]")) {
            return parseToJson("fail", "Cannot find issue");
        } else {
            String answer = Extractor.getIssueBrief(responseBody, "assignee") + " is working on " + issueKey + ".";
            System.out.println(answer);
            return parseToJson("success", answer);
        }
    }

    /** COMPLETE THIS METHOD
     * This method requests status of an issue and returns it to the calling method
     *
     * @param issueKey issue identifier (unused until implemented)
     * @return "success" JSON with a not-implemented notice
     */
    public JsonNode IssueStatus(String issueKey, JsonNode responseBody) {
        String answer = "Code to find the STATUS OF AN ISSUE has not been implemented yet. Please wait for our next version update.";
        System.out.println(answer);
        return parseToJson("success", answer);
    }

    /** COMPLETE THIS METHOD
     * This method gets the questions that POET was not able to answer in the past.
     *
     * @param issueKey issue identifier (unused until implemented)
     * @param responseBody response body (unused until implemented)
     * @return "success" JSON with a not-implemented notice
     */
    public JsonNode QuestionsNotAnswered(String issueKey, JsonNode responseBody) {
        String answer = "I have not saved any questions so far. Please wait for my next version update.";
        System.out.println(answer);
        return parseToJson("success", answer);
    }

    /** COMPLETE THIS METHOD
     * This method requests issues that are in progress and returns it to the calling method
     *
     * @param issueKey issue identifier (unused until implemented)
     * @return "success" JSON with a not-implemented notice
     */
    public JsonNode IssuesInProgress(String issueKey, JsonNode responseBody) {
        String answer = "Code to find ISSUES IN PROGRESS has not been implemented yet. Please wait for our next version update.";
        System.out.println(answer);
        return parseToJson("success", answer);
    }

    /** COMPLETE THIS METHOD
     * This method requests issues that are completed and returns it to the calling method
     *
     * @param issueKey issue identifier (unused until implemented)
     * @return "success" JSON with a not-implemented notice
     */
    public JsonNode IssuesCompleted(String issueKey, JsonNode responseBody) {
        String answer = "Code to find COMPLETED ISSUES has not been implemented yet. Please wait for our next version update.";
        System.out.println(answer);
        return parseToJson("success", answer);
    }

    /** COMPLETE THIS METHOD
     * This method requests issues that are stalled and returns it to the calling method
     *
     * @param issueKey issue identifier (unused until implemented)
     * @return "success" JSON with a not-implemented notice
     */
    public JsonNode StalledIssues(String issueKey, JsonNode responseBody) {
        String answer = "Code to find STALLED ISSUES has not been implemented yet. Please wait for our next version update.";
        System.out.println(answer);
        return parseToJson("success", answer);
    }

    /** COMPLETE THIS METHOD
     * This method sets the project for a channel
     *
     * @param issueKey issue identifier (unused until implemented)
     * @return "success" JSON with a not-implemented notice
     */
    public JsonNode SetProject(String issueKey, JsonNode responseBody) {
        String answer = "Code to SET PROJECT TO A CHANNEL has not been implemented yet. Please wait for our next version update.";
        System.out.println(answer);
        return parseToJson("success", answer);
    }

    /** COMPLETE THIS METHOD
     *
     * This method sets the context for the conversations.
     * It remembers the issue people are talking about.
     *
     * @param issueKey issue identifier (unused until implemented)
     * @return "success" JSON with a not-implemented notice
     */
    public JsonNode SetIssueContext(String issueKey, JsonNode responseBody) {
        String answer = "Code to find SET CONTEXT has not been implemented yet. Please wait for our next version update.";
        System.out.println(answer);
        return parseToJson("success", answer);
    }

    /**
     * This method takes a status and a message and returns a JSON object in the required format
     *
     * @param status  "success" or "fail"
     * @param message human-readable answer text
     * @return the {@code {status, message}} pair serialized as a JSON node
     */
    public static JsonNode parseToJson(String status, String message) {
        Response response = new Response();
        response.status = status;
        response.message = message;
        System.out.println("Response: " + response.message);
        return Json.toJson(response);
    }
}
|
package com.CS2103.TextBuddy_v2;
import java.text.SimpleDateFormat;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.io.*;
import java.util.StringTokenizer;
public class TextBuddyPlusPlus {

    private static final String APP_COMMAND = "TextBuddy";
    private static final String MSG_WELCOME = "Welcome to TextBuddy. %s is ready for use\n";
    private static final String COMMAND = "command: ";
    private static final String MSG_LIST_EMPTY = "%s is empty\n";
    private static final String MSG_PRINT_LIST = "%d. %s\n";
    private static final String MSG_ADDED_ELEMENT = "added to %s: \"%s\"\n";
    private static final String MSG_DELETE_ELEMENT = "delete from %s: \"%s\"\n";
    private static final String MSG_CLEAR_LIST = "all content deleted from %s\n";
    private static final String MSG_ERROR_SAVING = "Error encountered when saving %s\n";
    private static final String MSG_ERROR_READING = "Error encountered when reading %s\n";
    private static final String MSG_INVALID_COMMAND = "Invalid command!\n";
    private static final String MSG_INVALID_ELEID = "Invalid element ID\n";
    private static final String MSG_SEARCH_FAIL = "%s is not found.";
    private static final String MSG_SEARCH_INVALID = "Search Invalid!";
    private static final String MSG_SORT_SUCCESS = "Successfully Sorted! \"%s\"";

    // Points to current directory
    private static final String CURRENT_DIRECTORY = System.getProperty("user.dir") + "/";
    private static final String TXT_EXTENSION = ".txt";

    // Booleans used when saving file
    private static final boolean FILE_SAVE_SUCCESSFUL = true;
    private static final boolean FILE_SAVE_UNSUCCESSFUL = false;

    // Enumerated types of command
    enum COMMAND_TYPE {
        DISPLAY, ADD, DELETE, CLEAR, SORT, SEARCH, INVALID, EXIT
    };

    // In-memory copy of the file's lines; persisted via saveFile() after each mutation.
    private static final ArrayList<String> list = new ArrayList<String>();
    private static final BufferedReader buffered_reader = new BufferedReader(new InputStreamReader(System.in));
    private static String file_name;

    /**
     * Initialize TextBuddy
     * Usage: java TextBuddy <file_name>
     * @param args Input parameters to execute TextBuddy.
     * @throws IOException
     */
    public static void main(String[] args) throws IOException {
        TextBuddyPlusPlus textbuddypp = new TextBuddyPlusPlus();
        textbuddypp.TextBuddy(args);
        textbuddypp.processUserCommands();
    }

    /**
     * Validate the program arguments and load the target file.
     * Expects args[0] == "TextBuddy" and args[1] == the file name.
     *
     * BUG FIX: the original accessed args[1] whenever args[0] was valid, so
     * invoking with exactly one argument ("TextBuddy") crashed with an
     * ArrayIndexOutOfBoundsException; the length check now comes first.
     */
    public static void TextBuddy(String[] args) throws ArrayIndexOutOfBoundsException {
        /* Check for filename in program input parameter.
         * If COMMAND equals "TextBuddy" and a file name follows, the program is valid.
         * Otherwise the program exits because of an invalid command.
         */
        if (args.length < 2 || !args[0].equals(APP_COMMAND)) {
            print(MSG_INVALID_COMMAND);
            System.exit(0);
        }
        file_name = args[1];
        checkIfFileExists();
        print(String.format(MSG_WELCOME, CURRENT_DIRECTORY + file_name));
    }

    /**
     * Gets the current date and time.
     * (Kept as a helper for generating default file names.)
     *
     * @return The current date and time in "dd-MMM-HH-mm" format.
     */
    private static String getDateTime() {
        SimpleDateFormat dt_format = new SimpleDateFormat("dd-MMM-HH-mm");
        return (dt_format.format(new Date()));
    }

    /**
     * Check if file exists.
     * If the file exists, read its contents into the list; otherwise create it empty.
     */
    private static void checkIfFileExists() {
        try {
            File file = new File(file_name);
            if (!file.exists()) {
                createFileIfNotExist(file);
            } else {
                /* Read in the file contents
                 * add it to the list
                 */
                String _line;
                BufferedReader br = new BufferedReader(new FileReader(file_name));
                while ((_line = br.readLine()) != null) {
                    list.add(_line);
                }
                br.close();
            }
        } catch (Exception ex) {
            list.clear(); // Clear all data
            print(String.format(MSG_ERROR_READING, file_name));
        }
    }

    /**
     * createFileIfNotExist:
     *
     * @param n_file Receive File to be processed
     * @throws IOException
     */
    private static void createFileIfNotExist(File n_file) throws IOException {
        n_file.createNewFile();
    }

    /**
     * processUserCommands:
     * Read commands from each line and execute the commands accordingly
     * DISPLAY: displays all lines in the file
     * ADD: adds a line to the file
     * DELETE: delete a line from the file
     * CLEAR: clear the entire file
     * SORT: sort the file contents
     * SEARCH: find lines containing a keyword
     * EXIT: exit the program
     * INVALID COMMAND: anything that doesn't resemble the commands above
     *
     * @throws IOException
     */
    public static void processUserCommands() throws IOException {
        while (true) {
            // BUG FIX: print_out is reset each iteration so a stale result from a
            // previous command is no longer re-printed after an invalid command.
            String print_out = "";
            print(COMMAND);
            try {
                /* Split the commands into two arguments.
                 * cmd[0] represents COMMAND argument
                 * cmd[1] represents the argument to be parsed into
                 */
                String[] cmd = buffered_reader.readLine().trim().split(" ", 2);
                COMMAND_TYPE cmd_type = determineCommandType(cmd[0]);
                switch (cmd_type) {
                    case DISPLAY :
                        print_out = displayFile();
                        break;
                    case ADD :
                        print_out = addElement(cmd[1]);
                        break;
                    case DELETE :
                        print_out = deleteElement(cmd[1]);
                        break;
                    case CLEAR :
                        print_out = clearList();
                        break;
                    case SORT :
                        print_out = sort();
                        break;
                    case SEARCH :
                        print_out = search(cmd[1]);
                        break;
                    case EXIT :
                        System.exit(0);
                        break;
                    default :
                        print(MSG_INVALID_COMMAND);
                        break;
                }
            } catch (Exception ex) {
                print_out = MSG_INVALID_COMMAND;
            }
            print(print_out);
        }
    }

    /**
     * Execute a single full command line (e.g. "add hello") and return its output.
     *
     * BUG FIX: the command type is now determined from the FIRST word only; the
     * original passed the whole command string to determineCommandType(), so any
     * command carrying an argument was classified INVALID and threw an Error.
     *
     * @param cmd the full command line
     * @return the textual result of the command
     * @throws IOException if a command implementation fails on I/O
     */
    public static String executeCommand(String cmd) throws IOException {
        String textInput = removeCommandType(cmd);
        COMMAND_TYPE cmd_type = determineCommandType(getFirstCommand(cmd));
        switch (cmd_type) {
            case DISPLAY :
                return displayFile();
            case ADD :
                return addElement(textInput);
            case DELETE :
                return deleteElement(textInput);
            case CLEAR :
                return clearList();
            case SORT :
                return sort();
            case SEARCH :
                return search(textInput);
            case EXIT :
                System.exit(0);
            default :
                throw new Error(MSG_INVALID_COMMAND);
        }
    }

    // Returns the first whitespace-delimited token of the command line.
    private static String getFirstCommand(String cmd_in) {
        StringTokenizer cmd_token = new StringTokenizer(cmd_in);
        return cmd_token.nextToken();
    }

    // Strips the command keyword, leaving only the argument text.
    private static String removeCommandType(String cmd_in) {
        return cmd_in.replace(getFirstCommand(cmd_in), "").trim();
    }

    /**
     * This operation determines which of the supported command types the user
     * wants to perform
     *
     * @param command_type_str is the first word of the user command
     */
    private static COMMAND_TYPE determineCommandType(String command_type_str) {
        if (command_type_str == null) {
            throw new Error("Command type string cannot be null!");
        }
        if (command_type_str.equalsIgnoreCase("display") || command_type_str.equalsIgnoreCase("ls")) {
            return COMMAND_TYPE.DISPLAY;
        } else if (command_type_str.equalsIgnoreCase("add") || command_type_str.equalsIgnoreCase("+")) {
            return COMMAND_TYPE.ADD;
        } else if (command_type_str.equalsIgnoreCase("delete") || command_type_str.equalsIgnoreCase("rm") || command_type_str.equalsIgnoreCase("-")) {
            return COMMAND_TYPE.DELETE;
        } else if (command_type_str.equalsIgnoreCase("clear")) {
            return COMMAND_TYPE.CLEAR;
        } else if (command_type_str.equalsIgnoreCase("sort")) {
            return COMMAND_TYPE.SORT;
        } else if (command_type_str.equalsIgnoreCase("search")) {
            return COMMAND_TYPE.SEARCH;
        } else if (command_type_str.equalsIgnoreCase("exit") || command_type_str.equalsIgnoreCase("quit") || command_type_str.equalsIgnoreCase("q")) {
            return COMMAND_TYPE.EXIT;
        } else {
            return COMMAND_TYPE.INVALID;
        }
    }

    /**
     * addElement:
     * add line to an ArrayList<String>
     *
     * @param _element Input element to be added
     * @return Success message when element added or error while saving
     */
    private static String addElement(String _element) {
        list.add(_element);
        boolean isFileSaveSuccessful = saveFile();
        if (isFileSaveSuccessful) {
            return String.format(MSG_ADDED_ELEMENT, file_name, _element);
        } else {
            return String.format(MSG_ERROR_SAVING, file_name);
        }
    }

    /**
     * saveFile:
     * Saves ArrayList<String> elements to file
     *
     * @return True or false while saving file -> Success or failure in saving
     */
    private static boolean saveFile() {
        try {
            FileWriter file = new FileWriter(file_name);
            // Build the whole output once, then write it in a single call.
            StringBuilder output = new StringBuilder();
            for (String _line : list) {
                output.append(_line).append("\n");
            }
            file.write(output.toString());
            file.flush();
            file.close();
        } catch (IOException e) {
            return FILE_SAVE_UNSUCCESSFUL;
        }
        return FILE_SAVE_SUCCESSFUL;
    }

    /**
     * displayFile:
     * Displays all lines found in the text file.
     * If list is empty, return list empty message, otherwise, print out the entire list of text
     * @return Entire data entries list
     * @throws IOException
     */
    private static String displayFile() throws IOException {
        StringBuffer strbuffer = new StringBuffer();
        if (list.isEmpty()) {
            System.out.printf(MSG_LIST_EMPTY, file_name);
        } else {
            for (int i = 0; i < list.size(); i++) {
                strbuffer.append(String.format(MSG_PRINT_LIST, (i + 1), list.get(i)));
            }
        }
        return strbuffer.toString();
    }

    /**
     * deleteElement:
     * deletes a single index line from the file
     * @param param Input element to be deleted
     * @return Delete successful and saved or error deleting and saving
     */
    private static String deleteElement(String param) {
        int id;
        int _index;
        // Parse the element ID
        try {
            id = parseID(param);
        } catch (Exception e) {
            return String.format(MSG_INVALID_COMMAND);
        }
        // Check if ID is valid
        if (id > 0 && list.size() >= id) {
            _index = id - 1; // 0 based indexing list
            String element = list.get(_index);
            list.remove(_index);
            boolean isFileSaveSuccessful = saveFile();
            if (isFileSaveSuccessful) {
                return String.format(MSG_DELETE_ELEMENT, file_name, element);
            } else {
                return String.format(MSG_ERROR_SAVING, file_name);
            }
        } else if (list.isEmpty()) {
            return String.format(MSG_LIST_EMPTY, file_name);
        } else {
            return String.format(MSG_INVALID_ELEID);
        }
    }

    /**
     * clearList:
     * clears the data entries of the entire file
     * @return File cleared successfully or error saving
     * @throws IOException
     */
    private static String clearList() throws IOException {
        list.clear();
        boolean isFileSaveSuccessful = saveFile();
        if (isFileSaveSuccessful) {
            return String.format(MSG_CLEAR_LIST, file_name);
        } else {
            return String.format(MSG_ERROR_SAVING, file_name);
        }
    }

    /**
     * Parse the parameter string into integer preparing for element deletion.
     *
     * @param param User entered parameter
     * @return Element ID as entered by user if valid
     * @throws Exception When invalid element ID entered
     */
    private static int parseID(String param) throws Exception {
        return Integer.parseInt(param.split(" ", 2)[0]);
    }

    private static void print(String output) {
        System.out.print(output);
    }

    /**
     * Method to find lines with a particular keyword.
     * (Previously an unimplemented stub that always returned null.)
     *
     * @param param Keyword to be searched for
     * @return numbered matching lines, the "not found" message when nothing
     *         matches, or the invalid-search message for a blank keyword
     */
    public static String search(String param) {
        if (param == null || param.trim().length() == 0) {
            return MSG_SEARCH_INVALID;
        }
        StringBuffer matches = new StringBuffer();
        for (int i = 0; i < list.size(); i++) {
            if (list.get(i).contains(param)) {
                // Keep the original 1-based numbering so results match display output.
                matches.append(String.format(MSG_PRINT_LIST, (i + 1), list.get(i)));
            }
        }
        if (matches.length() == 0) {
            return String.format(MSG_SEARCH_FAIL, param);
        }
        return matches.toString();
    }

    /**
     * Sort the file contents alphabetically and persist the result.
     * (Previously an unimplemented stub that always returned null.)
     *
     * @return success message with the file name, or the saving-error message
     */
    public static String sort() {
        java.util.Collections.sort(list);
        if (saveFile()) {
            return String.format(MSG_SORT_SUCCESS, file_name);
        }
        return String.format(MSG_ERROR_SAVING, file_name);
    }
}
|
package com.bourke.glimmr.activities;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.view.ViewPager;
import android.text.Html;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.actionbarsherlock.app.ActionBar;
import com.actionbarsherlock.app.SherlockFragment;
import com.actionbarsherlock.view.MenuItem;
import com.androidquery.AQuery;
import com.bourke.glimmr.common.Constants;
import com.bourke.glimmr.common.GlimmrPagerAdapter;
import com.bourke.glimmr.common.MenuListView;
import com.bourke.glimmr.event.Events.IActivityItemsReadyListener;
import com.bourke.glimmr.event.Events.IPhotoInfoReadyListener;
import com.bourke.glimmr.fragments.explore.RecentPublicPhotosFragment;
import com.bourke.glimmr.fragments.home.ContactsGridFragment;
import com.bourke.glimmr.fragments.home.FavoritesGridFragment;
import com.bourke.glimmr.fragments.home.GroupListFragment;
import com.bourke.glimmr.fragments.home.PhotosetsFragment;
import com.bourke.glimmr.fragments.home.PhotoStreamGridFragment;
import com.bourke.glimmr.R;
import com.bourke.glimmr.services.ActivityNotificationHandler;
import com.bourke.glimmr.services.AppListener;
import com.bourke.glimmr.services.AppService;
import com.bourke.glimmr.tasks.LoadFlickrActivityTask;
import com.bourke.glimmr.tasks.LoadPhotoInfoTask;
import com.commonsware.cwac.wakeful.WakefulIntentService;
import com.googlecode.flickrjandroid.activity.Event;
import com.googlecode.flickrjandroid.activity.Item;
import com.googlecode.flickrjandroid.people.User;
import com.googlecode.flickrjandroid.photos.Photo;
import com.sbstrm.appirater.Appirater;
import com.viewpagerindicator.PageIndicator;
import com.viewpagerindicator.TitlePageIndicator;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import net.simonvt.widget.MenuDrawer;
import net.simonvt.widget.MenuDrawerManager;
import org.ocpsoft.pretty.time.PrettyTime;
import com.actionbarsherlock.view.Window;
public class MainActivity extends BaseActivity {
private static final String TAG = "Glimmr/MainActivity";
private List<PageItem> mContent;
private MenuDrawerManager mMenuDrawerMgr;
private MenuAdapter mMenuAdapter;
private MenuListView mList;
private GlimmrPagerAdapter mPagerAdapter;
private ViewPager mViewPager;
private PageIndicator mIndicator;
private int mActivePosition = -1;
private long mActivityListVersion = -1;
private SharedPreferences mPrefs;
@Override
public void onCreate(Bundle savedInstanceState) {
requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
super.onCreate(savedInstanceState);
if (mOAuth == null) {
startActivity(new Intent(this, ExploreActivity.class));
} else {
if (savedInstanceState != null) {
mActivePosition =
savedInstanceState.getInt(Constants.STATE_ACTIVE_POSITION);
}
mPrefs = getSharedPreferences(Constants.PREFS_NAME,
Context.MODE_PRIVATE);
mAq = new AQuery(this);
initPageItems();
mMenuDrawerMgr =
new MenuDrawerManager(this, MenuDrawer.MENU_DRAG_CONTENT);
mMenuDrawerMgr.setContentView(R.layout.main_activity);
setSupportProgressBarIndeterminateVisibility(Boolean.FALSE);
initViewPager();
initMenuDrawer();
initNotificationAlarms();
Appirater.appLaunched(this);
}
}
@Override
public void onResume() {
super.onResume();
mPrefs = getSharedPreferences(Constants.PREFS_NAME,
Context.MODE_PRIVATE);
mOAuth = loadAccessToken(mPrefs);
if (mOAuth != null) {
mUser = mOAuth.getUser();
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
if (activityItemsNeedsUpdate()) {
updateMenuListItems();
}
mMenuDrawerMgr.toggleMenu();
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onBackPressed() {
final int drawerState = mMenuDrawerMgr.getDrawerState();
if (drawerState == MenuDrawer.STATE_OPEN ||
drawerState == MenuDrawer.STATE_OPENING) {
mMenuDrawerMgr.closeMenu();
return;
}
super.onBackPressed();
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
if (mMenuDrawerMgr != null) {
outState.putParcelable(Constants.STATE_MENUDRAWER,
mMenuDrawerMgr.onSaveDrawerState());
outState.putInt(Constants.STATE_ACTIVE_POSITION, mActivePosition);
}
outState.putLong(Constants.TIME_MENUDRAWER_ITEMS_LAST_UPDATED,
mActivityListVersion);
}
@Override
protected void onRestoreInstanceState(Bundle inState) {
super.onRestoreInstanceState(inState);
mMenuDrawerMgr.onRestoreDrawerState(inState
.getParcelable(Constants.STATE_MENUDRAWER));
mActivityListVersion = inState.getLong(
Constants.TIME_MENUDRAWER_ITEMS_LAST_UPDATED, -1);
}
@Override
public User getUser() {
return mUser;
}
private void initPageItems() {
mContent = new ArrayList<PageItem>();
mContent.add(new PageItem(getString(R.string.contacts),
R.drawable.ic_action_social_person_dark,
ContactsGridFragment.class));
mContent.add(new PageItem(getString(R.string.photos),
R.drawable.ic_content_picture_dark,
PhotoStreamGridFragment.class));
mContent.add(new PageItem(getString(R.string.favorites),
R.drawable.ic_action_rating_important_dark,
FavoritesGridFragment.class));
mContent.add(new PageItem(getString(R.string.sets),
R.drawable.collections_collection_dark,
PhotosetsFragment.class));
mContent.add(new PageItem(getString(R.string.groups),
R.drawable.ic_action_social_group_dark,
GroupListFragment.class));
mContent.add(new PageItem(getString(R.string.explore),
R.drawable.ic_action_av_shuffle_dark,
RecentPublicPhotosFragment.class));
}
public void updateMenuListItems() {
if (Constants.DEBUG) Log.d(TAG, "updateMenuListItems");
final List<Object> menuItems = new ArrayList<Object>();
/* Add the standard page related items */
for (PageItem page : mContent) {
menuItems.add(new MenuDrawerItem(page.mTitle, page.mIconDrawable));
}
menuItems.add(new MenuDrawerCategory(getString(R.string.activity)));
/* If the activity list file exists, add the contents to the menu
* drawer area. Otherwise start a task to fetch one. */
File f = getFileStreamPath(Constants.ACTIVITY_ITEMLIST_FILE);
if (f.exists()) {
/* There is some duplicated code here. Could move it into another
* function but the task is fragmented enough as is */
List<Item> items = ActivityNotificationHandler.loadItemList(this);
menuItems.addAll(buildActivityStream(items));
mActivityListVersion = mPrefs.getLong(
Constants.TIME_ACTIVITY_ITEMS_LAST_UPDATED, -1);
mMenuAdapter.setItems(menuItems);
mMenuAdapter.notifyDataSetChanged();
} else {
new LoadFlickrActivityTask(new IActivityItemsReadyListener() {
@Override
public void onItemListReady(List<Item> items) {
if (items != null) {
ActivityNotificationHandler.storeItemList(
MainActivity.this, items);
menuItems.addAll(buildActivityStream(items));
mActivityListVersion = mPrefs.getLong(
Constants.TIME_ACTIVITY_ITEMS_LAST_UPDATED,
-1);
} else {
Log.e(TAG, "onItemListReady: Item list is null");
}
mMenuAdapter.setItems(menuItems);
mMenuAdapter.notifyDataSetChanged();
}
})
.execute(mOAuth);
}
}
/**
* Determines if the menu drawer's items need to be refreshed from the
* cache file.
* True if the cache file doesn't exist or a newer cache file exists than
* the version we're displaying.
*/
private boolean activityItemsNeedsUpdate() {
long lastUpdate = mPrefs.getLong(
Constants.TIME_ACTIVITY_ITEMS_LAST_UPDATED, -1);
File f = getFileStreamPath(Constants.ACTIVITY_ITEMLIST_FILE);
boolean isStale = (mActivityListVersion < lastUpdate);
boolean ret = (isStale || !f.exists());
if (Constants.DEBUG) Log.d(TAG, "activityItemsNeedsUpdate: " + ret);
return ret;
}
/**
* An item can be a photo or photoset.
* An event can be a comment, note, or fav on that item.
*/
private List<Object> buildActivityStream(List<Item> activityItems) {
List<Object> ret = new ArrayList<Object>();
if (activityItems == null) {
return ret;
}
PrettyTime prettyTime = new PrettyTime(Locale.getDefault());
String html = "<small><i>%s</i></small><br>" +
"%s <font color=\"
for (Item i : activityItems) {
if ("photo".equals(i.getType())) {
StringBuilder itemString = new StringBuilder();
for (int j=i.getEvents().size()-1; j>=0; j
Event e = ((List<Event>)i.getEvents()).get(j);
String pTime = prettyTime.format(e.getDateadded());
String author = e.getUsername();
if (mUser != null && mUser.getUsername().equals(author)) {
author = getString(R.string.you);
}
if ("comment".equals(e.getType())) {
itemString.append(String.format(html, pTime, author,
getString(R.string.commented_on),
i.getTitle()));
} else if ("fave".equals(e.getType())) {
itemString.append(String.format(html, pTime, author,
getString(R.string.favorited),
i.getTitle()));
} else {
Log.e(TAG, "unsupported Event type: " + e.getType());
continue;
}
if (j > 0) {
itemString.append("<br><br>");
}
}
ret.add(new MenuDrawerActivityItem(itemString.toString(), -1));
}
}
return ret;
}
private void initMenuDrawer() {
/* A custom ListView is needed so the drawer can be notified when it's
* scrolled. This is to update the position
* of the arrow indicator. */
mList = new MenuListView(this);
mMenuAdapter = new MenuAdapter();
mList.setDivider(null);
mList.setDividerHeight(0);
mList.setCacheColorHint(0);
mList.setBackgroundResource(R.drawable.navy_blue_tiled);
mList.setSelector(R.drawable.selectable_background_glimmrdark);
mList.setAdapter(mMenuAdapter);
mList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view,
int position, long id) {
switch (mMenuAdapter.getItemViewType(position)) {
case MENU_DRAWER_ITEM:
mViewPager.setCurrentItem(position);
mActivePosition = position;
mMenuDrawerMgr.setActiveView(view, position);
mMenuDrawerMgr.closeMenu();
break;
case MENU_DRAWER_ACTIVITY_ITEM:
/* offset the position by number of content items + 1
* for the category item */
startViewerForActivityItem(position-mContent.size()-1);
break;
}
}
});
mList.setOnScrollChangedListener(
new MenuListView.OnScrollChangedListener() {
@Override
public void onScrollChanged() {
mMenuDrawerMgr.getMenuDrawer().invalidate();
}
});
mMenuDrawerMgr.setMenuView(mList);
mActionBar.setDisplayHomeAsUpEnabled(true);
mMenuDrawerMgr.getMenuDrawer().setTouchMode(
MenuDrawer.TOUCH_MODE_FULLSCREEN);
mMenuDrawerMgr.getMenuDrawer().setOnDrawerStateChangeListener(
new MenuDrawer.OnDrawerStateChangeListener() {
@Override
public void onDrawerStateChange(int oldState,
int newState) {
if (newState == MenuDrawer.STATE_OPEN) {
if (activityItemsNeedsUpdate()) {
updateMenuListItems();
}
}
}
});
ViewPager.SimpleOnPageChangeListener pageChangeListener =
new ViewPager.SimpleOnPageChangeListener() {
@Override
public void onPageSelected(final int position) {
if (mIndicator != null) {
mIndicator.setCurrentItem(position);
} else {
mActionBar.setSelectedNavigationItem(position);
}
if (position == 0) {
mMenuDrawerMgr.getMenuDrawer().setTouchMode(
MenuDrawer.TOUCH_MODE_FULLSCREEN);
} else {
mMenuDrawerMgr.getMenuDrawer().setTouchMode(
MenuDrawer.TOUCH_MODE_NONE);
}
}
};
if (mIndicator != null) {
mIndicator.setOnPageChangeListener(pageChangeListener);
} else {
mViewPager.setOnPageChangeListener(pageChangeListener);
}
updateMenuListItems();
}
private void startViewerForActivityItem(int itemPos) {
setSupportProgressBarIndeterminateVisibility(Boolean.TRUE);
// TODO: only load these once throughout the activity
List<Item> items = ActivityNotificationHandler
.loadItemList(MainActivity.this);
Item item = items.get(itemPos);
new LoadPhotoInfoTask(new IPhotoInfoReadyListener() {
@Override
public void onPhotoInfoReady(final Photo photo) {
if (photo == null) {
Log.e(TAG, "onPhotoInfoReady: photo is null, " +
"can't start viewer");
// TODO: alert user
return;
}
List<Photo> photos = new ArrayList<Photo>();
photos.add(photo);
setSupportProgressBarIndeterminateVisibility(Boolean.FALSE);
PhotoViewerActivity.startPhotoViewer(
MainActivity.this, photos, 0);
}
}, item.getId(), item.getSecret()).execute(mOAuth);
}
private void initNotificationAlarms() {
SharedPreferences defaultSharedPrefs =
PreferenceManager.getDefaultSharedPreferences(this);
boolean enableNotifications = defaultSharedPrefs.getBoolean(
Constants.KEY_ENABLE_NOTIFICATIONS, false);
if (enableNotifications) {
if (Constants.DEBUG) Log.d(TAG, "Scheduling alarms");
WakefulIntentService.scheduleAlarms(
new AppListener(), this, false);
} else {
if (Constants.DEBUG) Log.d(TAG, "Cancelling alarms");
AppService.cancelAlarms(this);
}
}
private void initViewPager() {
mViewPager = (ViewPager) findViewById(R.id.viewPager);
List<String> pageTitles = new ArrayList<String>();
for (PageItem page : mContent) {
pageTitles.add(page.mTitle);
}
mPagerAdapter = new GlimmrPagerAdapter(
getSupportFragmentManager(), mViewPager, mActionBar,
pageTitles.toArray(new String[pageTitles.size()])) {
@Override
public SherlockFragment getItemImpl(int position) {
try {
return (SherlockFragment)
mContent.get(position).mFragmentClass.newInstance();
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
return null;
}
};
mViewPager.setAdapter(mPagerAdapter);
mIndicator = (TitlePageIndicator) findViewById(R.id.indicator);
if (mIndicator != null) {
mIndicator.setViewPager(mViewPager);
} else {
mActionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
for (PageItem page : mContent) {
ActionBar.Tab newTab =
mActionBar.newTab().setText(page.mTitle);
newTab.setTabListener(mPagerAdapter);
mActionBar.addTab(newTab);
}
}
}
private static final class PageItem {
public String mTitle;
public Integer mIconDrawable;
public Class mFragmentClass;
PageItem(String title, int iconDrawable, Class fragmentClass) {
mTitle = title;
mIconDrawable = iconDrawable;
mFragmentClass = fragmentClass;
}
}
private static final class MenuDrawerItem {
public String mTitle;
public int mIconRes;
MenuDrawerItem(String title, int iconRes) {
mTitle = title;
mIconRes = iconRes;
}
}
private static final class MenuDrawerCategory {
public String mTitle;
MenuDrawerCategory(String title) {
mTitle = title;
}
}
private static final class MenuDrawerActivityItem {
public String mTitle;
public int mIconRes;
MenuDrawerActivityItem(String title, int iconRes) {
mTitle = title;
mIconRes = iconRes;
}
}
public static final int MENU_DRAWER_ITEM = 0;
public static final int MENU_DRAWER_CATEGORY_ITEM = 1;
public static final int MENU_DRAWER_ACTIVITY_ITEM = 2;
private class MenuAdapter extends BaseAdapter {
private List<Object> mItems;
MenuAdapter(List<Object> items) {
mItems = items;
}
MenuAdapter() {
mItems = new ArrayList<Object>();
}
public void setItems(List<Object> items) {
mItems = items;
}
@Override
public int getCount() {
return mItems.size();
}
@Override
public Object getItem(int position) {
return mItems.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public int getItemViewType(int position) {
Object item = getItem(position);
if (item instanceof MenuDrawerActivityItem) {
return MENU_DRAWER_ACTIVITY_ITEM;
} else if (item instanceof MenuDrawerCategory) {
return MENU_DRAWER_CATEGORY_ITEM;
}
return MENU_DRAWER_ITEM;
}
@Override
public int getViewTypeCount() {
return 3;
}
@Override
public boolean isEnabled(int position) {
return !(getItem(position) instanceof MenuDrawerCategory);
}
@Override
public boolean areAllItemsEnabled() {
return false;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View v = convertView;
Object item = getItem(position);
if (item instanceof MenuDrawerActivityItem) {
if (v == null) {
v = (LinearLayout) getLayoutInflater().inflate(
R.layout.menu_row_activity_item, parent, false);
}
TextView tv = (TextView) v.findViewById(R.id.text);
tv.setText(Html.fromHtml(
((MenuDrawerActivityItem) item).mTitle));
} else if (item instanceof MenuDrawerItem) {
if (v == null) {
v = getLayoutInflater().inflate(
R.layout.menu_row_item, parent, false);
}
TextView tv = (TextView) v;
tv.setText(((MenuDrawerItem) item).mTitle);
tv.setCompoundDrawablesWithIntrinsicBounds(
((MenuDrawerItem) item).mIconRes, 0, 0, 0);
} else if (item instanceof MenuDrawerCategory) {
if (v == null) {
v = (LinearLayout) getLayoutInflater().inflate(
R.layout.menu_row_category, parent, false);
}
((TextView) v.findViewById(R.id.text)).setText(
((MenuDrawerCategory) item).mTitle);
} else {
Log.e(TAG, "MenuAdapter.getView: Unsupported item type");
}
v.setTag(R.id.mdActiveViewPosition, position);
if (position == mActivePosition) {
mMenuDrawerMgr.setActiveView(v, position);
}
return v;
}
}
}
|
package com.darcycasselman.cachedemo;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * LRU cache backed by an access-ordered {@link LinkedHashMap}.
 *
 * Bug fixed: the previous implementation used a plain HashMap and never
 * evicted anything — {@code maxSize} was stored but completely ignored, so
 * the "LRU cache" grew without bound.  {@code removeEldestEntry} now evicts
 * the least-recently-accessed entry once the cache exceeds {@code maxSize}.
 *
 * Not thread-safe (same as before).
 */
public class LruCacheImpl implements LruCache {

    private static final int DEFAULT_CACHE_SIZE = 10;

    private final int maxSize;

    // accessOrder=true makes iteration order "least recently accessed first";
    // removeEldestEntry is consulted after each put and evicts the eldest
    // entry whenever the size limit is exceeded.
    private final Map<Object, Object> cache =
            new LinkedHashMap<Object, Object>(16, 0.75f, true) {
                @Override
                protected boolean removeEldestEntry(Map.Entry<Object, Object> eldest) {
                    return size() > maxSize;
                }
            };

    public LruCacheImpl(int maxSize) {
        this.maxSize = maxSize;
    }

    public LruCacheImpl() {
        this(DEFAULT_CACHE_SIZE);
    }

    @Override
    public Object get(Object key) {
        // A hit counts as "recently used" thanks to access ordering.
        return cache.get(key);
    }

    @Override
    public void put(Object key, Object value) {
        cache.put(key, value);
    }

    @Override
    public int getMaxSize() {
        return maxSize;
    }

    @Override
    public String toString() {
        return cache.toString();
    }
}
|
package com.ecyrd.jspwiki.url;
import java.util.Properties;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import javax.servlet.http.HttpServletRequest;
import org.apache.log4j.Logger;
import com.ecyrd.jspwiki.InternalWikiException;
import com.ecyrd.jspwiki.TextUtil;
import com.ecyrd.jspwiki.WikiContext;
import com.ecyrd.jspwiki.WikiEngine;
/**
 * Provides a way to do short URLs of the form /wiki/PageName.
 *
 * The prefix is taken from the {@code jspwiki.shortURLConstructor.prefix}
 * property; when absent it is derived from the engine's base URL path.
 *
 * @author Janne Jalkanen
 *
 * @since 2.2
 */
public class ShortURLConstructor
    extends DefaultURLConstructor
{
    static Logger log = Logger.getLogger( ShortURLConstructor.class );

    // Path prefix prepended to page names, e.g. "/wiki/".
    protected String m_urlPrefix = "";

    public static final String PROP_PREFIX = "jspwiki.shortURLConstructor.prefix";

    public void initialize( WikiEngine engine,
                            Properties properties )
    {
        super.initialize( engine, properties );

        m_urlPrefix = TextUtil.getStringProperty( properties, PROP_PREFIX, null );

        if( m_urlPrefix == null )
        {
            // No explicit prefix configured: derive it from the base URL path.
            String baseurl = engine.getBaseURL();
            if( baseurl == null || baseurl.length() == 0 )
            {
                log.error("Using ShortURLConstructors without jspwiki.baseURL is NOT advised, and can result in trouble.");
                baseurl = "";
            }
            try
            {
                URL url = new URL( baseurl );

                String path = url.getPath();

                m_urlPrefix = path+"wiki/";
            }
            catch( MalformedURLException e )
            {
                log.error( "Malformed base URL!" );
                m_urlPrefix = "/wiki/"; // Just a guess.
            }
        }

        log.info("Short URL prefix path="+m_urlPrefix+" (You can use "+PROP_PREFIX+" to override this)");
    }

    /**
     * Constructs the actual URL based on the context.
     *
     * View/edit-style contexts share the short "wiki/%n" form with a "do="
     * query parameter; ATTACH, NONE and ERROR use full paths.  The %n/%u/%U
     * placeholders are expanded by the superclass's doReplacement().
     */
    private String makeURL( String context,
                            String name,
                            boolean absolute )
    {
        String viewurl = m_urlPrefix+"%n";

        if( absolute )
            viewurl = "%u"+m_urlPrefix+"%n";

        if( context.equals(WikiContext.VIEW) )
        {
            if( name == null ) return makeURL("%u","",absolute); // FIXME
            return doReplacement( viewurl, name, absolute );
        }
        else if( context.equals(WikiContext.EDIT) )
        {
            return doReplacement( viewurl+"?do=Edit", name, absolute );
        }
        else if( context.equals(WikiContext.ATTACH) )
        {
            return doReplacement( "%Uattach/%n", name, absolute );
        }
        else if( context.equals(WikiContext.INFO) )
        {
            return doReplacement( viewurl+"?do=PageInfo", name, absolute );
        }
        else if( context.equals(WikiContext.DIFF) )
        {
            return doReplacement( viewurl+"?do=Diff", name, absolute );
        }
        else if( context.equals(WikiContext.NONE) )
        {
            return doReplacement( "%U%n", name, absolute );
        }
        else if( context.equals(WikiContext.UPLOAD) )
        {
            return doReplacement( viewurl+"?do=Upload", name, absolute );
        }
        else if( context.equals(WikiContext.COMMENT) )
        {
            return doReplacement( viewurl+"?do=Comment", name, absolute );
        }
        else if( context.equals(WikiContext.LOGIN) )
        {
            return doReplacement( viewurl+"?do=Login", name, absolute );
        }
        else if( context.equals(WikiContext.ERROR) )
        {
            return doReplacement( "%UError.jsp", name, absolute );
        }
        throw new InternalWikiException("Requested unsupported context "+context);
    }

    /**
     * Constructs the URL with a bunch of parameters.
     *
     * @param parameters If null or empty, no parameters are added.
     */
    public String makeURL( String context,
                           String name,
                           boolean absolute,
                           String parameters )
    {
        if( parameters != null && parameters.length() > 0 )
        {
            // ATTACH/VIEW URLs carry no query string yet, so start with '?';
            // the other contexts already append "?do=..." and need '&'.
            // NOTE(review): NONE and ERROR also have no query string — if
            // parameters are ever passed for those contexts this would emit
            // a stray '&'; confirm against callers.
            if( context.equals(WikiContext.ATTACH) || context.equals(WikiContext.VIEW) )
            {
                parameters = "?"+parameters;
            }
            else
            {
                parameters = "&"+parameters;
            }
        }
        else
        {
            parameters = "";
        }
        return makeURL( context, name, absolute )+parameters;
    }

    /**
     * Should parse the "page" parameter from the actual
     * request.  Falls back to extracting the page name from the URL path
     * when no "page" request parameter is present.
     */
    public String parsePage( String context,
                             HttpServletRequest request,
                             String encoding )
        throws UnsupportedEncodingException
    {
        String pagereq = m_engine.safeGetParameter( request, "page" );

        if( pagereq == null )
        {
            pagereq = parsePageFromURL( request, encoding );
        }

        return pagereq;
    }

    // Maps the "do" request parameter to the JSP to forward to;
    // defaults to Wiki.jsp when absent.
    public String getForwardPage( HttpServletRequest req )
    {
        String jspPage = req.getParameter( "do" );
        if( jspPage == null ) jspPage = "Wiki";

        return jspPage+".jsp";
    }
}
|
package com.eddysystems.eddy.engine;
import com.eddysystems.eddy.EddyThread;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiField;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PsiShortNamesCache;
import com.intellij.util.Processor;
import com.intellij.util.indexing.IdFilter;
import org.jetbrains.annotations.NotNull;
import tarski.Items;
import tarski.JavaTrie.Generator;
import java.util.ArrayList;
import java.util.List;
/**
 * Generates tarski Items for a given short name by querying IntelliJ's
 * PsiShortNamesCache for classes, methods and fields, converting each hit
 * via the supplied Converter.  Results are memoized in an LRU cache keyed
 * by the looked-up name.
 */
class ItemGenerator implements Generator<Items.Item> {

    static final int cacheSize = 10000;
    final LRUCache<String, Items.Item[]> cache = new LRUCache<String, Items.Item[]>(cacheSize);

    final Project project;
    final GlobalSearchScope scope;
    final PsiShortNamesCache psicache;
    // Accept every file id — no index filtering.
    final IdFilter filter = new IdFilter() { @Override public boolean containsFileId(int id) { return true; } };
    final Converter converter;

    ItemGenerator(Project project, GlobalSearchScope scope, Converter conv) {
        this.project = project;
        this.scope = scope;
        this.psicache = PsiShortNamesCache.getInstance(project);
        converter = conv;
    }

    // Queries the PSI caches for all classes/methods/fields named s.
    // Each processor aborts (returns false) as soon as the owning eddy
    // thread reports cancellation.
    private Items.Item[] generate(String s) {
        final EddyThread thread = EddyThread.getEddyThread();
        final List<Items.Item> results = new ArrayList<Items.Item>();

        final Processor<PsiClass> classProc = new Processor<PsiClass>() {
        @Override
        public boolean process(PsiClass cls) {
            if (thread != null && thread.canceled())
                return false;
            results.add(converter.addClass(cls));
            return true;
        }
        };

        final Processor<PsiMethod> methodProc = new Processor<PsiMethod>() {
        @Override
        public boolean process(PsiMethod method) {
            if (thread != null && thread.canceled())
                return false;
            results.add(converter.addMethod(method));
            return true;
        }
        };

        final Processor<PsiField> fieldProc = new Processor<PsiField>() {
        @Override
        public boolean process(PsiField fld) {
            if (thread != null && thread.canceled())
                return false;
            results.add(converter.addField(fld));
            return true;
        }
        };

        // Soft-interrupt mode must bracket the cache queries exactly;
        // the finally guarantees it is popped even if a query throws.
        if (thread != null) thread.pushSoftInterrupts();
        try {
            psicache.processClassesWithName(s, classProc, scope, filter);
            psicache.processMethodsWithName(s, methodProc, scope, filter);
            psicache.processFieldsWithName(s, fieldProc, scope, filter);
        } finally {
            if (thread != null) thread.popSoftInterrupts();
        }

        return results.toArray(new Items.Item[results.size()]);
    }

    /**
     * Returns the cached items for s, generating and caching them on a miss.
     * NOTE(review): a canceled generate() stores a partial result in the
     * cache — confirm callers invalidate or tolerate this.
     */
    @Override @NotNull
    public Items.Item[] lookup(String s) {
        Items.Item[] result = cache.get(s);

        if (result != null)
            return result;
        else
            result = generate(s);

        // add to cache
        cache.put(s, result);
        return result;
    }
}
|
package com.example.octoissues;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.widget.Button;
import android.widget.NumberPicker;
/**
 * Lets the user pick a repository from a number picker and jump to its
 * issues list; also exposes settings via the options menu.
 */
public class RepoSelectActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_ACTION_BAR);
        setContentView(R.layout.repo_select_view);

        // Home button only behaves as expected post-ICS.
        ActionBar bar = getActionBar();
        if (Build.VERSION.SDK_INT > Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            bar.setHomeButtonEnabled(true);
        }

        configureRepoPicker();

        Button viewIssuesButton = (Button) findViewById(R.id.view_issues);
        viewIssuesButton.setTextSize(30);
    }

    // Sets up the two-entry repository picker with wrap-around disabled.
    private void configureRepoPicker() {
        NumberPicker repoPicker = (NumberPicker) findViewById(R.id.repoPicker);
        repoPicker.setMinValue(1);
        repoPicker.setMaxValue(2);
        repoPicker.setWrapSelectorWheel(false);
        repoPicker.setDisplayedValues(new String[]{"first", "second"});
        repoPicker.setValue(1);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.issues_menu, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.action_settings:
                startActivity(new Intent(this, PrefsActivity.class));
                return true;
            case R.id.saved_issues:
                return true;
        }
        return super.onOptionsItemSelected(item);
    }

    // onClick handler wired from the layout: opens the issues list.
    public void onClickSignIn(View view) {
        startActivity(new Intent(this, IssuesListActivity.class));
    }
}
|
package com.gilleland.george.homework;
import com.gilleland.george.utils.HomeworkAssignment;
import com.gilleland.george.utils.Menu;
import com.gilleland.george.utils.NotSortedException;
import com.gilleland.george.utils.Utils;
import java.io.File;
import java.io.FileNotFoundException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Random;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Homework assignment 3: an interactive menu for reading/generating a
 * string data set and sorting/searching it via {@link SortAndSearch}.
 */
public class Assignment3 extends HomeworkAssignment {

    private static final Logger log = Logger.getLogger(Assignment3.class.getName());

    public SortAndSearch sortAndSearch = new SortAndSearch();
    public Random rand = new Random();
    public ArrayList<String> dataset = new ArrayList<>();
    private Scanner in = new Scanner(System.in);

    /**
     * Main loop: shows the menu and dispatches the chosen command by name
     * via reflection until the user exits.
     */
    @Override
    public void run() {
        System.out.println(Assignment3.class.getClassLoader().getResource("logging.properties"));
        while (true) {
            ArrayList<String> menu_choices = new ArrayList<String>() {{
                add("Read");
                add("Generate");
                add("Print");
                add("Sort");
                add("Search");
            }};
            // Get the numeric choice, -1 if exit
            int choice = menu_choices.indexOf(Menu.displayS(menu_choices));
            if (choice >= 0) {
                try {
                    // because I'm lazy and don't feel like cluttering my code. We'll
                    // dynamically call the method
                    this.getClass().getMethod(menu_choices.get(choice).toLowerCase()).invoke(this);
                } catch (NoSuchMethodException e) {
                    System.out.println("I'm afraid I can't do that, Dave... Check the logs");
                    String msg = "Attempted to call an undefined method of " + choice;
                    String error = e.toString();
                    log.log(Level.FINE, msg, error);
                } catch (InvocationTargetException e) {
                    System.out.println("There was an error processing your command. Please check the logs");
                    String error = e.getTargetException().toString();
                    // (typo "metohd" fixed in the log message)
                    String msg = "Invocation target exception. Invoked method " + choice + " threw an error of: " + error;
                    log.log(Level.FINE, msg, error);
                } catch (IllegalAccessException e) {
                    System.out.println("I'm afraid I can't do that, Dave... Check the logs");
                    String msg = "Illegal access exception. Failed to access method " + choice;
                    String error = e.toString();
                    log.log(Level.FINE, msg, error);
                }
            } else {
                break;
            }
        }
    }

    /**
     * Reads in a list of strings and store them in {@link #dataset}.
     * An empty line ends input; numeric tokens are rejected.
     */
    public void read() {
        System.out.println("Input a string to add to the dataset (note, this will append to the end of the data and it will be marked as unsorted!)");
        System.out.println("When you are done, simply press enter without inputting anything else");
        // NOTE(review): hasNext("") matches an empty *token*, which a plain
        // Scanner over System.in rarely produces — confirm the loop guard
        // behaves as intended; the inner break on "" does the real work.
        while (!this.in.hasNext("")) {
            if (this.in.hasNextDouble() || this.in.hasNextLong() || this.in.hasNextFloat() || this.in.hasNextInt()) {
                System.out.println("No numbers please!");
                continue;
            }
            String input = this.in.nextLine();
            if (!input.equals("")) {
                this.dataset.add(input);
            } else {
                break;
            }
        }
    }

    /**
     * Prints the current dataset and whether or not it's sorted.
     */
    public void print() {
        System.out.println("The current data set:");
        System.out.println(this.dataset.toString());
        System.out.printf("It %s sorted\n", (this.sortAndSearch.is_sorted) ? "is" : "isn't");
    }

    /**
     * Present the user with the option to sort a data set using three sorting methods.
     * TODO: Fix Merge sort
     */
    public void sort() {
        ArrayList<String> choices = new ArrayList<String>() {{
            add("Merge");
            add("Quick");
            add("Shell");
        }};
        String choice = Menu.displayS(choices);
        if (choice.equalsIgnoreCase("merge")) {
            // do stuff for merge sortAndSearch
            this.sortAndSearch.mergeSort(this.dataset);
        } else if (choice.equalsIgnoreCase("quick")) {
            // do stuff for quick sortAndSearch
            this.sortAndSearch.quickSort(this.dataset);
        } else if (choice.equalsIgnoreCase("shell")) {
            this.sortAndSearch.shellSort(this.dataset);
        }
        // Sync the data sets
        // TODO: Get rid of this data set maybe?
        this.dataset = this.sortAndSearch.dataset;
    }

    /**
     * Opens the scrabble_words.txt file and randomly generates between 7 and 15 words
     * and adds them to the dataset (each starting with a distinct first letter).
     * For the lazy user and programmer!
     *
     * Bug fixed: on a duplicate first letter the loop index was decremented
     * once per *matching element* inside the scan, so a single rejected draw
     * could rewind the counter several times.  The scan now stops at the
     * first match and the retry decrement happens exactly once.
     *
     * @throws FileNotFoundException
     */
    public void generate() throws FileNotFoundException {
        this.dataset.clear();
        this.sortAndSearch.is_sorted = false;
        int bound = rand.nextInt(16);
        if (bound <= 6) {
            bound = 10;
        }
        for (int i = 0; i < bound; i++) {
            String choose = Utils.choose(new File(getClass().getResource("scrabble_words.txt").getPath()));
            boolean unique = true;
            for (String element : this.dataset) {
                // check for unique first character values
                if (element.charAt(0) == choose.charAt(0)) {
                    unique = false;
                    break; // one rejection per draw is enough
                }
            }
            if (unique) {
                this.dataset.add(choose);
            } else {
                // TODO: Make this more efficient
                i--; // try again!
            }
        }
    }

    /**
     * Present the user with the options to search for a key in the data set.
     */
    public void search() {
        ArrayList<String> choices = new ArrayList<String>() {{
            add("Binary");
            add("Interpolation");
        }};
        String choice = Menu.displayS(choices);
        int position = -1;
        try {
            if (choice.equalsIgnoreCase("binary")) {
                System.out.println("Enter a key to search for: ");
                position = this.sortAndSearch.binarySearch(this.in.nextLine().charAt(0));
            } else if (choice.equalsIgnoreCase("interpolation")) {
                System.out.println("Enter a key to search for: ");
                position = this.sortAndSearch.interpolationSearch(this.in.nextLine().charAt(0));
            } else {
                System.out.println("Invalid choice! Try again!");
                return;
            }
            if (position >= 0) {
                System.out.printf("Key was found at index %d with a value of %s. Yay!\n", position, this.sortAndSearch.dataset.get(position));
            } else {
                System.out.println("The key requested was not found in the data set!");
            }
        } catch (NotSortedException e) {
            System.out.println("You have to sort the data set first! I recommend the quickSort function!");
            System.out.println("Please sort the input and try again!");
        }
    }
}
|
package authzadmin;
import java.util.stream.Collectors;
import org.springframework.security.oauth2.provider.client.BaseClientDetails;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
/**
 * Adapts {@link OauthSettings} to Spring's {@link BaseClientDetails},
 * flattening the scope list to a comma-delimited string.
 */
public class OauthClientDetails extends BaseClientDetails {

    public OauthClientDetails(OauthSettings oauthSettings) {
        super(
            oauthSettings.getConsumerKey(),
            null,
            scopesAsCommaDelimitedString(oauthSettings),
            null,
            null,
            oauthSettings.getCallbackUrl()
        );
        setClientSecret(oauthSettings.getSecret());
    }

    // Null when no scopes are configured; otherwise "a,b,c" of scope values.
    // (Static so it can be evaluated inside the super(...) call.)
    private static String scopesAsCommaDelimitedString(OauthSettings settings) {
        if (CollectionUtils.isEmpty(settings.getScopes())) {
            return null;
        }
        return StringUtils.collectionToCommaDelimitedString(
                settings.getScopes().stream().map(Scope::getValue).collect(Collectors.toList()));
    }
}
|
package nl.mpi.kinnate.ui;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import nl.mpi.kinnate.KinTermSavePanel;
import nl.mpi.kinnate.kindata.VisiblePanelSetting.PanelType;
/**
 * "Kin Terms" menu: lets the user show/hide the kin terms panel of the active
 * diagram, create a new kin term group, and import/export/delete groups.
 * Item enabled/selected state is recomputed every time the menu is opened.
 */
public class KinTermsMenu extends JMenu {

    DiagramWindowManager diagramWindowManager;
    // kin term panel of the currently active diagram; refreshed on each menu open
    KinTermSavePanel currentKinTermSavePanel;
    JCheckBoxMenuItem hideShowMenu;
    JMenuItem newMenu;
    JMenuItem exportMenu;
    JMenuItem importMenu;
    JMenuItem deleteMenu;

    public KinTermsMenu(DiagramWindowManager diagramWindowManager) {
        this.diagramWindowManager = diagramWindowManager;
        initMenu();
    }

    // NOTE(review): this constructor leaves diagramWindowManager null, which would
    // throw a NullPointerException in menuSelected the first time the menu opens —
    // confirm it is only used by tooling (e.g. a GUI builder), never for a live menu.
    public KinTermsMenu() {
        initMenu();
    }

    /** Builds the menu items and wires the listeners that keep their state in sync. */
    private void initMenu() {
        hideShowMenu = new JCheckBoxMenuItem("Show");
        newMenu = new JMenuItem("New Kin Term Group");
        exportMenu = new JMenuItem("Export");
        importMenu = new JMenuItem("Import");
        deleteMenu = new JMenuItem("Delete");
        this.setText("Kin Terms");
        this.add(hideShowMenu);
        this.add(newMenu);
        this.add(importMenu);
        this.add(exportMenu);
        this.add(deleteMenu);
        // Recompute item state each time the menu is about to show.
        this.addMenuListener(new javax.swing.event.MenuListener() {
            public void menuSelected(javax.swing.event.MenuEvent evt) {
                // todo: check if a kin term pane is showing or associated with the diagram and that a diagam is showing
                currentKinTermSavePanel = diagramWindowManager.getKinTermPanel();
                if (currentKinTermSavePanel != null) {
                    if (currentKinTermSavePanel.getKinTermGroupCount() == 0) {
                        // if there are no kin term groups then present "new" not "show"
                        hideShowMenu.setSelected(false);
                        hideShowMenu.setEnabled(false);
                        exportMenu.setEnabled(false);
                        importMenu.setEnabled(false);
                        newMenu.setEnabled(true);
                        deleteMenu.setEnabled(false);
                    } else {
                        hideShowMenu.setEnabled(true);
                        if (!currentKinTermSavePanel.getPanelState(PanelType.KinTerms)) {
                            // panel hidden: only allow showing it
                            hideShowMenu.setSelected(false);
                            exportMenu.setEnabled(false);
                            importMenu.setEnabled(false);
                            newMenu.setEnabled(false);
                            deleteMenu.setEnabled(false);
                            // hideShowMenu.setText("Show");
                        } else {
                            // panel visible: full editing actions available (except delete, see ticket)
                            hideShowMenu.setSelected(true);
                            exportMenu.setEnabled(true);
                            importMenu.setEnabled(true);
                            newMenu.setEnabled(true);
                            deleteMenu.setEnabled(false); // todo: Ticket #1063 enable deleting the current kin term group and update the menu to reflect the group name that would be deleted
                            // hideShowMenu.setText("Hide");
                        }
                    }
                } else {
                    // no kin term panel for the current diagram: disable everything
                    exportMenu.setEnabled(false);
                    importMenu.setEnabled(false);
                    hideShowMenu.setEnabled(false);
                    newMenu.setEnabled(false);
                    deleteMenu.setEnabled(false);
                }
            }
            public void menuDeselected(javax.swing.event.MenuEvent evt) {
            }
            public void menuCanceled(javax.swing.event.MenuEvent evt) {
            }
        });
        // The actions below rely on currentKinTermSavePanel set in menuSelected above;
        // the items are only enabled when that panel is non-null.
        hideShowMenu.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                currentKinTermSavePanel.setPanelState(PanelType.KinTerms, hideShowMenu.isSelected());
            }
        });
        exportMenu.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                currentKinTermSavePanel.exportKinTerms();
            }
        });
        importMenu.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                currentKinTermSavePanel.importKinTerms();
            }
        });
        newMenu.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                // make the panel visible first so the new group is immediately editable
                currentKinTermSavePanel.setPanelState(PanelType.KinTerms, true);
                currentKinTermSavePanel.addKinTermGroup();
            }
        });
    }
}
|
package cateam.teastory.item;
import java.util.List;
import javax.annotation.Nullable;
import cateam.teastory.achievement.AchievementLoader;
import cateam.teastory.block.BlockLoader;
import cateam.teastory.common.ConfigLoader;
import cateam.teastory.creativetab.CreativeTabsLoader;
import cateam.teastory.potion.PotionLoader;
import net.minecraft.block.Block;
import net.minecraft.block.SoundType;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.MobEffects;
import net.minecraft.item.EnumAction;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemSoup;
import net.minecraft.item.ItemStack;
import net.minecraft.potion.Potion;
import net.minecraft.potion.PotionEffect;
import net.minecraft.util.EnumActionResult;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.text.translation.I18n;
import net.minecraft.world.World;
public class BlackTea extends ItemTeaDrink
{
    public BlackTea()
    {
        // registers this drink under the name "black_tea"
        super("black_tea");
    }
@Override
protected void onFoodEaten(ItemStack itemstack, World world, EntityPlayer entityplayer)
{
if(!world.isRemote)
{
int tier = itemstack.getItemDamage();
addPotion(tier, world, entityplayer);
}
}
public static void addPotion(int tier, World world, EntityPlayer entityplayer)
{
if (tier == 0)
{
entityplayer.addPotionEffect(new PotionEffect(MobEffects.STRENGTH, Math.max(0, ConfigLoader.TeaDrink_Time), 0));
if(world.rand.nextFloat() < 0.2F)
{
entityplayer.addPotionEffect(new PotionEffect(PotionLoader.PotionLifeDrain, Math.max(0, ConfigLoader.TeaDrink_Time) * 2, 0));
}
else if(world.rand.nextFloat() >= 0.8F)
{
entityplayer.addPotionEffect(new PotionEffect(MobEffects.HEALTH_BOOST, Math.max(0, ConfigLoader.TeaDrink_Time) * 2, 0));
}
}
else
{
entityplayer.addPotionEffect(new PotionEffect(MobEffects.STRENGTH, (int)(Math.max(0, ConfigLoader.TeaDrink_Time) * (10 + tier) / 10), tier - 1));
if(world.rand.nextFloat() < 0.2F)
{
entityplayer.addPotionEffect(new PotionEffect(PotionLoader.PotionLifeDrain, Math.max(0, ConfigLoader.TeaDrink_Time) * (10 + tier) / 10 * 2, tier - 1));
}
else if(world.rand.nextFloat() >= 0.8F)
{
entityplayer.addPotionEffect(new PotionEffect(MobEffects.HEALTH_BOOST, Math.max(0, ConfigLoader.TeaDrink_Time) * (10 + tier) / 10 * 2, tier - 1));
}
}
if (entityplayer.getRNG().nextInt() > 0.5F)
{
if (!entityplayer.inventory.addItemStackToInventory(new ItemStack(ItemLoader.tea_residue, 1, 1)))
{
world.spawnEntityInWorld(new EntityItem(world, entityplayer.posX + 0.5D, entityplayer.posY + 1.5D, entityplayer.posZ + 0.5D,
new ItemStack(ItemLoader.tea_residue, 1, 1)));
}
}
}
    @Override
    @Nullable
    public ItemStack onItemUseFinish(ItemStack stack, World worldIn, EntityLivingBase entityLiving)
    {
        // Award the black-tea achievement, then let the parent finish the drink
        // (which triggers onFoodEaten above).
        // NOTE(review): the unchecked cast assumes only players can finish drinking
        // this item — confirm no non-player entity can use it.
        ((EntityPlayer) entityLiving).addStat(AchievementLoader.blackTea);
        return super.onItemUseFinish(stack, worldIn, entityLiving);
    }
public Block getBlock(int meta)
{
switch(meta)
{
case 1:
return BlockLoader.blacktea_stone_cup;
case 2:
return BlockLoader.blacktea_glass_cup;
case 3:
return BlockLoader.blacktea_porcelain_cup;
default:
return BlockLoader.blacktea_wood_cup;
}
}
    /**
     * Sneak + right-click places the matching tea-cup block against the clicked
     * face; without sneaking this returns FAIL so the normal drink action runs.
     *
     * @return SUCCESS when a placement was attempted, FAIL otherwise
     */
    @Override
    public EnumActionResult onItemUse(ItemStack stack, EntityPlayer playerIn, World worldIn, BlockPos pos, EnumHand hand, EnumFacing facing, float hitX, float hitY, float hitZ)
    {
        if (playerIn.isSneaking())
        {
            Block drinkblock = getBlock(stack.getItemDamage());
            IBlockState iblockstate = worldIn.getBlockState(pos);
            Block block = iblockstate.getBlock();
            // if the clicked block cannot be replaced, target the adjacent position instead
            if (!block.isReplaceable(worldIn, pos))
            {
                pos = pos.offset(facing);
            }
            if (stack.stackSize != 0 && playerIn.canPlayerEdit(pos, facing, stack) && worldIn.canBlockBePlaced(drinkblock, pos, false, facing, (Entity)null, stack))
            {
                int i = this.getMetadata(stack.getMetadata());
                IBlockState iblockstate1 = drinkblock.getStateForPlacement(worldIn, pos, facing, hitX, hitY, hitZ, i, playerIn, stack);
                if (placeBlockAt(stack, playerIn, worldIn, pos, facing, hitX, hitY, hitZ, iblockstate1))
                {
                    // placement succeeded: play the block's placement sound and consume one item
                    SoundType soundtype = worldIn.getBlockState(pos).getBlock().getSoundType(worldIn.getBlockState(pos), worldIn, pos, playerIn);
                    worldIn.playSound(playerIn, pos, soundtype.getPlaceSound(), SoundCategory.BLOCKS, (soundtype.getVolume() + 1.0F) / 2.0F, soundtype.getPitch() * 0.8F);
                    --stack.stackSize;
                }
                return EnumActionResult.SUCCESS;
            }
            else
            {
                return EnumActionResult.FAIL;
            }
        }
        else
        {
            return EnumActionResult.FAIL;
        }
    }
public boolean placeBlockAt(ItemStack stack, EntityPlayer player, World world, BlockPos pos, EnumFacing side, float hitX, float hitY, float hitZ, IBlockState newState)
{
if (!world.setBlockState(pos, newState, 3)) return false;
IBlockState state = world.getBlockState(pos);
if (state.getBlock() == getBlock(stack.getItemDamage()))
{
ItemBlock.setTileEntityNBT(world, player, pos, stack);
getBlock(stack.getItemDamage()).onBlockPlacedBy(world, pos, state, player, stack);
}
return true;
}
}
|
package algorithms.graphs;
import algorithms.PermutationsWithAwait;
import algorithms.matrix.MatrixUtil;
import algorithms.misc.MiscMath0;
import algorithms.util.PairInt;
import gnu.trove.map.TIntIntMap;
import gnu.trove.map.TIntObjectMap;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.set.TIntSet;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import thirdparty.HungarianAlgorithm;
public class ApproxGraphSearchZeng {
    /**
     * property used in calculating the edit distance. if edgesAreLabeled = true,
     * the cost of edge insert, delete, and substitutions are added.
     */
    private boolean edgesAreLabeled = false;

    /**
     * set the property edgesAreLabeled to true or false (default is false),
     * to add the cost of edge insert, delete, and substitutions into edit distances.
     *
     * @param labeled whether edge labels participate in the edit-distance costs
     */
    public void setEdgesAreLabeled(boolean labeled) {
        this.edgesAreLabeled = labeled;
    }
public List<Result> approxFullSearch(Graph q, List<Graph> db, int w,
boolean useAsFilterWithoutOptimal) throws InterruptedException {
List<Result> results = new ArrayList<Result>();
Graph dbi;
StarStructure[] sQ = StarStructure.createStarStructureMultiset(q);
StarStructure[] sg1, sg2;
int[][] a1, a2;
double[][] distM;
double lM, tau, rho, lambda;
int[] refinedAssign;
StarStructure s;
Set<PairInt> e1, e2;
Graph g;
boolean swappedSG;
Norm norm;
int i, k, rIdx;
for (int ii = 0; ii < db.size(); ++ii) {
dbi = db.get(ii);
sg1 = StarStructure.copy(sQ);
sg2 = StarStructure.createStarStructureMultiset(dbi);
// normalize sq1 and sg2 to have same cardinality for bipartite vertex assignments
norm = normalize(sg1, sg2);
sg1 = norm.sg1;
sg2 = norm.sg2;
swappedSG = norm.swapped;
a1 = createAdjacencyMatrix(sg1);
a2 = createAdjacencyMatrix(sg2);
e1 = getEdges(sg1);
e2 = getEdges(sg2);
// create cost matrix for bipartite assignments of vertexes in sg1 to sg2
if (this.edgesAreLabeled) {
distM = StarStructure.createDistanceMatrix(sg1, sg2);
} else {
distM = StarStructure.createDistanceMatrixV(sg1, sg2);
}
int[] assign = balancedBipartiteAssignment(distM);
int mappingDist = mappingDistance(sg1, sg2, assign);
lM = lowerBoundEditDistance(sg1, sg2, mappingDist);
if (lM > w) {
continue;
}
if (this.edgesAreLabeled) {
tau = suboptimalEditDistance(sg1, sg2, e1, e2, assign);
} else {
tau = suboptimalEditDistanceV(sg1, sg2, a1, a2, assign);
}
if (tau <= w) {
results.add(new Result(ii, Result.BOUND.SUBOPTIMAL, assign, tau));
continue;
}
refinedAssign = Arrays.copyOf(assign, assign.length);
rho = refinedSuboptimalEditDistance(sg1, sg2, e1, e2, a1, a2, refinedAssign, tau, distM);
if (rho <= w) {
results.add(new Result(ii, Result.BOUND.REFINED_SUBOPTIMAL, assign, rho));
continue;
}
if (useAsFilterWithoutOptimal){
// exponential runtime complexity:
lambda = optimalEditDistance(sg1, sg2, e1, e2, a1, a2, refinedAssign, tau);
if (lambda <= w) {
results.add(new Result(ii, Result.BOUND.OPTIMAL, assign, lambda));
}
} else {
results.add(new Result(ii, Result.BOUND.LOWER, assign, lM));
}
} // end loop over db graphs
return results;
}
    /**
     * Approximate subgraph search of q within the graphs of db.
     * Not yet implemented; see approxSubSearchFilter for the filtering stage.
     *
     * @throws UnsupportedOperationException always
     */
    public List<Graph> approxSubSearch(Graph q, List<Graph> db, int w) {
        throw new UnsupportedOperationException("not yet implemented");
    }
public List<Graph> approxSubSearchFilter(Graph q, List<Graph> db, int w) {
List<Graph> results = new ArrayList<Graph>();
Graph dbi;
StarStructure[] sQ = StarStructure.createStarStructureMultiset(q);
StarStructure[] sg1, sg2;
int[][] a1, a2;
double[][] distM;
double lM, tau, rho, lambda;
int[] refinedAssign;
StarStructure s;
Set<PairInt> e1, e2;
Graph g;
Norm norm;
boolean swappedSG;
int i, k, rIdx;
for (int ii = 0; ii < db.size(); ++ii) {
dbi = db.get(ii);
sg1 = StarStructure.copy(sQ);
sg2 = StarStructure.createStarStructureMultiset(dbi);
// normalize sq1 and sg2 to have same cardinality for bipartite vertex assignments
norm = normalize(sg1, sg2);
sg1 = norm.sg1;
sg2 = norm.sg2;
swappedSG = norm.swapped;
//TODO: expect dbi > q for sub-graph searches?
// if so, the re-ordering here is unnecessary
a1 = createAdjacencyMatrix(sg1);
a2 = createAdjacencyMatrix(sg2);
e1 = getEdges(sg1);
e2 = getEdges(sg2);
// create cost matrix for bipartite assignments of vertexes in sg1 to sg2
if (this.edgesAreLabeled) {
distM = StarStructure.createDistanceMatrixNoRelabeling(sg1, sg2);
} else {
distM = StarStructure.createDistanceMatrixNoRelabelingV(sg1, sg2);
}
int[] assign = balancedBipartiteAssignment(distM);
int mappingDist = mappingDistance(sg1, sg2, assign);
lM = lowerBoundEditDistance(sg1, sg2, mappingDist);
int l = Math.abs(e2.size() - e1.size()) + Math.abs(q.vLabels.size() - dbi.vLabels.size());
if (lM > (l + 2*w)) {
continue;
}
results.add(dbi);
// return filtered results for user to then make
// approx subgraph search, subgraph similarity search,
// exact subgraph search,
// inexact or error-correcting graph isomorphisms
// the subgraph isomorphism problem is NP-complete
//TODO: follow-up on edge relaxation in Grafil ([35], Yan et al.)
// Yan, F. Zhu, P. S. Yu, and J. Han.
// Feature-based similarity search in graph structures. ACM TODS, 31(4), 2006
} // end loop over db graphs
return results;
}
protected double suboptimalEditDistance(StarStructure[] sg1, StarStructure[] sg2,
Set<PairInt> e1, Set<PairInt> e2, int[] assignments) {
int n = sg1.length;
int costVSubst = 0;
int costEDelSubst = 0;
int costEIns = 0;
TIntIntMap revAssign = reverseAssignment(assignments);
// vertex relabeling
int i, j;
for (i = 0; i < assignments.length; ++i) {
j = assignments[i];
if (sg1[i].rootLabel != sg2[j].rootLabel) {
costVSubst++;
}
}
int i2, j2, i1r, j1r;
PairInt edge2;
// Edge deletion or relabeling
for (PairInt edge1 : e1) {
i = edge1.getX();
i2 = edge1.getY();
j = assignments[i];
j2 = assignments[i2];
if (j < j2) {
edge2 = new PairInt(j, j2);
} else {
edge2 = new PairInt(j2, j);
}
i1r = sg1[i].reverseOrigVIndexes.get(i2);
j1r = sg2[j].reverseOrigVIndexes.get(j2);
if (!e2.contains(edge2)) {
costEDelSubst++;
} else if (i1r >= sg1[i].eLabels.length||j1r >= sg2[j].eLabels.length) {
costEDelSubst++;
} else {
int edgeLabel1 = sg1[i].eLabels[i1r];
int edgeLabel2 = sg2[j].eLabels[j1r];
if (!e2.contains(edge2) || (edgeLabel1 != edgeLabel2)) {
costEDelSubst++;
}
}
}
//TODO: fix error(s) here
// Edge insertion
PairInt edge1;
for (PairInt edge : e2) {
j = edge.getX();
j2 = edge.getY();
i = revAssign.get(j);
i2 = revAssign.get(j2);
if (i < i2) {
edge1 = new PairInt(i, 12);
} else {
edge1 = new PairInt(i2, i);
}
if (!e1.contains(edge1)) {
costEIns++;
}
}
int cost = costVSubst + costEDelSubst + costEIns;
System.out.printf("costs vSubst=%d, eDelSubst=%d, eIns=%d ==> c=%d\n",
costVSubst, costEDelSubst, costEIns, cost);
return cost;
}
    /**
     * Suboptimal edit distance with vertex edits only (no edge labels):
     * C(g, h, P) = sum_ij c[i][j]*p[i][j] + (1/2)*||a1 - P*a2*P^T||_1
     * where P is the permutation matrix of the assignment.
     *
     * @param sg1 star structures for graph g1
     * @param sg2 star structures for graph g2
     * @param a1 adjacency matrix for graph g1
     * @param a2 adjacency matrix for graph g2
     * @param assignments assignments[i] = vertex of g2 assigned to vertex i of g1
     * @return the edit cost C(g, h, P)
     */
    protected double suboptimalEditDistanceV(StarStructure[] sg1, StarStructure[] sg2,
        int[][] a1, int[][] a2, int[] assignments) {
        // Zeng et al. 2009 which has vertex edits but not edge edits
        int[][] p = MatrixUtil.createPermutationMatrix(assignments);
        int[][] c = createLabelMatrix(sg1, sg2, assignments);
        assert(c.length == p.length);
        assert(c[0].length == p[0].length);
        //C(g, h, P') = sum_(i|0:n-1) sum_(j|0:n-1) ( c[i][j]*p[i][j]
        //    + (1/2) || a1 - P*a2*P^T ||_1
        //Assuming that the L1-norm here is the same convention as MatLab:
        //  For p-norm = 1, the L1-norm is the maximum absolute column sum of the matrix.
        //  ||X||_1 = max sum for an arg j where (0<=j<=n-1) sum_(i=0 to n-1) ( |a[i][j] )
        int[][] pA2PT = MatrixUtil.multiply(p,
            MatrixUtil.multiply(a2, MatrixUtil.transpose(p)));
        // adjacency disagreement term after permuting g2 into g1's vertex order
        double term2 = 0.5*MatrixUtil.lp1Norm(
            MatrixUtil.elementwiseSubtract(a1, pA2PT));
        //this is a large term, summed from labels being equal.
        //origins are BLP paper by Justice & Hero, eqn(18).
        double term1 = 0;
        int i, j;
        for (i = 0; i < c.length; ++i) {
            for (j = 0; j < c[i].length; ++j) {
                term1 += c[i][j] * p[i][j];
            }
        }
        //System.out.printf("term1=%.2f term2=%.2f\n", term1, term2);
        return term1 + term2;
    }
protected double lowerBoundEditDistance(StarStructure[] sg1, StarStructure[] sg2,
int mappingDist) {
int maxDeg1 = maxDegree(sg1);
int maxDeg2 = maxDegree(sg2);
double denom = Math.max(maxDeg1, maxDeg2) + 1;
double lM = mappingDist/denom;
return lM;
}
protected int mappingDistance(StarStructure[] sg1, StarStructure[] sg2,
int[] assignments) {
// usign the assignments, sum the edit distances.
int sum = 0, i, d;
StarStructure s1, s2;
for (i = 0; i < assignments.length; ++i) {
s1 = sg1[i];
s2 = sg2[assignments[i]];
if (this.edgesAreLabeled) {
d = StarStructure.calculateEditDistance(s1, s2);
} else {
d = StarStructure.calculateEditDistanceV(s1, s2);
}
sum += d;
//System.out.printf("i=%d ed=%d sum=%d\n", i, d, sum);
}
return sum;
}
/**
* find the maximum degree of a vertex for the graph g.
* @param sg
* @return
*/
private int maxDegree(StarStructure[] sg) {
int max = 0, deg;
for (int i = 0; i < sg.length; ++i) {
deg = sg[i].vLabels.length;
if (deg > max) {
max = deg;
}
}
return max;
}
static int[][] createAdjacencyMatrix(StarStructure[] s) {
int nV = s.length;
int[][] a = new int[nV][];
StarStructure si;
int uIdx, vIdx, j;
for (int i = 0; i < nV; ++i) {
a[i] = new int[nV];
si = s[i];
uIdx = si.rootIdx;
for (j = 0; j < si.vLabels.length; ++j) {
vIdx = si.origVIndexes[j];
a[uIdx][vIdx] = 1;
}
}
return a;
}
static Set<PairInt> getEdges(StarStructure[] s) {
int nV = s.length;
Set<PairInt> edges = new HashSet<PairInt>();
StarStructure si;
int uIdx, vIdx, j;
for (int i = 0; i < nV; ++i) {
si = s[i];
uIdx = si.rootIdx;
for (j = 0; j < si.vLabels.length; ++j) {
vIdx = si.origVIndexes[j];
if (uIdx < vIdx) {
edges.add(new PairInt(uIdx, vIdx));
} else {
edges.add(new PairInt(vIdx, uIdx));
}
}
}
return edges;
}
private int[][] createLabelMatrix(StarStructure[] sg1, StarStructure[] sg2,
int[] assignments) {
int[][] c = new int[sg1.length][];
int i, j;
for (i = 0; i < sg1.length; ++i) {
c[i] = new int[sg2.length];
//NLK, changing the fill values from 0 to 1 because the non-matches will cost 1
Arrays.fill(c[i], 1);
}
for (i = 0; i < assignments.length; ++i) {
j = assignments[i];
if (sg1[i].rootLabel == sg2[j].rootLabel) {
//NLK, changing to not penalize if vertex labels match
//c[i][j] = 1;
c[i][j] = 0;
}
}
return c;
}
    /**
     * calculates the refined suboptimal edit distance by swapping the
     * vertex assignments and calculating the
     * suboptimal distance, keeping the permuted assignments that result in
     * the smallest edit distance.
     * @param sg1 star structures for graph g1
     * @param sg2 star structures for graph g2
     * @param e1 edge set of g1 (smaller endpoint first)
     * @param e2 edge set of g2 (smaller endpoint first)
     * @param a1 adjacency matrix for graph g1
     * @param a2 adjacency matrix for graph g2
     * @param refinedAssign input initial vertex assignments and output
     * refined vertex assignments.
     * @param tau the sub-optimal edit distance C(g,h,P) where
     * P is formed from the given assignments in refinedAssign.
     * @param distM cost matrix for bipartite assignments of vertexes in sg1 to sg2
     * @return the smallest edit distance found (never larger than tau)
     */
    protected double refinedSuboptimalEditDistance(StarStructure[] sg1, StarStructure[] sg2,
        Set<PairInt> e1, Set<PairInt> e2, int[][] a1, int[][] a2,
        int[] refinedAssign, double tau, double[][] distM) {
        int[] assign = Arrays.copyOf(refinedAssign, refinedAssign.length);
        TIntIntMap revAssign = reverseAssignment(assign);
        double min = tau;
        int iV1, iV2, jV1, jV2, iV3, jV1Adj, jj;
        PairInt pV;
        int[] jV1AdjIdxs;
        // for every g1 edge whose image is NOT an edge in g2, try re-assigning its
        // second endpoint to a neighbor of the first endpoint's image
        for (PairInt edge1 : e1) {
            iV1 = edge1.getX();
            iV2 = edge1.getY();
            jV1 = assign[iV1];
            jV2 = assign[iV2];
            if (jV1 < jV2) {
                pV = new PairInt(jV1, jV2);
            } else {
                pV = new PairInt(jV2, jV1);
            }
            if (e2.contains(pV)) {
                // image is already an edge; nothing to repair
                continue;
            }
            jV1AdjIdxs = sg2[jV1].origVIndexes;
            for (jj = 0; jj < jV1AdjIdxs.length; ++jj) {
                jV1Adj = jV1AdjIdxs[jj];
                iV3 = revAssign.get(jV1Adj);
                // tentative changes to assign: swap the targets of iV2 and iV3
                assign[iV2] = jV1Adj;
                assign[iV3] = jV2;
                if (this.edgesAreLabeled) {
                    tau = suboptimalEditDistance(sg1, sg2, e1, e2, assign);
                } else {
                    tau = suboptimalEditDistanceV(sg1, sg2, a1, a2, assign);
                }
                if (tau < min) {
                    // keep the improvement and update the reverse map to match
                    min = tau;
                    System.arraycopy(assign, 0, refinedAssign, 0, assign.length);
                    revAssign.put(jV1Adj, iV2);
                    revAssign.put(jV2, iV3);
                    break;
                } else {
                    //restore assign to latest refineAssign
                    System.arraycopy(refinedAssign, 0, assign, 0, assign.length);
                }
            }
        }
        return min;
    }
    /**
     * calculates the optimal edit distance by permuting the vertex assignments
     * to find the one which results in the minimum C(g, h, P).
     * (see section 4.3 of Zeng et al. 2009).
     * the runtime complexity is n!.
     * @param sg1 star structures for graph g1
     * @param sg2 star structures for graph g2
     * @param e1 edge set of g1 (smaller endpoint first)
     * @param e2 edge set of g2 (smaller endpoint first)
     * @param a1 adjacency matrix for graph g1
     * @param a2 adjacency matrix for graph g2
     * @param refinedAssign input initial vertex assignments and output
     * refined vertex assignments.
     * @param tau the sub-optimal edit distance C(g,h,P) where
     * P is formed from the given assignments in refinedAssign.
     * @return the minimum edit distance over all permutations (never larger than tau)
     * @throws java.lang.InterruptedException exception thrown if thread is
     * interrupted. The permutation code is running in a separate thread using
     * a semaphore model to pause and continue execution.
     */
    protected double optimalEditDistance(StarStructure[] sg1, StarStructure[] sg2,
        Set<PairInt> e1, Set<PairInt> e2, int[][] a1, int[][] a2,
        int[] refinedAssign, double tau) throws InterruptedException {
        int[] assign = new int[refinedAssign.length];
        double min = tau;
        PermutationsWithAwait perm = new PermutationsWithAwait(Arrays.copyOf(refinedAssign, refinedAssign.length));
        // iterate over all n! permutations of the assignment
        BigInteger np = MiscMath0.factorialBigInteger(refinedAssign.length);
        while (np.compareTo(BigInteger.ZERO) > 0) {
            perm.getNext(assign);
            if (this.edgesAreLabeled) {
                tau = suboptimalEditDistance(sg1, sg2, e1, e2, assign);
            } else {
                tau = suboptimalEditDistanceV(sg1, sg2, a1, a2, assign);
            }
            if (tau < min) {
                // remember the best permutation seen so far
                min = tau;
                System.arraycopy(assign, 0, refinedAssign, 0, assign.length);
            }
            np = np.subtract(BigInteger.ONE);
        }
        return min;
    }
protected TIntIntMap reverseAssignment(int[] assign) {
TIntIntMap r = new TIntIntHashMap();
for (int i = 0; i < assign.length; ++i) {
r.put(assign[i], i);
}
return r;
}
static Norm normalize(StarStructure[] sg1, StarStructure[] sg2) {
Norm norm = new Norm();
// order so that sg1.length >= sg2.length
if (sg1.length < sg2.length) {
StarStructure[] tmp = sg1;
sg1 = sg2;
sg2 = tmp;
norm.swapped = true;
} else {
norm.swapped = false;
}
StarStructure s;
int k, i, rIdx;
if (sg1.length > sg2.length) {
k = sg1.length - sg2.length;
//insert k vertices to sg2 and set their labels to eps
StarStructure[] _sg2 = new StarStructure[sg1.length];
System.arraycopy(sg2, 0, _sg2, 0, sg2.length);
for (i = 0; i < k; ++i) {
rIdx = sg1.length - 1 + i;
s = new StarStructure(rIdx, StarStructure.eps,
new int[0], new int[0], new int[0]);
_sg2[sg2.length + i] = s;
}
sg2 = _sg2;
}
assert (sg1.length == sg2.length);
norm.sg1 = sg1;
norm.sg2 = sg2;
return norm;
}
static int[] balancedBipartiteAssignment(double[][] distM) {
HungarianAlgorithm ha = new HungarianAlgorithm();
int[][] hAssign = ha.computeAssignments(MatrixUtil.convertToFloat(distM));
assert(hAssign.length == distM.length);
int[] assign = new int[distM.length];
int i1, i2, i;
for (i = 0; i < hAssign.length; ++i) {
i1 = hAssign[i][0];
i2 = hAssign[i][1];
assign[i1] = i2;
}
return assign;
}
    /**
     * an undirected attributed graph
     */
    public static class Graph {
        // vertex index -> vertex label
        public final TIntIntMap vLabels;
        // edge -> edge label; NOTE(review): key ordering (smaller endpoint first)
        // is assumed to match getEdges() — confirm producers agree
        public final TObjectIntMap<PairInt> eLabels;
        // vertex index -> set of adjacent vertex indexes
        public final TIntObjectMap<TIntSet> adjMap;
        /**
         * construct a graph instance using given data structures. Note that this
         * method copies by reference and does not copy by value into new
         * data structures (can use MatrixUtil.copy() methods).
         * @param adjMap adjacency map, vertex index to neighbor set
         * @param vLabels vertex labels keyed by vertex index
         * @param eLabels edge labels keyed by edge endpoints
         */
        public Graph(TIntObjectMap<TIntSet> adjMap, TIntIntMap vLabels, TObjectIntMap<PairInt> eLabels) {
            this.vLabels = vLabels;
            this.eLabels = eLabels;
            this.adjMap = adjMap;
        }
    }
    /** One accepted database graph with the bound that accepted it and its edit cost. */
    public static class Result {
        /** which estimate accepted the graph, from cheapest (LOWER) to exact (OPTIMAL) */
        public static enum BOUND {
            LOWER, SUBOPTIMAL, REFINED_SUBOPTIMAL, OPTIMAL
        }
        /**
         * the graph node assignment w.r.t. the query graph
         */
        public final int[] assignment;
        // index of the matched graph in the database list
        public final int dbGraphIndex;
        // edit-distance estimate produced by the accepting bound
        public final double editCost;
        public final BOUND bound;
        public Result(int dbGraphIndex, BOUND bound, int[] assign, double editCost) {
            this.bound = bound;
            this.dbGraphIndex = dbGraphIndex;
            // defensive copy so later refinement of the caller's array cannot mutate this result
            this.assignment = Arrays.copyOf(assign, assign.length);
            this.editCost = editCost;
        }
    }
    /** Value holder for normalize(): equal-length star arrays and the swap flag. */
    static class Norm {
        StarStructure[] sg1;
        StarStructure[] sg2;
        // true when the inputs were exchanged so that sg1 is the larger multiset
        boolean swapped;
    }
}
|
package ch.stola3;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Wrapper around a list of {@link IssuePojo} exposing the accessors a Mustache
 * template needs: the events in both orders, their count, the event-type
 * legend, and the render timestamp.
 */
public class MustachePojoWrapper {
    // cached, thread-safe formatter for the render timestamp
    private static final DateTimeFormatter TIME_FORMAT = DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm");
    private List<IssuePojo> issueEvents;
    // formatted creation time of this wrapper (read by the template)
    public String currentTime;
    // NOTE(review): repoOwner/repoName are never assigned in this class — confirm
    // they are set externally before the template is rendered.
    public String repoOwner;
    public String repoName;
    public MustachePojoWrapper(List<IssuePojo> issueEvents) {
        this.issueEvents = issueEvents;
        // format(...) already returns a String; the redundant toString() was dropped
        currentTime = LocalDateTime.now().format(TIME_FORMAT);
    }
    /**
     * Get all IssueEvents.<br>
     * <b>Order: newest top!</b>
     *
     * @return the wrapped list (by reference, may be null)
     */
    public List<IssuePojo> issueEvents() {
        return issueEvents;
    }
    /**
     * Get all IssueEvents.<br>
     * <b>Order: oldest entry top, newest bottom!</b>
     *
     * NOTE(review): unlike countIssueEvent, this throws NPE when the wrapped
     * list is null — confirm null is never passed to the constructor.
     *
     * @return a reversed copy of the wrapped list
     */
    public List<IssuePojo> issueEventsReverse() {
        List<IssuePojo> reverse = new ArrayList<IssuePojo>(issueEvents);
        Collections.reverse(reverse);
        return reverse;
    }
    /** @return the number of wrapped issue events; 0 when the list is null */
    public int countIssueEvent() {
        return issueEvents == null ? 0 : issueEvents.size();
    }
    /**
     * @return all issue event types, used by the template to render the legend
     */
    public IssuePojo.IssueEventType[] getIssueEventLegend() {
        return IssuePojo.IssueEventType.values();
    }
}
|
package at.ac.tuwien.inso.service;
import java.util.List;
import javax.validation.constraints.NotNull;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize;
import at.ac.tuwien.inso.controller.lecturer.forms.AddCourseForm;
import at.ac.tuwien.inso.dto.CourseDetailsForStudent;
import at.ac.tuwien.inso.entity.Course;
import at.ac.tuwien.inso.entity.Lecturer;
import at.ac.tuwien.inso.entity.Student;
import at.ac.tuwien.inso.entity.Subject;
import at.ac.tuwien.inso.entity.SubjectForStudyPlan;
import at.ac.tuwien.inso.exception.ValidationException;
/**
 * Service facade for course management: semester-scoped lookups, student
 * registration, and lecturer/admin course administration. Method-level access
 * control is declared with Spring Security {@code @PreAuthorize} expressions.
 *
 * NOTE(review): role names are inconsistent below — hasRole('STUDENT') on
 * registerStudentForCourse vs hasRole('ROLE_STUDENT') on dismissCourse.
 * Depending on the Spring Security version's ROLE_ prefixing rules, one of the
 * two will not match; confirm and unify.
 */
public interface CourseService {
    /**
     * this method returns all courses for the current semester with the specified name of a subject
     * may start a new semester!
     * the user needs to be authenticated
     *
     * @param name the name of a subject to search for, search strategy is NameLikeIgnoreCase
     * @param pageable a spring pageable element
     * @return a page of matching courses
     */
    @PreAuthorize("isAuthenticated()")
    Page<Course> findCourseForCurrentSemesterWithName(@NotNull String name, Pageable pageable);
    /**
     * returns all courses of the given lecturer for the current semester
     * may start a new semester!
     * the user needs to be authenticated
     *
     * @param lecturer the lecturer whose courses are requested
     * @return the lecturer's courses in the current semester
     */
    @PreAuthorize("isAuthenticated()")
    List<Course> findCoursesForCurrentSemesterForLecturer(Lecturer lecturer);
    /**
     * this method saves a new course by the given AddCourseForm
     * this method should also take care of tags that are contained by the form. if they are new and have not been in the system before, they should be created
     * the user needs to be lecturer or admin
     *
     * @param form course data, including tags
     * @return the persisted course
     */
    @PreAuthorize("hasAnyRole('ROLE_LECTURER', 'ROLE_ADMIN')")
    Course saveCourse(AddCourseForm form);
    /** @return the course with the given id */
    @PreAuthorize("isAuthenticated()")
    Course findOne(Long id);
    /**
     * removes the course with the given id; lecturer or admin only
     * @throws ValidationException when the course cannot be removed
     */
    @PreAuthorize("hasAnyRole('ROLE_LECTURER', 'ROLE_ADMIN')")
    boolean remove(Long id) throws ValidationException;
    /** registers the current student for the course; @return success flag */
    @PreAuthorize("hasRole('STUDENT')")
    boolean registerStudentForCourse(Course course);
    /** @return all courses the given student is registered for */
    @PreAuthorize("isAuthenticated()")
    List<Course> findAllForStudent(Student student);
    /** @return all courses belonging to the given subject, any semester */
    @PreAuthorize("isAuthenticated()")
    List<Course> findCoursesForSubject(Subject subject);
    /** @return courses of the given subject restricted to the current semester */
    @PreAuthorize("isAuthenticated()")
    List<Course> findCoursesForSubjectAndCurrentSemester(Subject subject);
    /** removes the course from the student's pending/recommended set */
    @PreAuthorize("hasRole('ROLE_STUDENT')")
    void dismissCourse(Student student, Long courseId);
    /** unregisters the student; @return the affected course */
    @PreAuthorize("isAuthenticated()")
    Course unregisterStudentFromCourse(Student student, Long courseId);
    /** @return a student-specific detail view of the course */
    @PreAuthorize("isAuthenticated()")
    CourseDetailsForStudent courseDetailsFor(Student student, Long courseId);
    /** @return the study-plan assignments of the course's subject */
    @PreAuthorize("hasAnyRole('ROLE_LECTURER', 'ROLE_ADMIN')")
    List<SubjectForStudyPlan> getSubjectForStudyPlanList(Course course);
    /** @return every course offered in the current semester */
    @PreAuthorize("isAuthenticated()")
    List<Course> findAllCoursesForCurrentSemester();
}
|
package com.algolia.search.saas;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.algolia.search.saas.APIClient.IndexQuery;
import android.app.Activity;
import android.os.AsyncTask;
/**
* Contains all the functions related to one index
* You should use APIClient.initIndex(indexName) to retrieve this object
*/
public class Index {
private APIClient client;
private String encodedIndexName;
private String indexName;
    /**
     * Index initialization (you should not call this constructor yourself;
     * use APIClient.initIndex(indexName) instead).
     */
    protected Index(APIClient client, String indexName) {
        try {
            this.client = client;
            // the encoded form is embedded in every request path
            this.encodedIndexName = URLEncoder.encode(indexName, "UTF-8");
            this.indexName = indexName;
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed by the JVM spec, so this cannot happen in practice
            throw new RuntimeException(e);
        }
    }
    /** @return the raw (un-encoded) index name */
    public String getIndexName() {
        return indexName;
    }
    /**
     * Add an object in this index
     *
     * @param obj the object to add.
     * @return the API response as JSON
     * @throws AlgoliaException when the REST call fails
     */
    public JSONObject addObject(JSONObject obj) throws AlgoliaException {
        return client.postRequest("/1/indexes/" + encodedIndexName, obj.toString(), false);
    }
    /**
     * Add an object in this index
     *
     * @param obj the object to add.
     * @param objectID an objectID you want to attribute to this object
     *  (if the objectID already exists, the old object will be overwritten)
     * @return the API response as JSON
     * @throws AlgoliaException when the REST call fails
     */
    public JSONObject addObject(JSONObject obj, String objectID) throws AlgoliaException {
        try {
            return client.putRequest("/1/indexes/" + encodedIndexName + "/" + URLEncoder.encode(objectID, "UTF-8"), obj.toString());
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is always available; rethrow as unchecked to satisfy the compiler
            throw new RuntimeException(e);
        }
    }
    /**
     * Add an object in this index asynchronously
     *
     * @param obj the object to add.
     *  The object is represented by an associative array
     * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on UIthread
     */
    public void addObjectASync(JSONObject obj, IndexListener listener) {
        // null objectID: the kind/params carry everything the task needs
        ASyncIndexTaskParams params = new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.AddObject, null, obj);
        new ASyncIndexTask().execute(params);
    }
    /**
     * Add an object in this index asynchronously
     *
     * @param obj the object to add.
     *  The object is represented by an associative array
     * @param objectID an objectID you want to attribute to this object
     *  (if the objectID already exists, the old object will be overwritten)
     * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on UIthread
     */
    public void addObjectASync(JSONObject obj, String objectID, IndexListener listener) {
        ASyncIndexTaskParams params = new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.AddObject, objectID, obj);
        new ASyncIndexTask().execute(params);
    }
    /**
     * Custom batch
     *
     * @param actions the array of actions
     * @return the API response as JSON
     * @throws AlgoliaException when building the payload or the REST call fails
     */
    public JSONObject batch(JSONArray actions) throws AlgoliaException {
        try {
            JSONObject content = new JSONObject();
            content.put("requests", actions);
            return client.postRequest("/1/indexes/" + encodedIndexName + "/batch", content.toString(), false);
        } catch (JSONException e) {
            // NOTE(review): the cause is dropped here; if AlgoliaException has a
            // (String, Throwable) constructor, chain it to keep the stack trace
            throw new AlgoliaException(e.getMessage());
        }
    }
/**
 * Send a custom batch of write operations to this index.
 *
 * @param actions the list of actions
 * @throws AlgoliaException if the request payload cannot be built or the call fails
 */
public JSONObject batch(List<JSONObject> actions) throws AlgoliaException {
    try {
        JSONObject content = new JSONObject();
        // Wrap the List in a JSONArray explicitly: Android's org.json does not
        // serialize a raw java.util.List as a JSON array, so putting the List
        // directly would produce a malformed "requests" value.
        content.put("requests", new JSONArray(actions));
        return client.postRequest("/1/indexes/" + encodedIndexName + "/batch", content.toString(), false);
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Add several objects to this index in a single batch.
 *
 * @param objects the list of objects to add.
 */
public JSONObject addObjects(List<JSONObject> objects) throws AlgoliaException {
    try {
        JSONArray requests = new JSONArray();
        // Each object becomes one "addObject" action of the batch.
        for (JSONObject object : objects) {
            requests.put(new JSONObject().put("action", "addObject").put("body", object));
        }
        return batch(requests);
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Add several objects to this index asynchronously.
 *
 * @param objects the list of objects to add.
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void addObjectsASync(List<JSONObject> objects, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.AddObjects, objects));
}
/**
 * Add several objects to this index in a single batch.
 *
 * @param inputArray the array of objects to add.
 */
public JSONObject addObjects(JSONArray inputArray) throws AlgoliaException {
    try {
        JSONArray requests = new JSONArray();
        final int count = inputArray.length();
        // Each input element becomes one "addObject" action of the batch.
        for (int i = 0; i < count; i++) {
            requests.put(new JSONObject().put("action", "addObject").put("body", inputArray.getJSONObject(i)));
        }
        return batch(requests);
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Add several objects to this index asynchronously.
 *
 * @param objects the array of objects to add.
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void addObjectsASync(JSONArray objects, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.AddObjects2, objects));
}
/**
 * Retrieve an object from this index.
 *
 * @param objectID the unique identifier of the object to retrieve
 */
public JSONObject getObject(String objectID) throws AlgoliaException {
    final String encodedID;
    try {
        encodedID = URLEncoder.encode(objectID, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM.
        throw new RuntimeException(e);
    }
    return client.getRequest("/1/indexes/" + encodedIndexName + "/" + encodedID, false);
}
/**
 * Retrieve an object from this index, limited to a subset of its attributes.
 *
 * @param objectID the unique identifier of the object to retrieve
 * @param attributesToRetrieve the list of attributes to retrieve
 */
public JSONObject getObject(String objectID, List<String> attributesToRetrieve) throws AlgoliaException {
    try {
        // Build "?attributes=a,b,c" with each attribute name URL-encoded.
        StringBuilder query = new StringBuilder("?attributes=");
        String separator = "";
        for (String attribute : attributesToRetrieve) {
            query.append(separator).append(URLEncoder.encode(attribute, "UTF-8"));
            separator = ",";
        }
        String encodedID = URLEncoder.encode(objectID, "UTF-8");
        return client.getRequest("/1/indexes/" + encodedIndexName + "/" + encodedID + query.toString(), false);
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM.
        throw new RuntimeException(e);
    }
}
/**
 * Retrieve an object from this index asynchronously.
 *
 * @param objectID the unique identifier of the object to retrieve
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void getObjectASync(String objectID, IndexListener listener) {
    // Null attribute list means "retrieve all attributes".
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.GetObject, objectID, (List<String>) null));
}
/**
 * Retrieve an object from this index asynchronously, limited to a subset of its attributes.
 *
 * @param objectID the unique identifier of the object to retrieve
 * @param attributesToRetrieve the list of attributes to retrieve
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void getObjectASync(String objectID, List<String> attributesToRetrieve, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.GetObject, objectID, attributesToRetrieve));
}
/**
 * Retrieve several objects from this index in one call.
 *
 * @param objectIDs the list of unique identifiers of the objects to retrieve
 * @throws AlgoliaException if the request payload cannot be built or the call fails
 */
public JSONObject getObjects(List<String> objectIDs) throws AlgoliaException {
    try {
        // The multi-objects endpoint takes {indexName, objectID} pairs.
        JSONArray requests = new JSONArray();
        for (String objectID : objectIDs) {
            requests.put(new JSONObject().put("indexName", this.indexName).put("objectID", objectID));
        }
        JSONObject body = new JSONObject().put("requests", requests);
        return client.postRequest("/1/indexes/*/objects", body.toString(), true);
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Retrieve several objects from this index asynchronously.
 *
 * @param objectIDs the list of unique identifiers of the objects to retrieve
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 * @throws AlgoliaException declared for API compatibility; this method does not throw it itself
 */
public void getObjectsASync(List<String> objectIDs, IndexListener listener) throws AlgoliaException {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.GetObjects, objectIDs));
}
/**
 * Partially update an object (only the attributes passed in argument are changed).
 *
 * @param partialObject the attributes to override
 * @param objectID the unique identifier of the object to update
 */
public JSONObject partialUpdateObject(JSONObject partialObject, String objectID) throws AlgoliaException {
    final String encodedID;
    try {
        encodedID = URLEncoder.encode(objectID, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM.
        throw new RuntimeException(e);
    }
    return client.postRequest("/1/indexes/" + encodedIndexName + "/" + encodedID + "/partial", partialObject.toString(), false);
}
/**
 * Partially update an object asynchronously (only the attributes passed in argument are changed).
 *
 * @param partialObject the attributes to override
 * @param objectID the unique identifier of the object to update
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void partialUpdateObjectASync(JSONObject partialObject, String objectID, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.PartialSaveObject, objectID, partialObject));
}
/**
 * Partially update several objects in a single batch.
 *
 * @param inputArray the array of objects to update (each object must contain an objectID attribute)
 */
public JSONObject partialUpdateObjects(JSONArray inputArray) throws AlgoliaException {
    try {
        JSONArray requests = new JSONArray();
        final int count = inputArray.length();
        // Each element becomes one "partialUpdateObject" action, keyed by its objectID.
        for (int i = 0; i < count; i++) {
            JSONObject object = inputArray.getJSONObject(i);
            requests.put(new JSONObject()
                    .put("action", "partialUpdateObject")
                    .put("objectID", object.getString("objectID"))
                    .put("body", object));
        }
        return batch(requests);
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Partially update several objects asynchronously.
 *
 * @param objects the array of objects to update (each object must contain an objectID attribute)
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void partialUpdateObjectsASync(JSONArray objects, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.PartialSaveObjects2, objects));
}
/**
 * Partially update several objects in a single batch.
 *
 * @param objects the list of objects to update (each object must contain an objectID attribute)
 */
public JSONObject partialUpdateObjects(List<JSONObject> objects) throws AlgoliaException {
    try {
        JSONArray requests = new JSONArray();
        // Each element becomes one "partialUpdateObject" action, keyed by its objectID.
        for (JSONObject object : objects) {
            requests.put(new JSONObject()
                    .put("action", "partialUpdateObject")
                    .put("objectID", object.getString("objectID"))
                    .put("body", object));
        }
        return batch(requests);
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Partially update several objects asynchronously.
 *
 * @param objects the list of objects to update (each object must contain an objectID attribute)
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void partialUpdateObjectsASync(List<JSONObject> objects, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.PartialSaveObjects, objects));
}
/**
 * Override the full content of an object.
 *
 * @param object the object to save
 * @param objectID the unique identifier of the object
 */
public JSONObject saveObject(JSONObject object, String objectID) throws AlgoliaException {
    final String encodedID;
    try {
        encodedID = URLEncoder.encode(objectID, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM.
        throw new RuntimeException(e);
    }
    return client.putRequest("/1/indexes/" + encodedIndexName + "/" + encodedID, object.toString());
}
/**
 * Override the full content of an object asynchronously.
 *
 * @param object the object to save
 * @param objectID the unique identifier of the object
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void saveObjectASync(JSONObject object, String objectID, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.SaveObject, objectID, object));
}
/**
 * Override the full content of several objects in a single batch.
 *
 * @param objects the list of objects to update (each object must contain an objectID attribute)
 */
public JSONObject saveObjects(List<JSONObject> objects) throws AlgoliaException {
    try {
        JSONArray requests = new JSONArray();
        // Each element becomes one "updateObject" action, keyed by its objectID.
        for (JSONObject object : objects) {
            requests.put(new JSONObject()
                    .put("action", "updateObject")
                    .put("objectID", object.getString("objectID"))
                    .put("body", object));
        }
        return batch(requests);
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Override the full content of several objects asynchronously.
 *
 * @param objects the list of objects to update (each object must contain an objectID attribute)
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void saveObjectsASync(List<JSONObject> objects, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.SaveObjects, objects));
}
/**
 * Override the full content of several objects in a single batch.
 *
 * @param inputArray the array of objects to update (each object must contain an objectID attribute)
 */
public JSONObject saveObjects(JSONArray inputArray) throws AlgoliaException {
    try {
        JSONArray requests = new JSONArray();
        final int count = inputArray.length();
        // Each element becomes one "updateObject" action, keyed by its objectID.
        for (int i = 0; i < count; i++) {
            JSONObject object = inputArray.getJSONObject(i);
            requests.put(new JSONObject()
                    .put("action", "updateObject")
                    .put("objectID", object.getString("objectID"))
                    .put("body", object));
        }
        return batch(requests);
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Override the full content of several objects asynchronously.
 *
 * @param objects the array of objects to update (each object must contain an objectID attribute)
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void saveObjectsASync(JSONArray objects, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.SaveObjects2, objects));
}
/**
 * Delete an object from the index.
 *
 * @param objectID the unique identifier of the object to delete; must be non-null and non-empty
 * @throws AlgoliaException if objectID is null or empty, or if the call fails
 */
public JSONObject deleteObject(String objectID) throws AlgoliaException {
    // Null-check BEFORE dereferencing: the original condition
    // (objectID.length() == 0 || objectID == null) threw a
    // NullPointerException on null input instead of an AlgoliaException.
    if (objectID == null || objectID.length() == 0)
        throw new AlgoliaException("Invalid objectID");
    try {
        return client.deleteRequest("/1/indexes/" + encodedIndexName + "/" + URLEncoder.encode(objectID, "UTF-8"));
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM.
        throw new RuntimeException(e);
    }
}
/**
 * Delete an object from the index asynchronously.
 *
 * @param objectID the unique identifier of the object to delete
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void deleteObjectASync(String objectID, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.DeleteObject, objectID, (List<String>) null));
}
/**
 * Delete several objects in a single batch.
 *
 * @param objects the list of objectIDs to delete
 */
public JSONObject deleteObjects(List<String> objects) throws AlgoliaException {
    try {
        JSONArray requests = new JSONArray();
        // Each objectID becomes one "deleteObject" action whose body carries the ID.
        for (String objectID : objects) {
            JSONObject body = new JSONObject().put("objectID", objectID);
            requests.put(new JSONObject().put("action", "deleteObject").put("body", body));
        }
        return batch(requests);
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Delete several objects in a single batch.
 *
 * @param objects the list of objects to delete (each carrying its objectID)
 */
public JSONObject deleteObjects2(List<JSONObject> objects) throws AlgoliaException {
    try {
        JSONArray requests = new JSONArray();
        // Each object becomes one "deleteObject" action.
        for (JSONObject object : objects) {
            requests.put(new JSONObject().put("action", "deleteObject").put("body", object));
        }
        return batch(requests);
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Delete several objects asynchronously.
 *
 * @param ids the list of objectIDs to delete
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 * @throws AlgoliaException if an objectID cannot be wrapped into a JSON object
 */
public void deleteObjectsASync(List<String> ids, IndexListener listener) throws AlgoliaException {
    // Convert each ID into a {"objectID": id} object before handing off to the task.
    List<JSONObject> objects = new ArrayList<JSONObject>();
    try {
        for (String id : ids) {
            objects.add(new JSONObject().put("objectID", id));
        }
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.DeleteObjects, objects));
}
/**
 * Delete all objects matching a query.
 *
 * Repeatedly searches (100 hits per page), batch-deletes the returned
 * objectIDs, and waits for each deletion task to be published before
 * searching again, until the query reports zero hits.
 *
 * NOTE(review): this blocks the calling thread (search + waitTask loop);
 * if matching objects keep being added, the loop may not terminate.
 *
 * @param query the query whose matching objects are deleted; its
 *        attributesToRetrieve and hitsPerPage settings are overwritten here
 * @throws AlgoliaException on API or JSON errors
 */
public void deleteByQuery(Query query) throws AlgoliaException {
    // Only objectID is needed to perform the deletions.
    List<String> attributesToRetrieve = new ArrayList<String>();
    attributesToRetrieve.add("objectID");
    query.setAttributesToRetrieve(attributesToRetrieve);
    query.setHitsPerPage(100);
    JSONObject results = this.search(query);
    try {
        while (results.getInt("nbHits") != 0) {
            // Collect the objectIDs of the current page of hits.
            List<String> objectIDs = new ArrayList<String>();
            for (int i = 0; i < results.getJSONArray("hits").length(); ++i) {
                JSONObject hit = results.getJSONArray("hits").getJSONObject(i);
                objectIDs.add(hit.getString("objectID"));
            }
            JSONObject task = this.deleteObjects(objectIDs);
            // Wait until the deletion is published; otherwise the next search
            // could return the same, not-yet-deleted objects.
            this.waitTask(task.getString("taskID"));
            results = this.search(query);
        }
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    }
}
/**
 * Delete all objects matching a query, asynchronously.
 *
 * @param query the query whose matching objects are deleted
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void deleteByQueryASync(Query query, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.DeleteByQuery, query));
}
/**
 * Search inside the index.
 *
 * @param query the search query
 */
public JSONObject search(Query query) throws AlgoliaException {
    String url = "/1/indexes/" + encodedIndexName;
    // Only append a query string when the query actually has parameters.
    String queryString = query.getQueryString();
    if (queryString.length() > 0) {
        url += "?" + queryString;
    }
    return client.getRequest(url, true);
}
/**
 * Search inside the index asynchronously.
 *
 * @param query the search query
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void searchASync(Query query, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, query));
}
/**
 * Wait for the publication of a task on the server.
 * All server tasks are asynchronous; this method polls (once per second)
 * until the given task reports status "published".
 *
 * NOTE(review): blocks the calling thread; there is no timeout.
 *
 * @param taskID the id of the task returned by the server
 * @throws AlgoliaException on API or JSON errors
 */
public void waitTask(String taskID) throws AlgoliaException {
    try {
        while (true) {
            JSONObject obj = client.getRequest("/1/indexes/" + encodedIndexName + "/task/" + URLEncoder.encode(taskID, "UTF-8"), true);
            if (obj.getString("status").equals("published"))
                return;
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // Restore the interrupt flag instead of silently swallowing it,
                // so callers can still observe that the thread was interrupted.
                Thread.currentThread().interrupt();
            }
        }
    } catch (JSONException e) {
        throw new AlgoliaException(e.getMessage());
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM.
        throw new RuntimeException(e);
    }
}
/**
 * Wait for the publication of a task on the server, asynchronously.
 * All server tasks are asynchronous; the listener is notified once the task is published.
 *
 * @param taskID the id of the task returned by the server
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void waitTaskASync(String taskID, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.WaitTask, taskID, (List<String>) null));
}
/**
 * Retrieve the settings of this index.
 */
public JSONObject getSettings() throws AlgoliaException {
    String path = "/1/indexes/" + encodedIndexName + "/settings";
    return client.getRequest(path, false);
}
/**
 * Retrieve the settings of this index asynchronously.
 *
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void getSettingsASync(IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.GetSettings, null, (List<String>) null));
}
/**
* Set settings for this index
*
 * @param settings the settings object that can contain:
* - minWordSizefor1Typo: (integer) the minimum number of characters to accept one typo (default = 3).
* - minWordSizefor2Typos: (integer) the minimum number of characters to accept two typos (default = 7).
* - hitsPerPage: (integer) the number of hits per page (default = 10).
* - attributesToRetrieve: (array of strings) default list of attributes to retrieve in objects.
* If set to null, all attributes are retrieved.
* - attributesToHighlight: (array of strings) default list of attributes to highlight.
* If set to null, all indexed attributes are highlighted.
* - attributesToSnippet**: (array of strings) default list of attributes to snippet alongside the number of words to return (syntax is attributeName:nbWords).
* By default no snippet is computed. If set to null, no snippet is computed.
* - attributesToIndex: (array of strings) the list of fields you want to index.
* If set to null, all textual and numerical attributes of your objects are indexed, but you should update it to get optimal results.
* This parameter has two important uses:
* - Limit the attributes to index: For example if you store a binary image in base64, you want to store it and be able to
* retrieve it but you don't want to search in the base64 string.
* - Control part of the ranking*: (see the ranking parameter for full explanation) Matches in attributes at the beginning of
* the list will be considered more important than matches in attributes further down the list.
* In one attribute, matching text at the beginning of the attribute will be considered more important than text after, you can disable
* this behavior if you add your attribute inside `unordered(AttributeName)`, for example attributesToIndex: ["title", "unordered(text)"].
* - attributesForFaceting: (array of strings) The list of fields you want to use for faceting.
* All strings in the attribute selected for faceting are extracted and added as a facet. If set to null, no attribute is used for faceting.
* - ranking: (array of strings) controls the way results are sorted.
* We have six available criteria:
* - typo: sort according to number of typos,
* - geo: sort according to decreassing distance when performing a geo-location based search,
* - proximity: sort according to the proximity of query words in hits,
* - attribute: sort according to the order of attributes defined by attributesToIndex,
* - exact: sort according to the number of words that are matched identical to query word (and not as a prefix),
* - custom: sort according to a user defined formula set in **customRanking** attribute.
* The standard order is ["typo", "geo", "proximity", "attribute", "exact", "custom"]
* - customRanking: (array of strings) lets you specify part of the ranking.
* The syntax of this condition is an array of strings containing attributes prefixed by asc (ascending order) or desc (descending order) operator.
* For example `"customRanking" => ["desc(population)", "asc(name)"]`
* - queryType: Select how the query words are interpreted, it can be one of the following value:
* - prefixAll: all query words are interpreted as prefixes,
* - prefixLast: only the last word is interpreted as a prefix (default behavior),
* - prefixNone: no query word is interpreted as a prefix. This option is not recommended.
* - highlightPreTag: (string) Specify the string that is inserted before the highlighted parts in the query result (default to "<em>").
* - highlightPostTag: (string) Specify the string that is inserted after the highlighted parts in the query result (default to "</em>").
* - optionalWords: (array of strings) Specify a list of words that should be considered as optional when found in the query.
*/
public JSONObject setSettings(JSONObject settings) throws AlgoliaException {
    // PUT the whole settings object; see the javadoc above for the supported keys.
    String path = "/1/indexes/" + encodedIndexName + "/settings";
    return client.putRequest(path, settings.toString());
}
/**
 * Delete the index content without removing settings and index-specific API keys.
 */
public JSONObject clearIndex() throws AlgoliaException {
    String path = "/1/indexes/" + encodedIndexName + "/clear";
    return client.postRequest(path, "", false);
}
/**
 * Set the settings of this index asynchronously.
 *
 * @param settings the settings object to apply
 * @param listener the listener that will receive the result or error. If the listener is an instance of Activity, the result will be received directly on the UI thread
 */
public void setSettingsASync(JSONObject settings, IndexListener listener) {
    new ASyncIndexTask().execute(new ASyncIndexTaskParams(listener, ASyncIndexTaskKind.SetSettings, null, settings));
}
/**
 * List all existing user keys of this index with their associated ACLs.
 */
public JSONObject listUserKeys() throws AlgoliaException {
    String path = "/1/indexes/" + encodedIndexName + "/keys";
    return client.getRequest(path, false);
}
/**
 * Get the ACL of a user key.
 *
 * @param key the user key to look up
 */
public JSONObject getUserKeyACL(String key) throws AlgoliaException {
    try {
        // URL-encode the key, consistently with every other path segment in
        // this class (the original concatenated it raw into the URL).
        return client.getRequest("/1/indexes/" + encodedIndexName + "/keys/" + URLEncoder.encode(key, "UTF-8"), false);
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM.
        throw new RuntimeException(e);
    }
}
/**
 * Delete an existing user key.
 *
 * @param key the user key to delete
 */
public JSONObject deleteUserKey(String key) throws AlgoliaException {
    try {
        // URL-encode the key, consistently with every other path segment in
        // this class (the original concatenated it raw into the URL).
        return client.deleteRequest("/1/indexes/" + encodedIndexName + "/keys/" + URLEncoder.encode(key, "UTF-8"));
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM.
        throw new RuntimeException(e);
    }
}
/**
 * Create a new user key.
 *
 * @param acls the list of ACLs for this key. Defined by an array of strings that
 * can contain the following values:
 *   - search: allows searching (https and http)
 *   - addObject: allows adding/updating an object in the index (https only)
 *   - deleteObject: allows deleting an existing object (https only)
 *   - deleteIndex: allows deleting index content (https only)
 *   - settings: allows getting index settings (https only)
 *   - editSettings: allows changing index settings (https only)
 */
public JSONObject addUserKey(List<String> acls) throws AlgoliaException {
    JSONObject body = new JSONObject();
    try {
        body.put("acl", new JSONArray(acls));
    } catch (JSONException e) {
        // Only thrown for invalid keys/values; cannot happen for this payload.
        throw new RuntimeException(e);
    }
    return client.postRequest("/1/indexes/" + encodedIndexName + "/keys", body.toString(), false);
}
/**
 * Update a user key.
 *
 * @param key the user key to update
 * @param acls the list of ACLs for this key. Defined by an array of strings that
 * can contain the following values:
 *   - search: allows searching (https and http)
 *   - addObject: allows adding/updating an object in the index (https only)
 *   - deleteObject: allows deleting an existing object (https only)
 *   - deleteIndex: allows deleting index content (https only)
 *   - settings: allows getting index settings (https only)
 *   - editSettings: allows changing index settings (https only)
 */
public JSONObject updateUserKey(String key, List<String> acls) throws AlgoliaException {
    JSONArray array = new JSONArray(acls);
    JSONObject jsonObject = new JSONObject();
    try {
        jsonObject.put("acl", array);
        // URL-encode the key, consistently with every other path segment in
        // this class (the original concatenated it raw into the URL).
        return client.putRequest("/1/indexes/" + encodedIndexName + "/keys/" + URLEncoder.encode(key, "UTF-8"), jsonObject.toString());
    } catch (JSONException e) {
        // Only thrown for invalid keys/values; cannot happen for this payload.
        throw new RuntimeException(e);
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM.
        throw new RuntimeException(e);
    }
}
/**
 * Create a new user key with rate-limit and validity options.
 *
 * @param acls the list of ACLs for this key. Defined by an array of strings that
 * can contain the following values:
 *   - search: allows searching (https and http)
 *   - addObject: allows adding/updating an object in the index (https only)
 *   - deleteObject: allows deleting an existing object (https only)
 *   - deleteIndex: allows deleting index content (https only)
 *   - settings: allows getting index settings (https only)
 *   - editSettings: allows changing index settings (https only)
 * @param validity the number of seconds after which the key will be automatically removed (0 means no time limit for this key)
 * @param maxQueriesPerIPPerHour the maximum number of API calls allowed from an IP address per hour. Defaults to 0 (no rate limit).
 * @param maxHitsPerQuery the maximum number of hits this API key can retrieve in one call. Defaults to 0 (unlimited)
 */
public JSONObject addUserKey(List<String> acls, int validity, int maxQueriesPerIPPerHour, int maxHitsPerQuery) throws AlgoliaException {
    JSONObject body = new JSONObject();
    try {
        body.put("acl", new JSONArray(acls))
            .put("validity", validity)
            .put("maxQueriesPerIPPerHour", maxQueriesPerIPPerHour)
            .put("maxHitsPerQuery", maxHitsPerQuery);
    } catch (JSONException e) {
        // Only thrown for invalid keys/values; cannot happen for this payload.
        throw new RuntimeException(e);
    }
    return client.postRequest("/1/indexes/" + encodedIndexName + "/keys", body.toString(), false);
}
/**
 * Update a user key with rate-limit and validity options.
 *
 * @param key the user key to update
 * @param acls the list of ACLs for this key. Defined by an array of strings that
 * can contain the following values:
 *   - search: allows searching (https and http)
 *   - addObject: allows adding/updating an object in the index (https only)
 *   - deleteObject: allows deleting an existing object (https only)
 *   - deleteIndex: allows deleting index content (https only)
 *   - settings: allows getting index settings (https only)
 *   - editSettings: allows changing index settings (https only)
 * @param validity the number of seconds after which the key will be automatically removed (0 means no time limit for this key)
 * @param maxQueriesPerIPPerHour the maximum number of API calls allowed from an IP address per hour. Defaults to 0 (no rate limit).
 * @param maxHitsPerQuery the maximum number of hits this API key can retrieve in one call. Defaults to 0 (unlimited)
 */
public JSONObject updateUserKey(String key, List<String> acls, int validity, int maxQueriesPerIPPerHour, int maxHitsPerQuery) throws AlgoliaException {
    JSONArray array = new JSONArray(acls);
    JSONObject jsonObject = new JSONObject();
    try {
        jsonObject.put("acl", array);
        jsonObject.put("validity", validity);
        jsonObject.put("maxQueriesPerIPPerHour", maxQueriesPerIPPerHour);
        jsonObject.put("maxHitsPerQuery", maxHitsPerQuery);
        // URL-encode the key, consistently with every other path segment in
        // this class (the original concatenated it raw into the URL).
        return client.putRequest("/1/indexes/" + encodedIndexName + "/keys/" + URLEncoder.encode(key, "UTF-8"), jsonObject.toString());
    } catch (JSONException e) {
        // Only thrown for invalid keys/values; cannot happen for this payload.
        throw new RuntimeException(e);
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM.
        throw new RuntimeException(e);
    }
}
/**
 * Browse all index content, one page at a time.
 *
 * @param page pagination parameter used to select the page to retrieve.
 *        Page is zero-based and defaults to 0. Thus, to retrieve the 10th page you need to set page=9
 */
public JSONObject browse(int page) throws AlgoliaException {
    String path = "/1/indexes/" + encodedIndexName + "/browse?page=" + page;
    return client.getRequest(path, false);
}
/**
 * Browse all index content, one page at a time, with a custom page size.
 *
 * @param page pagination parameter used to select the page to retrieve.
 *        Page is zero-based and defaults to 0. Thus, to retrieve the 10th page you need to set page=9
 * @param hitsPerPage pagination parameter used to select the number of hits per page. Defaults to 1000.
 */
public JSONObject browse(int page, int hitsPerPage) throws AlgoliaException {
    String path = "/1/indexes/" + encodedIndexName + "/browse?page=" + page + "&hitsPerPage=" + hitsPerPage;
    return client.getRequest(path, false);
}
/**
 * Identifies which Index operation an ASyncIndexTask performs in the background
 * and which IndexListener callback receives its result.
 * The "2" suffixed constants are the JSONArray variants of their List counterparts.
 */
private enum ASyncIndexTaskKind
{
    GetObject,
    AddObject,
    AddObjects,           // List<JSONObject> variant
    AddObjects2,          // JSONArray variant
    SaveObject,
    SaveObjects,          // List<JSONObject> variant
    SaveObjects2,         // JSONArray variant
    PartialSaveObject,
    PartialSaveObjects,   // List<JSONObject> variant
    PartialSaveObjects2,  // JSONArray variant
    DeleteObject,
    DeleteObjects,
    DeleteByQuery,
    WaitTask,
    Query,
    GetSettings,
    SetSettings,
    GetObjects
};
/**
 * Parameter bundle handed to an ASyncIndexTask: the listener to notify, the
 * kind of operation to run, and whichever payload fields that kind needs.
 * Only the fields relevant to the chosen kind are set; the rest stay null.
 */
private static class ASyncIndexTaskParams
{
    public IndexListener listener;          // receives the result or error
    public Query query;                     // for Query / DeleteByQuery kinds
    public ASyncIndexTaskKind kind;         // which operation to perform
    public String objectID;                 // for single-object kinds (get/save/delete/waitTask taskID)
    public List list;                       // List-based payloads (objects or objectIDs)
    public JSONObject objectContent;        // single-object payload or settings
    public JSONArray objects2;              // JSONArray-based payloads
    public List<String> attributesToRetrieve; // for GetObject with attribute filtering
    // Search: kind is forced to Query.
    public ASyncIndexTaskParams(IndexListener listener, Query query) {
        this.listener = listener;
        this.query = query;
        this.kind = ASyncIndexTaskKind.Query;
    }
    // Single-object operations (add/save/partial-update/settings).
    public ASyncIndexTaskParams(IndexListener listener, ASyncIndexTaskKind kind, String objectID, JSONObject content)
    {
        this.listener = listener;
        this.kind = kind;
        this.objectID = objectID;
        this.objectContent = content;
    }
    // List-based batch operations.
    public ASyncIndexTaskParams(IndexListener listener, ASyncIndexTaskKind kind, List<?> objects)
    {
        this.listener = listener;
        this.kind = kind;
        this.list = objects;
    }
    // Query-carrying operations other than plain search (e.g. DeleteByQuery).
    public ASyncIndexTaskParams(IndexListener listener, ASyncIndexTaskKind kind, Query query)
    {
        this.listener = listener;
        this.kind = kind;
        this.query = query;
    }
    // JSONArray-based batch operations.
    public ASyncIndexTaskParams(IndexListener listener, ASyncIndexTaskKind kind, JSONArray objects)
    {
        this.listener = listener;
        this.kind = kind;
        this.objects2 = objects;
    }
    // Single-object retrieval (or deletion/waitTask) with optional attribute list.
    public ASyncIndexTaskParams(IndexListener listener, ASyncIndexTaskKind kind, String objectID, List<String> attributesToRetrieve)
    {
        this.listener = listener;
        this.kind = kind;
        this.objectID = objectID;
        this.attributesToRetrieve = attributesToRetrieve;
    }
}
/**
 * Background task that runs one index operation (described by an
 * {@code ASyncIndexTaskParams}) off the UI thread and forwards the result or
 * error to the supplied {@code IndexListener}.
 */
private class ASyncIndexTask extends AsyncTask<ASyncIndexTaskParams, Void, Void> {

    /**
     * Deliver a successful result to the listener. If the listener is an
     * Activity the callback is posted to the UI thread via runOnUiThread;
     * otherwise it is invoked directly on the current (background) thread.
     */
    private void _sendResult(ASyncIndexTaskParams p, JSONObject res)
    {
        final ASyncIndexTaskParams fp = p;
        final JSONObject fres = res;
        if (p.listener instanceof Activity) {
            ((Activity)p.listener).runOnUiThread(new Runnable() {
                public void run() {
                    _sendResultImpl(fp, fres);
                }
            });
        } else {
            _sendResultImpl(p, res);
        }
    }

    /** Route the result to the listener callback matching the task kind. */
    private void _sendResultImpl(ASyncIndexTaskParams p, JSONObject res)
    {
        switch (p.kind) {
        case AddObject:
            p.listener.addObjectResult(Index.this, p.objectContent, res);
            break;
        case AddObjects:
            p.listener.addObjectsResult(Index.this, p.list, res);
            break;
        case AddObjects2:
            p.listener.addObjectsResult(Index.this, p.objects2, res);
            break;
        case WaitTask:
            p.listener.waitTaskResult(Index.this, p.objectID);
            break;
        case SaveObject:
            p.listener.saveObjectResult(Index.this, p.objectContent, p.objectID, res);
            break;
        case SaveObjects:
            p.listener.saveObjectsResult(Index.this, p.list, res);
            break;
        case SaveObjects2:
            p.listener.saveObjectsResult(Index.this, p.objects2, res);
            break;
        case DeleteObject:
            p.listener.deleteObjectResult(Index.this, p.objectID, res);
            break;
        case PartialSaveObject:
            p.listener.partialUpdateResult(Index.this, p.objectContent, p.objectID, res);
            break;
        case PartialSaveObjects:
            p.listener.partialUpdateObjectsResult(Index.this, p.list, res);
            break;
        case PartialSaveObjects2:
            p.listener.partialUpdateObjectsResult(Index.this, p.objects2, res);
            break;
        case DeleteObjects:
            // NOTE(review): doInBackground and the error path use p.list for
            // DeleteObjects, but this success path reads p.objects2 - one of
            // the two is null for any given task. Confirm which field the
            // DeleteObjects kind is actually constructed with.
            p.listener.deleteObjectsResult(Index.this, p.objects2, res);
            break;
        case DeleteByQuery:
            p.listener.deleteByQueryResult(Index.this);
            break;
        case GetObject:
            p.listener.getObjectResult(Index.this, p.objectID, res);
            break;
        case GetObjects:
            p.listener.getObjectsResult(Index.this, p.list, res);
            break;
        case Query:
            p.listener.searchResult(Index.this, p.query, res);
            break;
        case GetSettings:
            p.listener.getSettingsResult(Index.this, res);
            break;
        case SetSettings:
            p.listener.setSettingsResult(Index.this, p.objectContent, res);
            break;
        }
    }

    /**
     * Execute the operation synchronously on the background thread. On
     * AlgoliaException the kind-specific error callback is invoked directly
     * (on this background thread) and the task aborts; otherwise the result
     * is dispatched via {@code _sendResult}.
     */
    @Override
    protected Void doInBackground(ASyncIndexTaskParams... params) {
        ASyncIndexTaskParams p = params[0];
        JSONObject res = null;
        switch (p.kind) {
        case AddObject:
            try {
                // objectID == null means "let the server assign an ID"
                res = (p.objectID == null) ? addObject(p.objectContent) : addObject(p.objectContent, p.objectID);
            } catch (AlgoliaException e) {
                p.listener.addObjectError(Index.this, p.objectContent, e);
                return null;
            }
            break;
        case AddObjects:
            try {
                res = addObjects(p.list);
            } catch (AlgoliaException e) {
                p.listener.addObjectsError(Index.this, p.list, e);
                return null;
            }
            break;
        case AddObjects2:
            try {
                res = addObjects(p.objects2);
            } catch (AlgoliaException e) {
                p.listener.addObjectsError(Index.this, p.objects2, e);
                return null;
            }
            break;
        case WaitTask:
            try {
                waitTask(p.objectID);
            } catch (AlgoliaException e) {
                p.listener.waitTaskError(Index.this, p.objectID, e);
                return null;
            }
            break;
        case SaveObject:
            try {
                res = saveObject(p.objectContent, p.objectID);
            } catch (AlgoliaException e) {
                p.listener.saveObjectError(Index.this, p.objectContent, p.objectID, e);
                return null;
            }
            break;
        case SaveObjects:
            try {
                res = saveObjects(p.list);
            } catch (AlgoliaException e) {
                p.listener.saveObjectsError(Index.this, p.list, e);
                return null;
            }
            break;
        case SaveObjects2:
            try {
                res = saveObjects(p.objects2);
            } catch (AlgoliaException e) {
                p.listener.saveObjectsError(Index.this, p.objects2, e);
                return null;
            }
            break;
        case DeleteObject:
            try {
                res = deleteObject(p.objectID);
            } catch (AlgoliaException e) {
                p.listener.deleteObjectError(Index.this, p.objectID, e);
                return null;
            }
            break;
        case DeleteByQuery:
            try {
                deleteByQuery(p.query);
            } catch (AlgoliaException e) {
                p.listener.deleteByQueryError(Index.this, p.query, e);
                return null;
            }
            break;
        case PartialSaveObject:
            try {
                res = partialUpdateObject(p.objectContent, p.objectID);
            } catch (AlgoliaException e) {
                p.listener.partialUpdateError(Index.this, p.objectContent, p.objectID, e);
                return null;
            }
            break;
        case PartialSaveObjects:
            try {
                res = partialUpdateObjects(p.list);
            } catch (AlgoliaException e) {
                p.listener.partialUpdateObjectsError(Index.this, p.list, e);
                return null;
            }
            break;
        case PartialSaveObjects2:
            try {
                res = partialUpdateObjects(p.objects2);
            } catch (AlgoliaException e) {
                p.listener.partialUpdateObjectsError(Index.this, p.objects2, e);
                return null;
            }
            break;
        case DeleteObjects:
            // NOTE(review): uses p.list here (and in the error callback) but
            // _sendResultImpl passes p.objects2 to deleteObjectsResult - see
            // the matching note there.
            try {
                res = deleteObjects2(p.list);
            } catch (AlgoliaException e) {
                p.listener.deleteObjectsError(Index.this, p.list, e);
                return null;
            }
            break;
        case GetObjects:
            try {
                res = getObjects(p.list);
            } catch (AlgoliaException e) {
                p.listener.getObjectsError(Index.this, p.list, e);
                return null;
            }
            break;
        case GetObject:
            try {
                if (p.attributesToRetrieve == null) {
                    res = getObject(p.objectID);
                } else {
                    res = getObject(p.objectID, p.attributesToRetrieve);
                }
            } catch (AlgoliaException e) {
                p.listener.getObjectError(Index.this, p.objectID, e);
                return null;
            }
            break;
        case Query:
            try {
                res = search(p.query);
            } catch (AlgoliaException e) {
                p.listener.searchError(Index.this, p.query, e);
                return null;
            }
            break;
        case GetSettings:
            try {
                res = getSettings();
            } catch (AlgoliaException e) {
                p.listener.getSettingsError(Index.this, e);
                return null;
            }
            break;
        case SetSettings:
            try {
                res = setSettings(p.objectContent);
            } catch (AlgoliaException e) {
                p.listener.setSettingsError(Index.this, p.objectContent, e);
                return null;
            }
            break;
        }
        _sendResult(p, res);
        return null;
    }

    // The AsyncTask lifecycle callbacks are intentionally no-ops: results and
    // errors are delivered through the IndexListener, not through AsyncTask.
    @Override
    protected void onPostExecute(Void result) {
    }
    @Override
    protected void onPreExecute() {
    }
    @Override
    protected void onProgressUpdate(Void... values) {
    }
}
/**
 * Perform a search with disjunctive facets, generating as many queries as the
 * number of disjunctive facets: one "hits + regular facets" query, then one
 * query per disjunctive facet that ignores that facet's own refinements.
 *
 * @param query the query
 * @param disjunctiveFacets the list of disjunctive facet names
 * @param refinements the current refinements, e.g.
 *        { "my_facet1" => ["my_value1", "my_value2"], "my_disjunctive_facet1" => ["my_value1", "my_value2"] }.
 *        May be null (treated as empty).
 * @return the first query's answer, augmented with a "disjunctiveFacets" object
 * @throws AlgoliaException on API error
 */
public JSONObject disjunctiveFaceting(Query query, List<String> disjunctiveFacets, Map<String, List<String>> refinements) throws AlgoliaException {
    if (refinements == null) {
        refinements = new HashMap<String, List<String>>();
    }
    // Refinements on a disjunctive facet are ORed; all others are ANDed.
    HashMap<String, List<String>> disjunctiveRefinements = new HashMap<String, List<String>>();
    for (Map.Entry<String, List<String>> elt : refinements.entrySet()) {
        if (disjunctiveFacets.contains(elt.getKey())) {
            disjunctiveRefinements.put(elt.getKey(), elt.getValue());
        }
    }
    // Build queries
    List<IndexQuery> queries = new ArrayList<IndexQuery>();
    // hits + regular facets query: all refinements apply
    queries.add(new IndexQuery(this.indexName,
            new Query(query).setFacetFilters(buildFacetFilters(refinements, disjunctiveRefinements, null))));
    // one query per disjunctive facet: all refinements except the current
    // facet's own, hitsPerPage=1, and only that single facet requested
    for (String disjunctiveFacet : disjunctiveFacets) {
        List<String> facets = new ArrayList<String>();
        facets.add(disjunctiveFacet);
        queries.add(new IndexQuery(this.indexName, new Query(query)
                .setHitsPerPage(1)
                .setAttributesToRetrieve(new ArrayList<String>())
                .setAttributesToHighlight(new ArrayList<String>())
                .setAttributesToSnippet(new ArrayList<String>())
                .setFacets(facets)
                .setFacetFilters(buildFacetFilters(refinements, disjunctiveRefinements, disjunctiveFacet))));
    }
    JSONObject answers = this.client.multipleQueries(queries);
    // aggregate answers: the first answer stores the hits + regular facets,
    // the following ones each contribute one disjunctive facet's counts
    try {
        JSONArray results = answers.getJSONArray("results");
        JSONObject aggregatedAnswer = results.getJSONObject(0);
        JSONObject disjunctiveFacetsJSON = new JSONObject();
        for (int i = 1; i < results.length(); ++i) {
            JSONObject facets = results.getJSONObject(i).getJSONObject("facets");
            Iterator<String> keys = facets.keys();
            while (keys.hasNext()) {
                String key = keys.next();
                // Add the facet to the disjunctive facet hash
                disjunctiveFacetsJSON.put(key, facets.getJSONObject(key));
                // concatenate missing refinements: a refined value absent from
                // the answer still gets an entry with count 0
                if (!disjunctiveRefinements.containsKey(key)) {
                    continue;
                }
                for (String refine : disjunctiveRefinements.get(key)) {
                    if (!disjunctiveFacetsJSON.getJSONObject(key).has(refine)) {
                        disjunctiveFacetsJSON.getJSONObject(key).put(refine, 0);
                    }
                }
            }
        }
        aggregatedAnswer.put("disjunctiveFacets", disjunctiveFacetsJSON);
        return aggregatedAnswer;
    } catch (JSONException e) {
        // Keeps the original contract: a malformed answer is a programming
        // error, not an AlgoliaException the caller is expected to handle.
        throw new Error(e);
    }
}

/**
 * Build the facetFilters expression for one of the disjunctive-faceting
 * queries: conjunctive refinements are appended individually (ANDed by the
 * engine); refinements on disjunctive facets are grouped in "(a,b)" (ORed).
 *
 * @param refinements all current refinements
 * @param disjunctiveRefinements the subset of refinements on disjunctive facets
 * @param excludedFacet facet whose refinements are skipped entirely, or null
 *        to include every refinement (the main hits query)
 * @return the comma-separated facetFilters string
 */
private static String buildFacetFilters(Map<String, List<String>> refinements, Map<String, List<String>> disjunctiveRefinements, String excludedFacet) {
    StringBuilder filters = new StringBuilder();
    boolean firstGlobal = true;
    for (Map.Entry<String, List<String>> elt : refinements.entrySet()) {
        if (excludedFacet != null && excludedFacet.equals(elt.getKey())) {
            continue;
        }
        StringBuilder or = new StringBuilder();
        or.append('(');
        boolean first = true;
        for (String val : elt.getValue()) {
            if (disjunctiveRefinements.containsKey(elt.getKey())) {
                // disjunctive refinements are ORed inside the parentheses
                if (!first) {
                    or.append(',');
                }
                first = false;
                or.append(String.format("%s:%s", elt.getKey(), val));
            } else {
                if (!firstGlobal) {
                    filters.append(',');
                }
                firstGlobal = false;
                filters.append(String.format("%s:%s", elt.getKey(), val));
            }
        }
        // close and append the OR group, if this facet is disjunctive
        if (disjunctiveRefinements.containsKey(elt.getKey())) {
            or.append(')');
            if (!firstGlobal) {
                filters.append(',');
            }
            firstGlobal = false;
            filters.append(or.toString());
        }
    }
    return filters.toString();
}
/**
 * Perform a search with disjunctive facets and no current refinements.
 * Equivalent to {@code disjunctiveFaceting(query, disjunctiveFacets, null)}.
 *
 * @param query the query
 * @param disjunctiveFacets the list of disjunctive facet names
 * @throws AlgoliaException on API error
 */
public JSONObject disjunctiveFaceting(Query query, List<String> disjunctiveFacets) throws AlgoliaException {
    return disjunctiveFaceting(query, disjunctiveFacets, null);
}
}
|
package org.endeavourhealth.ui.endpoints;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.endeavourhealth.core.cache.ObjectMapperPool;
import org.endeavourhealth.core.data.admin.LibraryRepository;
import org.endeavourhealth.core.data.admin.models.ActiveItem;
import org.endeavourhealth.core.data.admin.models.Audit;
import org.endeavourhealth.core.data.admin.models.Item;
import org.endeavourhealth.core.data.config.ConfigurationRepository;
import org.endeavourhealth.core.data.config.models.ConfigurationResource;
import org.endeavourhealth.ui.framework.config.ConfigSerializer;
import org.endeavourhealth.ui.framework.config.models.RabbitmqManagement;
import org.endeavourhealth.core.security.annotations.RequiresAdmin;
import org.endeavourhealth.ui.json.*;
import org.endeavourhealth.core.security.SecurityUtils;
import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.*;
import javax.ws.rs.client.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import java.io.IOException;
import java.util.*;
@Path("/dashboard")
public final class DashboardEndpoint extends AbstractEndpoint {
private static final Logger LOG = LoggerFactory.getLogger(DashboardEndpoint.class);
private static final Random rnd = new Random();
private final HttpAuthenticationFeature rabbitAuth;
{
RabbitmqManagement authConfig = ConfigSerializer.getConfig().getRabbitmqManagement();
rabbitAuth = HttpAuthenticationFeature.basic(authConfig.getUsername(), authConfig.getPassword());
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("/getRecentDocuments")
public Response getRecentDocuments(@Context SecurityContext sc, @QueryParam("count") int count) throws Exception {
super.setLogbackMarkers(sc);
UUID userUuid = SecurityUtils.getCurrentUserId(sc);
UUID orgUuid = getOrganisationUuidFromToken(sc);
LOG.trace("getRecentDocuments {}", count);
List<JsonFolderContent> ret = new ArrayList<>();
LibraryRepository repository = new LibraryRepository();
Iterable<Audit> audit = repository.getAuditByOrgAndDateDesc(orgUuid);
for (Audit auditItem: audit) {
Iterable<ActiveItem> activeItems = repository.getActiveItemByAuditId(auditItem.getId());
for (ActiveItem activeItem: activeItems) {
if (activeItem.getIsDeleted()!=null && activeItem.getIsDeleted()==false) {
Item item = repository.getItemByKey(activeItem.getItemId(), activeItem.getAuditId());
JsonFolderContent content = new JsonFolderContent(activeItem, item, auditItem);
ret.add(content);
}
}
}
clearLogbackMarkers();
return Response
.ok()
.entity(ret)
.build();
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("/rabbitNodes")
public Response getRabbitNodes(@Context SecurityContext sc) throws Exception {
super.setLogbackMarkers(sc);
ConfigurationResource rabbitNodes = new ConfigurationRepository().getByKey(ConfigurationRepository.RABBIT_NODES);
List<JsonRabbitNode> ret = new ArrayList<>();
for (String node : rabbitNodes.getConfigurationData().split(",", -1))
ret.add(new JsonRabbitNode(node, 0));
clearLogbackMarkers();
return Response
.ok()
.entity(ret)
.build();
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("/rabbitNode/ping")
public Response pingRabbitNode(@Context SecurityContext sc, @QueryParam("address") String address) throws Exception {
super.setLogbackMarkers(sc);
Client client = ClientBuilder.newClient();
client.register(rabbitAuth);
WebTarget resource = client.target("http://"+address+"/api/cluster-name");
Invocation.Builder request = resource.request();
int ping = -1;
try {
long startTime = System.currentTimeMillis();
Response response = request.get();
long endTime = System.currentTimeMillis();
if (response.getStatusInfo().getFamily() == Response.Status.Family.SUCCESSFUL)
ping = Math.toIntExact(endTime - startTime);
response.close();
}
catch (Exception e) {
e.printStackTrace();
}
client.close();
JsonRabbitNode ret = new JsonRabbitNode(address, ping);
clearLogbackMarkers();
return Response
.ok()
.entity(ret)
.build();
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("/rabbitNode/queues")
public Response getRabbitQueues(@Context SecurityContext sc, @QueryParam("address") String address) throws Exception {
super.setLogbackMarkers(sc);
Client client = ClientBuilder.newClient();
client.register(rabbitAuth);
WebTarget resource = client.target("http://"+address+"/api/queues");
Invocation.Builder request = resource.request();
Response response = request.get();
String ret = null;
if (response.getStatusInfo().getFamily() == Response.Status.Family.SUCCESSFUL) {
ret = response.readEntity(String.class);
}
response.close();
client.close();
clearLogbackMarkers();
return Response
.ok()
.entity(ret)
.build();
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("/rabbitNode/exchanges")
public Response getRabbitExchanges(@Context SecurityContext sc, @QueryParam("address") String address) throws Exception {
super.setLogbackMarkers(sc);
Client client = ClientBuilder.newClient();
client.register(rabbitAuth);
WebTarget resource = client.target("http://"+address+"/api/exchanges");
Invocation.Builder request = resource.request();
Response response = request.get();
String ret = null;
if (response.getStatusInfo().getFamily() == Response.Status.Family.SUCCESSFUL) {
ret = response.readEntity(String.class);
}
response.close();
client.close();
clearLogbackMarkers();
return Response
.ok()
.entity(ret)
.build();
}
@GET
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("/rabbitNode/bindings")
public Response getRabbitBindings(@Context SecurityContext sc, @QueryParam("address") String address) throws Exception {
super.setLogbackMarkers(sc);
String ret = getRabbitBindingsJson(address);
clearLogbackMarkers();
return Response
.ok()
.entity(ret)
.build();
}
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("/rabbitNode/synchronize")
@RequiresAdmin
public Response synchronizeRabbit(@Context SecurityContext sc, @QueryParam("address") String address) throws Exception {
String[] pipelines = {"EdsInbound", "EdsProtocol", "EdsTransform", "EdsResponse", "EdsSubscriber"};
super.setLogbackMarkers(sc);
// Load current bindings
List<JsonRabbitBinding> currentBindings = getCurrentRabbitBindings(address);
// Load config
List<JsonRouteGroup> configuredBindings = getConfiguredBindings();
for (String pipeline : pipelines) {
// Declare (config) queues
declareAllQueues(address, pipeline, configuredBindings);
// Bind (config) queues to DLE
bindQueuesToExchange(address, pipeline + "-DLE", pipeline, configuredBindings);
// Remove all bindings from main exchange (DLE now routes to queues based on new config)
removeBindingsFromMainExchange(address, pipeline, currentBindings);
// Bind (config) to main exchange (main exchange now routes to queues based on new config)
bindQueuesToExchange(address, pipeline, pipeline, configuredBindings);
// Remove (config) bindings from DLE
removeBindingsFromDLEExchange(address, pipeline + "-DLE", pipeline, configuredBindings);
// Determine queues to remove (unbound)
// Wait for any unbound queues to drain
// Remove (now empty) unbound queues
// (Shutdown readers of unbound queues????)
// (Startup readers of queues without readers????)
}
String ret = getRabbitBindingsJson(address);
clearLogbackMarkers();
return Response
.ok(ret)
.build();
}
private String getRabbitBindingsJson(String address) {
String json = null;
Client client = ClientBuilder.newClient();
client.register(rabbitAuth);
WebTarget resource = client.target("http://"+address+"/api/bindings");
Invocation.Builder request = resource.request();
Response response = request.get();
if (response.getStatusInfo().getFamily() == Response.Status.Family.SUCCESSFUL) {
json = response.readEntity(String.class);
}
response.close();
client.close();
return json;
}
private List<JsonRabbitBinding> getCurrentRabbitBindings(String address) throws IOException {
List<JsonRabbitBinding> bindings = new ArrayList<>();
String json = getRabbitBindingsJson(address);
if (json != null) {
bindings = ObjectMapperPool.getInstance().readValue(json, new TypeReference<List<JsonRabbitBinding>>(){});
}
return bindings;
}
private List<JsonRouteGroup> getConfiguredBindings() throws IOException {
List<JsonRouteGroup> bindings = new ArrayList<>();
ConfigurationResource configurationResource = new ConfigurationRepository().getByKey(UUID.fromString("b9b14e26-5a52-4f36-ad89-f01e465c1361"));
if (configurationResource != null) {
bindings = ObjectMapperPool.getInstance().readValue(configurationResource.getConfigurationData(), new TypeReference<List<JsonRouteGroup>>(){});
}
return bindings;
}
private void declareAllQueues(String address, String queuePrefix, List<JsonRouteGroup> routeGroups) throws Exception {
Client client = ClientBuilder.newClient();
client.register(rabbitAuth);
for (JsonRouteGroup routeGroup : routeGroups) {
WebTarget resource = client.target("http://" + address + "/api/queues/%2f/"+queuePrefix + "-" + routeGroup.getRouteKey());
Invocation.Builder request = resource.request();
JsonRabbitQueueOptions optionsJson = new JsonRabbitQueueOptions();
optionsJson.setAuto_delete(false);
optionsJson.setDurable(true);
Response response = request.put(Entity.json(optionsJson));
if (response.getStatusInfo().getFamily() != Response.Status.Family.SUCCESSFUL) {
throw new Exception("Unable do declare the queue");
}
response.close();
}
client.close();
}
private void bindQueuesToExchange(String address, String exchange, String queuePrefix, List<JsonRouteGroup> routeGroups) throws Exception {
Client client = ClientBuilder.newClient();
client.register(rabbitAuth);
for (JsonRouteGroup routeGroup : routeGroups) {
WebTarget resource = client.target("http://" + address + "/api/bindings/%2f/e/"+exchange+"/q/"+queuePrefix + "-" + routeGroup.getRouteKey());
Invocation.Builder request = resource.request();
JsonRabbitBindingOptions optionsJson = new JsonRabbitBindingOptions();
optionsJson.setRouting_key(routeGroup.getRouteKey());
Response response = request.post(Entity.json(optionsJson));
if (response.getStatusInfo().getFamily() != Response.Status.Family.SUCCESSFUL) {
throw new Exception("Unable do declare the queue");
}
response.close();
}
client.close();
}
private void removeBindingsFromMainExchange(String address, String exchange, List<JsonRabbitBinding> currentBindings) throws Exception {
for (JsonRabbitBinding rabbitBinding : currentBindings) {
if (exchange.equals(rabbitBinding.getSource())) {
removeBindingFromExchange(address, exchange, rabbitBinding.getDestination(), rabbitBinding.getRouting_key());
}
}
}
private void removeBindingsFromDLEExchange(String address, String exchange, String queuePrefix, List<JsonRouteGroup> routeGroups) throws Exception {
for (JsonRouteGroup routeGroup : routeGroups) {
removeBindingFromExchange(address, exchange, queuePrefix+"-"+routeGroup.getRouteKey(), routeGroup.getRouteKey());
}
}
private void removeBindingFromExchange(String address, String exchange, String queue, String routingKey) throws Exception {
Client client = ClientBuilder.newClient();
client.register(rabbitAuth);
WebTarget resource = client.target("http://" + address + "/api/bindings/%2f/e/" + exchange + "/q/" + queue + "/"+routingKey);
Invocation.Builder request = resource.request();
Response response = request.delete();
if (response.getStatusInfo().getFamily() != Response.Status.Family.SUCCESSFUL) {
throw new Exception("Unable do declare the queue");
}
response.close();
}
}
|
package com.algolia.search.saas;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.List;
import org.json.JSONArray;
public class Query {
/**
 * Controls how the query words are interpreted for prefix matching.
 */
public enum QueryType
{
    /// All query words are interpreted as prefixes.
    PREFIX_ALL,
    /// Only the last word is interpreted as a prefix (default behavior).
    PREFIX_LAST,
    /// No query word is interpreted as a prefix. This option is not recommended.
    PREFIX_NONE
}
/**
 * Fallback strategy applied when a query returns no result.
 */
public enum RemoveWordsType
{
    /// When a query does not return any result, the final word will be removed until there are results. This option is particularly useful on e-commerce websites.
    REMOVE_LAST_WORDS,
    /// When a query does not return any result, the first word will be removed until there are results. This option is useful on address search.
    REMOVE_FIRST_WORDS,
    /// No specific processing is done when a query does not return any result.
    REMOVE_NONE
}
// --- attribute selection ---
protected List<String> attributes;               // attributes to retrieve (null = all)
protected List<String> attributesToHighlight;    // attributes to highlight (null = indexed attributes)
protected List<String> attributesToSnippet;      // "attribute:nbWords" snippet specs (null = none)
// --- typo tolerance ---
protected int minWordSizeForApprox1;             // min word length to tolerate one typo
protected int minWordSizeForApprox2;             // min word length to tolerate two typos
protected boolean getRankingInfo;                // include _rankingInfo in hits
protected boolean ignorePlural;                  // treat singular/plural forms as equal
protected boolean distinct;                      // enable the distinct feature
protected boolean advancedSyntax;                // NOTE(review): no setter visible in this chunk
// --- pagination ---
protected int page;                              // zero-based page to retrieve
protected int hitsPerPage;                       // number of hits per page
// --- filtering ---
protected String restrictSearchableAttributes;   // comma-separated subset of attributesToIndex
protected String tags;                           // tag filter expression
protected String numerics;                       // numeric filter expression
// --- geo search ---
protected String insideBoundingBox;              // pre-built "insideBoundingBox=..." parameter
protected String aroundLatLong;                  // pre-built "aroundLatLng=..."/"aroundRadius=..." parameter
protected boolean aroundLatLongViaIP;            // geolocate via the caller's IP
// --- query behaviour ---
protected String query;                          // full-text query string
protected QueryType queryType;                   // prefix-matching behaviour
protected String optionalWords;                  // comma-separated optional words
protected String facets;                         // JSON array string of facet names
protected String facetsFilter;                   // facet filter expression (string or JSON array string)
protected int maxNumberOfFacets;                 // max values returned per facet (-1 = server default)
protected boolean analytics;                     // record this query in analytics
protected boolean synonyms;                      // apply synonym expansion
protected boolean replaceSynonyms;               // replace matched synonyms in highlight results
protected boolean typoTolerance;                 // enable typo tolerance
protected boolean allowTyposOnNumericTokens;     // tolerate typos on numeric tokens
protected RemoveWordsType removeWordsIfNoResult; // empty-result fallback strategy
/**
 * Create a query for the given full-text query string.
 * Defaults: one typo allowed from 3 characters and two from 7; page 0 with
 * 20 hits per page; query type PREFIX_LAST; analytics, synonyms, synonym
 * replacement in highlights, typo tolerance and typos on numeric tokens all
 * enabled; no word removal on empty results; no facet limit.
 */
public Query(String query) {
    minWordSizeForApprox1 = 3;
    minWordSizeForApprox2 = 7;
    getRankingInfo = false;
    ignorePlural = false;
    distinct = false;
    page = 0;
    hitsPerPage = 20;
    this.query = query;
    queryType = QueryType.PREFIX_LAST;
    maxNumberOfFacets = -1;
    advancedSyntax = false;
    analytics = synonyms = replaceSynonyms = typoTolerance = allowTyposOnNumericTokens = true;
    removeWordsIfNoResult = RemoveWordsType.REMOVE_NONE;
}
/**
 * Create an empty query with the same defaults as {@link #Query(String)}
 * except the query type, which is PREFIX_ALL here.
 * NOTE(review): the String constructor uses PREFIX_LAST (documented on the
 * enum as "default behavior") while this one uses PREFIX_ALL - confirm the
 * difference is intentional.
 */
public Query() {
    minWordSizeForApprox1 = 3;
    minWordSizeForApprox2 = 7;
    getRankingInfo = false;
    ignorePlural = false;
    distinct = false;
    page = 0;
    hitsPerPage = 20;
    queryType = QueryType.PREFIX_ALL;
    maxNumberOfFacets = -1;
    advancedSyntax = false;
    analytics = synonyms = replaceSynonyms = typoTolerance = allowTyposOnNumericTokens = true;
    removeWordsIfNoResult = RemoveWordsType.REMOVE_NONE;
}
/**
 * Select the strategy to adopt when a query does not return any result.
 * @param type the removal strategy
 * @return this query, for call chaining
 */
public Query removeWordsIfNoResult(RemoveWordsType type)
{
    this.removeWordsIfNoResult = type;
    return this;
}
/**
 * List of object attributes you want to use for textual search (must be a subset of the attributesToIndex
 * index setting). Attributes are separated with a comma (for example "name,address").
 * You can also use a JSON string array encoding (for example encodeURIComponent("[\"name\",\"address\"]")).
 * By default, all attributes specified in attributesToIndex settings are used to search.
 * @return this query, for call chaining
 */
public Query restrictSearchableAttributes(String attributes)
{
    this.restrictSearchableAttributes = attributes;
    return this;
}
/**
 * Select how the query words are interpreted (see {@link QueryType}).
 * @return this query, for call chaining
 */
public Query setQueryType(QueryType type)
{
    this.queryType = type;
    return this;
}
/**
 * Set the full text query.
 * @return this query, for call chaining
 */
public Query setQueryString(String query)
{
    this.query = query;
    return this;
}
/**
 * Specify the list of attribute names to retrieve.
 * By default all attributes are retrieved.
 * @return this query, for call chaining
 */
public Query setAttributesToRetrieve(List<String> attributes) {
    this.attributes = attributes;
    return this;
}
/**
 * Specify the list of attribute names to highlight.
 * By default indexed attributes are highlighted.
 * @return this query, for call chaining
 */
public Query setAttributesToHighlight(List<String> attributes) {
    this.attributesToHighlight = attributes;
    return this;
}
/**
 * Specify the list of attribute names to snippet alongside the number of words to return (syntax is 'attributeName:nbWords').
 * By default no snippet is computed.
 * @return this query, for call chaining
 */
public Query setAttributesToSnippet(List<String> attributes) {
    this.attributesToSnippet = attributes;
    return this;
}
/**
 * Enable or disable the distinct feature (disabled by default), effective when
 * the attributeForDistinct index setting is set.
 * This feature is similar to the SQL "distinct" keyword: when enabled in a query with the distinct=1 parameter,
 * all hits containing a duplicate value for the attributeForDistinct attribute are removed from results.
 * For example, if the chosen attribute is show_name and several hits have the same value for show_name, then only the best
 * one is kept and the others are removed.
 * @param distinct if set to true, enable the distinct feature
 * @return this query, for call chaining
 */
public Query enableDistinct(boolean distinct) {
    this.distinct = distinct;
    return this;
}
/**
 * @param enabled if set to false, this query will not be taken into account in the analytics feature. Defaults to true.
 * @return this query, for call chaining
 */
public Query enableAnalytics(boolean enabled) {
    this.analytics = enabled;
    return this;
}
/**
 * @param enabled if set to false, this query will not use synonyms defined in the configuration. Defaults to true.
 * @return this query, for call chaining
 */
public Query enableSynonyms(boolean enabled) {
    this.synonyms = enabled;
    return this;
}
/**
 * @param enabled if set to false, words matched via synonym expansion will not be replaced by the matched synonym in highlight results. Defaults to true.
 * @return this query, for call chaining
 */
public Query enableReplaceSynonymsInHighlight(boolean enabled) {
    this.replaceSynonyms = enabled;
    return this;
}
/**
 * @param enabled if set to false, disable typo-tolerance. Defaults to true.
 * @return this query, for call chaining
 */
public Query enableTypoTolerance(boolean enabled) {
    this.typoTolerance = enabled;
    return this;
}
/**
 * Specify the minimum number of characters in a query word to accept one typo in this word.
 * Defaults to 3.
 * @return this query, for call chaining
 */
public Query setMinWordSizeToAllowOneTypo(int nbChars) {
    minWordSizeForApprox1 = nbChars;
    return this;
}
/**
 * Specify the minimum number of characters in a query word to accept two typos in this word.
 * Defaults to 7.
 * @return this query, for call chaining
 */
public Query setMinWordSizeToAllowTwoTypos(int nbChars) {
    minWordSizeForApprox2 = nbChars;
    return this;
}
/**
 * @param enabled if set to false, disable typo-tolerance on numeric tokens. Defaults to true.
 * @return this query, for call chaining
 */
public Query enableTyposOnNumericTokens(boolean enabled) {
    this.allowTyposOnNumericTokens = enabled;
    return this;
}
/**
 * If set, the result hits will contain ranking information in the _rankingInfo attribute.
 * @return this query, for call chaining
 */
public Query getRankingInfo(boolean enabled) {
    getRankingInfo = enabled;
    return this;
}
/**
 * If set to true, plural won't be considered as a typo (for example car/cars will be considered as equal). Defaults to false.
 * @return this query, for call chaining
 */
public Query ignorePlural(boolean enabled) {
    ignorePlural = enabled;
    return this;
}
/**
 * Set the page to retrieve (zero based). Defaults to 0.
 * @return this query, for call chaining
 */
public Query setPage(int page) {
    this.page = page;
    return this;
}
/**
 * Set the number of hits per page. Defaults to 20 (see the constructors).
 * @return this query, for call chaining
 */
public Query setHitsPerPage(int nbHitsPerPage) {
    this.hitsPerPage = nbHitsPerPage;
    return this;
}
/**
 * Set the number of hits per page. Defaults to 20 (see the constructors).
 * @deprecated Use {@code setHitsPerPage}
 */
@Deprecated
public Query setNbHitsPerPage(int nbHitsPerPage) {
    return setHitsPerPage(nbHitsPerPage);
}
/**
 * Search for entries around a given latitude/longitude.
 * @param radius set the maximum distance in meters.
 * @return this query, for call chaining
 * Note: at indexing, geoloc of an object should be set with the _geoloc attribute containing lat and lng attributes (for example {"_geoloc":{"lat":48.853409, "lng":2.348800}})
 */
public Query aroundLatitudeLongitude(float latitude, float longitude, int radius) {
    aroundLatLong = "aroundLatLng=" + latitude + "," + longitude + "&aroundRadius=" + radius;
    return this;
}
/**
 * Search for entries around a given latitude/longitude.
 * @param radius set the maximum distance in meters.
 * @param precision set the precision for ranking (for example if you set precision=100, two objects that are distant of less than 100m will be considered as identical for the "geo" ranking parameter).
 * @return this query, for call chaining
 * Note: at indexing, geoloc of an object should be set with the _geoloc attribute containing lat and lng attributes (for example {"_geoloc":{"lat":48.853409, "lng":2.348800}})
 */
public Query aroundLatitudeLongitude(float latitude, float longitude, int radius, int precision) {
    aroundLatLong = "aroundLatLng=" + latitude + "," + longitude + "&aroundRadius=" + radius + "&aroundPrecision=" + precision;
    return this;
}
/**
 * Search for entries around the latitude/longitude of the user (using IP geolocation).
 * @param radius set the maximum distance in meters.
 * @return this query, for call chaining
 * Note: at indexing, geoloc of an object should be set with the _geoloc attribute containing lat and lng attributes (for example {"_geoloc":{"lat":48.853409, "lng":2.348800}})
 */
public Query aroundLatitudeLongitudeViaIP(boolean enabled, int radius) {
    aroundLatLong = "aroundRadius=" + radius;
    aroundLatLongViaIP = enabled;
    return this;
}
/**
 * Search for entries around the latitude/longitude of the user (using IP geolocation).
 * @param radius set the maximum distance in meters.
 * @param precision set the precision for ranking (for example if you set precision=100, two objects that are distant of less than 100m will be considered as identical for the "geo" ranking parameter).
 * @return this query, for call chaining
 * Note: at indexing, geoloc of an object should be set with the _geoloc attribute containing lat and lng attributes (for example {"_geoloc":{"lat":48.853409, "lng":2.348800}})
 */
public Query aroundLatitudeLongitudeViaIP(boolean enabled, int radius, int precision) {
    aroundLatLong = "aroundRadius=" + radius + "&aroundPrecision=" + precision;
    aroundLatLongViaIP = enabled;
    return this;
}
/**
 * Search for entries inside a given area defined by the two extreme points of a rectangle.
 * At indexing, geoloc of an object should be set with the _geoloc attribute containing lat and lng attributes (for example {"_geoloc":{"lat":48.853409, "lng":2.348800}})
 * @return this query, for call chaining
 */
public Query insideBoundingBox(float latitudeP1, float longitudeP1, float latitudeP2, float longitudeP2) {
    insideBoundingBox = "insideBoundingBox=" + latitudeP1 + "," + longitudeP1 + "," + latitudeP2 + "," + longitudeP2;
    return this;
}
/**
 * Set the list of words that should be considered as optional when found in the query.
 * @param words The list of optional words, comma separated.
 * @return this query, for call chaining
 */
public Query setOptionalWords(String words) {
    this.optionalWords = words;
    return this;
}
/**
 * Set the list of words that should be considered as optional when found in
 * the query.
 *
 * @param words the list of optional words; joined with commas. The previous
 *              implementation appended a trailing comma after the last word
 *              (producing e.g. "a,b,"); this version joins without a trailing
 *              separator, consistent with {@code setNumericFilters(List)}.
 */
public Query setOptionalWords(List<String> words) {
    StringBuilder builder = new StringBuilder();
    boolean first = true;
    for (String word : words) {
        if (!first)
            builder.append(",");
        builder.append(word);
        first = false;
    }
    this.optionalWords = builder.toString();
    return this;
}
/**
 * Filter the query by a list of facets. Each filter is encoded as `attributeName:value`.
 */
public Query setFacetFilters(List<String> facets) {
    // Serialize the filters as a JSON array string.
    final JSONArray encoded = new JSONArray();
    for (final String filter : facets) {
        encoded.put(filter);
    }
    facetsFilter = encoded.toString();
    return this;
}
/**
 * Filter the query by an already-encoded facet filter string; stored verbatim.
 */
public Query setFacetFilters(final String facetsFilter) {
    this.facetsFilter = facetsFilter;
    return this;
}
/**
 * List of object attributes that you want to use for faceting. <br/>
 * Only attributes that have been added in **attributesForFaceting** index setting can be used in this parameter.
 * You can also use `*` to perform faceting on all attributes specified in **attributesForFaceting**.
 */
public Query setFacets(List<String> facets) {
    // Serialize the attribute names as a JSON array string.
    final JSONArray encoded = new JSONArray();
    for (final String facet : facets) {
        encoded.put(facet);
    }
    this.facets = encoded.toString();
    return this;
}
/**
 * Limit the number of facet values returned for each facet.
 *
 * @param n the maximum number of values per facet
 */
public Query setMaxNumberOfFacets(final int n) {
    maxNumberOfFacets = n;
    return this;
}
/**
 * Filter the query by a set of tags. You can AND tags by separating them by commas. To OR tags, you must add parentheses. For example tag1,(tag2,tag3) means tag1 AND (tag2 OR tag3).
 * At indexing, tags should be added in the _tags attribute of objects (for example {"_tags":["tag1","tag2"]} )
 */
public Query setTagFilters(final String tags) {
    this.tags = tags;
    return this;
}
/**
 * Add a list of numeric filters separated by a comma.
 * The syntax of one filter is `attributeName` followed by `operand` followed by `value`. Supported operands are `<`, `<=`, `=`, `>` and `>=`.
 * You can have multiple conditions on one attribute like for example `numerics=price>100,price<1000`.
 */
public Query setNumericFilters(final String numerics) {
    this.numerics = numerics;
    return this;
}
/**
 * Add a list of numeric filters separated by a comma.
 * The syntax of one filter is `attributeName` followed by `operand` followed by `value`. Supported operands are `<`, `<=`, `=`, `>` and `>=`.
 * You can have multiple conditions on one attribute like for example `numerics=price>100,price<1000`.
 */
public Query setNumericFilters(List<String> numerics) {
    // Join the conditions with commas, no trailing separator.
    final StringBuilder joined = new StringBuilder();
    String separator = "";
    for (final String condition : numerics) {
        joined.append(separator).append(condition);
        separator = ",";
    }
    this.numerics = joined.toString();
    return this;
}
/**
 * Enable the advanced query syntax. Defaults to false.
 * - Phrase query: a phrase query defines a particular sequence of terms.
 *   A phrase query is built by Algolia's query parser for words surrounded by ".
 *   For example, "search engine" will retrieve records having search next to engine only.
 *   Typo-tolerance is disabled on phrase queries.
 * - Prohibit operator: the prohibit operator excludes records that contain the term after the - symbol.
 *   For example search -engine will retrieve records containing search but not engine.
 */
public Query enableAvancedSyntax(final boolean advancedSyntax) {
    this.advancedSyntax = advancedSyntax;
    return this;
}
/**
 * Serializes every non-default parameter of this query into a URL query
 * string ("name=value" pairs joined with '&'). The previous version repeated
 * the "append '&' when the buffer is non-empty" pair of lines for every
 * parameter; that logic is now factored into small private helpers with
 * identical output.
 *
 * @return the encoded query string; empty when all parameters are defaults
 */
protected String getQueryString() {
    StringBuilder stringBuilder = new StringBuilder();
    try {
        if (attributes != null) {
            stringBuilder.append("attributes=");
            boolean first = true;
            for (String attr : this.attributes) {
                if (!first)
                    stringBuilder.append(",");
                stringBuilder.append(URLEncoder.encode(attr, "UTF-8"));
                first = false;
            }
        }
        if (attributesToHighlight != null) {
            appendParam(stringBuilder, "attributesToHighlight=");
            boolean first = true;
            for (String attr : this.attributesToHighlight) {
                if (!first)
                    stringBuilder.append(',');
                stringBuilder.append(URLEncoder.encode(attr, "UTF-8"));
                first = false;
            }
        }
        if (attributesToSnippet != null) {
            appendParam(stringBuilder, "attributesToSnippet=");
            boolean first = true;
            for (String attr : this.attributesToSnippet) {
                if (!first)
                    stringBuilder.append(',');
                stringBuilder.append(URLEncoder.encode(attr, "UTF-8"));
                first = false;
            }
        }
        // Boolean flags are only emitted when they differ from the defaults.
        if (!typoTolerance) {
            appendParam(stringBuilder, "typoTolerance=false");
        }
        if (!allowTyposOnNumericTokens) {
            appendParam(stringBuilder, "allowTyposOnNumericTokens=false");
        }
        if (minWordSizeForApprox1 != 3) {
            appendParam(stringBuilder, "minWordSizefor1Typo=");
            stringBuilder.append(minWordSizeForApprox1);
        }
        if (minWordSizeForApprox2 != 7) {
            appendParam(stringBuilder, "minWordSizefor2Typos=");
            stringBuilder.append(minWordSizeForApprox2);
        }
        switch (removeWordsIfNoResult) {
        case REMOVE_LAST_WORDS:
            appendParam(stringBuilder, "removeWordsIfNoResult=LastWords");
            break;
        case REMOVE_FIRST_WORDS:
            appendParam(stringBuilder, "removeWordsIfNoResult=FirstWords");
            break;
        case REMOVE_NONE:
            // Default behavior: nothing to emit.
            break;
        }
        if (getRankingInfo) {
            appendParam(stringBuilder, "getRankingInfo=1");
        }
        if (ignorePlural) {
            appendParam(stringBuilder, "ignorePlural=true");
        }
        if (!analytics) {
            appendParam(stringBuilder, "analytics=0");
        }
        if (!synonyms) {
            appendParam(stringBuilder, "synonyms=0");
        }
        if (!replaceSynonyms) {
            appendParam(stringBuilder, "replaceSynonymsInHighlight=0");
        }
        if (distinct) {
            appendParam(stringBuilder, "distinct=1");
        }
        if (advancedSyntax) {
            appendParam(stringBuilder, "advancedSyntax=1");
        }
        if (page > 0) {
            appendParam(stringBuilder, "page=");
            stringBuilder.append(page);
        }
        if (hitsPerPage != 20 && hitsPerPage > 0) {
            appendParam(stringBuilder, "hitsPerPage=");
            stringBuilder.append(hitsPerPage);
        }
        if (tags != null) {
            appendParam(stringBuilder, "tagFilters=");
            stringBuilder.append(URLEncoder.encode(tags, "UTF-8"));
        }
        if (numerics != null) {
            appendParam(stringBuilder, "numericFilters=");
            stringBuilder.append(URLEncoder.encode(numerics, "UTF-8"));
        }
        // The geo fields already contain complete "name=value" fragments;
        // a bounding box takes precedence over a lat/lng point.
        if (insideBoundingBox != null) {
            appendParam(stringBuilder, insideBoundingBox);
        } else if (aroundLatLong != null) {
            appendParam(stringBuilder, aroundLatLong);
        }
        if (aroundLatLongViaIP) {
            appendParam(stringBuilder, "aroundLatLngViaIP=true");
        }
        if (query != null) {
            appendParam(stringBuilder, "query=");
            stringBuilder.append(URLEncoder.encode(query, "UTF-8"));
        }
        if (facets != null) {
            appendParam(stringBuilder, "facets=");
            stringBuilder.append(URLEncoder.encode(facets, "UTF-8"));
        }
        if (facetsFilter != null) {
            appendParam(stringBuilder, "facetFilters=");
            stringBuilder.append(URLEncoder.encode(facetsFilter, "UTF-8"));
        }
        if (maxNumberOfFacets > 0) {
            appendParam(stringBuilder, "maxNumberOfFacets=");
            stringBuilder.append(maxNumberOfFacets);
        }
        if (optionalWords != null) {
            appendParam(stringBuilder, "optionalWords=");
            stringBuilder.append(URLEncoder.encode(optionalWords, "UTF-8"));
        }
        if (restrictSearchableAttributes != null) {
            appendParam(stringBuilder, "restrictSearchableAttributes=");
            stringBuilder.append(URLEncoder.encode(restrictSearchableAttributes, "UTF-8"));
        }
        switch (queryType) {
        case PREFIX_ALL:
            appendParam(stringBuilder, "queryType=prefixAll");
            break;
        case PREFIX_LAST:
            // Default behavior: nothing to emit.
            break;
        case PREFIX_NONE:
            appendParam(stringBuilder, "queryType=prefixNone");
            break;
        }
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is mandatory on every JVM, so this is effectively unreachable.
        throw new RuntimeException(e);
    }
    return stringBuilder.toString();
}

/** Appends the '&' separator when the buffer already holds a parameter. */
private static void appendSeparator(StringBuilder sb) {
    if (sb.length() > 0)
        sb.append('&');
}

/** Appends the separator (when needed) followed by the given fragment. */
private static void appendParam(StringBuilder sb, String fragment) {
    appendSeparator(sb);
    sb.append(fragment);
}
}
|
package br.com.dbsoft.ui.core;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.util.HashMap;
import java.util.Map;
import javax.faces.context.FacesContext;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import br.com.dbsoft.core.DBSSDK.CONTENT_TYPE;
import br.com.dbsoft.message.IDBSMessageBase.MESSAGE_TYPE;
import br.com.dbsoft.util.DBSDate;
import br.com.dbsoft.util.DBSFile;
import br.com.dbsoft.util.DBSFormat;
import br.com.dbsoft.util.DBSHttp;
import br.com.dbsoft.util.DBSNumber;
import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.JasperCompileManager;
import net.sf.jasperreports.engine.JasperExportManager;
import net.sf.jasperreports.engine.JasperFillManager;
import net.sf.jasperreports.engine.JasperPrint;
import net.sf.jasperreports.engine.data.JRBeanCollectionDataSource;
import net.sf.jasperreports.engine.export.JRPdfExporter;
import net.sf.jasperreports.engine.export.JRPdfExporterParameter;
import net.sf.jasperreports.engine.export.JRXlsExporterParameter;
import net.sf.jasperreports.engine.export.JRXmlExporter;
import net.sf.jasperreports.engine.export.JRXmlExporterParameter;
import net.sf.jasperreports.engine.export.ooxml.JRXlsxExporter;
public class DBSReportFormUtil {

    private static Logger wLogger = Logger.getLogger(DBSReportFormUtil.class);

    // The .jrxml sources must live in src/main/resources/reports, and a
    // "reports" folder must exist under the web application root for output.
    private static final String REPORT_FOLDER = "reports" + File.separator;
    private static final String SUBREPORT_SUFIX = "_subReport";

    /**
     * Compiles the report (plus the shared header template and any numbered
     * subreports) when the compiled .jasper file is out of date, then fills it
     * with the given data.
     *
     * @param pReportFileName report name without extension
     * @param pReportParameters report parameters (may be null; a map is created)
     * @param pReportData either a JRBeanCollectionDataSource or a java.sql.Connection
     * @return the filled report, or null when compilation failed
     * @throws JRException on JasperReports compile/fill errors
     * @throws IOException on I/O errors while reading the compiled report
     */
    public static JasperPrint createJasperPrint(String pReportFileName, Map<String, Object> pReportParameters, Object pReportData) throws JRException, IOException {
        JasperPrint xPrint = null;
        String xReportFilePathJASPER = pvGetReportFilePath(DBSFile.getFileNameJASPER(pReportFileName));
        String xReportFilePathJRXML = pvGetReportFilePath(DBSFile.getFileNameJRXML(pReportFileName));
        Integer xCount;
        if (!DBSFile.isEqualDate(xReportFilePathJASPER, xReportFilePathJRXML)) {
            // Recompile the main report; abort when compilation fails.
            if (!pvCreateJasperFile(pReportFileName)) {
                return null;
            } else {
                // Compile the shared header template when its .jasper is missing or stale.
                if (!DBSFile.exists(pvGetReportFilePath(DBSFile.getFileNameJASPER("DBSTemplate_Header")))) {
                    String xSubReportFilePathJRXML = pvGetReportFilePath(DBSFile.getFileNameJRXML("DBSTemplate_Header"));
                    String xSubReportFilePathJASPER = pvGetReportFilePath(DBSFile.getFileNameJASPER("DBSTemplate_Header"));
                    if (!DBSFile.isEqualDate(xSubReportFilePathJASPER, xSubReportFilePathJRXML)) {
                        pvCreateJasperFile("DBSTemplate_Header");
                    }
                }
                // Compile every numbered subreport (<name>_subReport1, 2, ...) that is stale.
                xCount = 1;
                while (DBSFile.exists(pvGetReportFilePath(DBSFile.getFileNameJRXML(pReportFileName + SUBREPORT_SUFIX + xCount)))) {
                    String xSubReportFilePathJRXML = pvGetReportFilePath(DBSFile.getFileNameJRXML(pReportFileName + SUBREPORT_SUFIX + xCount));
                    String xSubReportFilePathJASPER = pvGetReportFilePath(DBSFile.getFileNameJASPER(pReportFileName + SUBREPORT_SUFIX + xCount));
                    if (!DBSFile.isEqualDate(xSubReportFilePathJASPER, xSubReportFilePathJRXML)) {
                        pvCreateJasperFile(pReportFileName + SUBREPORT_SUFIX + xCount);
                    }
                    xCount++;
                }
            }
        }
        // try-with-resources: the previous version leaked the stream when
        // JasperFillManager.fillReport threw.
        try (InputStream xReportInputStream = new FileInputStream(new File(xReportFilePathJASPER))) {
            if (pReportParameters == null) {
                pReportParameters = new HashMap<String, Object>();
            }
            pReportParameters.put("pDATA_IMPRESSAO", DBSFormat.getFormattedDateTimes(System.currentTimeMillis()));
            pReportParameters.put("pSUBREPORT_DIR", DBSHttp.getLocalPathWebInfClasses(FacesContext.getCurrentInstance().getExternalContext()) + File.separator + REPORT_FOLDER);
            if (JRBeanCollectionDataSource.class.isInstance(pReportData)) {
                JRBeanCollectionDataSource xDados = (JRBeanCollectionDataSource) pReportData;
                pReportParameters.put("pCOUNT_TOTAL", xDados.getRecordCount()); // total number of records to be printed
                xPrint = JasperFillManager.fillReport(xReportInputStream, pReportParameters, xDados);
            } else {
                Connection xCn = (Connection) pReportData;
                xPrint = JasperFillManager.fillReport(xReportInputStream, pReportParameters, xCn);
            }
        }
        return xPrint;
    }

    /**
     * Exports the filled report to a PDF file under the web "reports" folder,
     * using a unique name (report name + timestamp + caller's IP digits).
     *
     * @return the relative path of the generated PDF, or "" on failure
     */
    public static String createPDFFile(String pReportFileName, JasperPrint pJasperPrint) {
        try {
            if (pJasperPrint == null) {
                DBSMessagesFacesContext.sendMessage(MESSAGE_TYPE.ERROR, "Relatório [" + pReportFileName + "] não encontrado.", "mensagemErro");
            } else if (pJasperPrint.getPages() == null || pJasperPrint.getPages().size() <= 0) {
                DBSMessagesFacesContext.sendMessage(MESSAGE_TYPE.ERROR, "Nenhum informação encontrada.", "mensagemErro");
            } else {
                String xDate = DBSFormat.getFormattedDateCustom(DBSDate.getNowTimestamp(),"yyyyMMddhhmmss");
                String xReportFileName = pReportFileName + xDate + DBSNumber.getOnlyNumber(DBSHttp.getHttpServletRequest().getRemoteAddr().toString());
                if (DBSFile.mkDir(pvGetReportFilePathWeb(""))) {
                    String xPDFFile = pvGetReportFilePathWeb(DBSFile.getFileNamePDF(xReportFileName));
                    JasperExportManager.exportReportToPdfFile(pJasperPrint, xPDFFile);
                    // Return the relative path of the generated file.
                    return pvGetReportRelativeFilePath(DBSFile.getFileNamePDF(xReportFileName));
                } else {
                    wLogger.error("Erro ao gerar relatório " + pReportFileName + ": arquivo não criado.");
                    DBSMessagesFacesContext.sendMessage(MESSAGE_TYPE.ERROR, "Erro ao gerar relatório: arquivo não criado.", "mensagemErro");
                }
            }
        } catch (JRException e) {
            wLogger.error("Erro ao gerar relatório " + pReportFileName + ": " + e);
            DBSMessagesFacesContext.sendMessage(MESSAGE_TYPE.ERROR, "Erro ao gerar relatório: " + e.getMessage(), "mensagemErro");
        }
        return "";
    }

    /** Streams the report to the client as a PDF download. */
    public static void savePDF(String pReportFileName, JasperPrint pJasperPrint) {
        try {
            ByteArrayOutputStream xFilePDF = new ByteArrayOutputStream();
            JRPdfExporter xExporterPDF = new JRPdfExporter();
            xExporterPDF.setParameter(JRPdfExporterParameter.JASPER_PRINT, pJasperPrint);
            xExporterPDF.setParameter(JRPdfExporterParameter.OUTPUT_STREAM, xFilePDF);
            xExporterPDF.exportReport();
            DBSHttp.sendFile(xFilePDF, DBSFile.getFileNamePDF(pReportFileName), CONTENT_TYPE.APPLICATION_PDF);
        } catch (JRException e) {
            wLogger.error(e);
        }
    }

    /** Streams the report to the client as an XML download (images embedded). */
    public static void saveXML(String pReportFileName, JasperPrint pJasperPrint) {
        try {
            ByteArrayOutputStream xFileXML = new ByteArrayOutputStream();
            JRXmlExporter xExporterXML = new JRXmlExporter();
            xExporterXML.setParameter(JRXmlExporterParameter.JASPER_PRINT, pJasperPrint);
            xExporterXML.setParameter(JRXmlExporterParameter.OUTPUT_STREAM, xFileXML);
            xExporterXML.setParameter(JRXmlExporterParameter.IS_EMBEDDING_IMAGES, true);
            xExporterXML.exportReport();
            DBSHttp.sendFile(xFileXML, DBSFile.getFileNameXML(pReportFileName), CONTENT_TYPE.APPLICATION_XML);
        } catch (JRException e) {
            wLogger.error(e);
        }
    }

    /**
     * Streams the report to the client as an XLS download.
     * NOTE(review): this uses the XLSX exporter but sends an .xls name and XLS
     * content type — confirm this mismatch is intended. Prefer saveXLSX.
     *
     * @deprecated use {@link #saveXLSX(String, JasperPrint)} instead
     */
    @Deprecated
    public static void saveXLS(String pReportFileName, JasperPrint pJasperPrint) {
        try {
            ByteArrayOutputStream xFileXLS = new ByteArrayOutputStream();
            JRXlsxExporter xExporterXLSX = new JRXlsxExporter();
            xExporterXLSX.setParameter(JRXlsExporterParameter.JASPER_PRINT, pJasperPrint);
            xExporterXLSX.setParameter(JRXlsExporterParameter.OUTPUT_STREAM, xFileXLS);
            xExporterXLSX.exportReport();
            DBSHttp.sendFile(xFileXLS, DBSFile.getFileNameXLS(pReportFileName), CONTENT_TYPE.APPLICATION_XLS);
        } catch (JRException e) {
            wLogger.error(e);
        }
    }

    /** Streams the report to the client as an XLSX download. */
    public static void saveXLSX(String pReportFileName, JasperPrint pJasperPrint) {
        try {
            ByteArrayOutputStream xFileXLS = new ByteArrayOutputStream();
            JRXlsxExporter xExporterXLSX = new JRXlsxExporter();
            xExporterXLSX.setParameter(JRXlsExporterParameter.JASPER_PRINT, pJasperPrint);
            xExporterXLSX.setParameter(JRXlsExporterParameter.OUTPUT_STREAM, xFileXLS);
            xExporterXLSX.exportReport();
            DBSHttp.sendFile(xFileXLS, DBSFile.getFileNameXLSX(pReportFileName), CONTENT_TYPE.APPLICATION_XLSX);
        } catch (JRException e) {
            wLogger.error(e);
        }
    }

    /** Exports the report as HTML under the web folder and opens it in a new window. */
    public static void saveHTML(String pReportFileName, JasperPrint pJasperPrint) {
        HttpServletResponse xResponse = DBSHttp.getHttpServletResponse();
        try {
            JasperExportManager.exportReportToHtmlFile(pJasperPrint, pvGetReportFilePathWeb(DBSFile.getFileNameHTML(pReportFileName)));
            xResponse.getWriter().println("<script>window.open(\""+ pvGetReportRelativeFilePath(DBSFile.getFileNameHTML(pReportFileName)) + "\")</script>");
        } catch (JRException | IOException e) {
            wLogger.error(e);
            DBSMessagesFacesContext.sendMessage(MESSAGE_TYPE.ERROR, e.getMessage(), "mensagemErro");
        }
    }

    // ---------------------------------------------------------------- PRIVATE

    /** Absolute path of a report resource under WEB-INF/classes. */
    private static String pvGetReportFilePath(String pReportFileName){
        return DBSHttp.getLocalPathWebInfClasses(FacesContext.getCurrentInstance().getExternalContext()) + File.separator + pvGetReportRelativeFilePath(pReportFileName);
    }

    /** Absolute path of a report output file under the web application root. */
    private static String pvGetReportFilePathWeb(String pReportFileName){
        return DBSHttp.getLocalPath(FacesContext.getCurrentInstance().getExternalContext(), "") + File.separator + pvGetReportRelativeFilePath(pReportFileName);
    }

    /** Path of the report file relative to the application root ("reports/..."). */
    private static String pvGetReportRelativeFilePath(String pReportFileName){
        return REPORT_FOLDER + pReportFileName;
    }

    /**
     * Compiles a .jrxml into a .jasper file, copying the source's last-modified
     * date onto the compiled file so the staleness check above keeps working.
     * Returns primitive boolean (was boxed Boolean for no reason).
     *
     * @return true when the .jasper file was created
     */
    private static boolean pvCreateJasperFile(String pReportFileName) throws JRException {
        String xReportFilePathJRXML = pvGetReportFilePath(DBSFile.getFileNameJRXML(pReportFileName));
        if (!DBSFile.exists(xReportFilePathJRXML)) {
            wLogger.error("Relatório [" + xReportFilePathJRXML + "] não encontrado.");
        } else {
            String xReportFilePathJASPER = pvGetReportFilePath(DBSFile.getFileNameJASPER(pReportFileName));
            JasperCompileManager.compileReportToFile(xReportFilePathJRXML, xReportFilePathJASPER);
            if (DBSFile.exists(xReportFilePathJASPER)){
                DBSFile.copyLastModifiedData(xReportFilePathJRXML, xReportFilePathJASPER);
                return true;
            }
        }
        return false;
    }
}
|
package eu.ydp.empiria.player.client.controller.multiview.touch;
import com.google.gwt.dom.client.NativeEvent;
import com.google.inject.Inject;
import eu.ydp.empiria.player.client.controller.multiview.IMultiPageController;
import eu.ydp.empiria.player.client.module.button.NavigationButtonDirection;
import eu.ydp.empiria.player.client.util.events.bus.EventsBus;
import eu.ydp.empiria.player.client.util.events.player.PlayerEvent;
import eu.ydp.empiria.player.client.util.events.player.PlayerEventTypes;
import eu.ydp.gwtutil.client.event.TouchEventReader;
import eu.ydp.gwtutil.client.proxy.RootPanelDelegate;
import eu.ydp.gwtutil.client.proxy.WindowDelegate;
public class TouchController {

    private static final int PERCENT_MAX = 100;
    // A swipe counts as "flat enough" when width/height exceeds this ratio.
    private static final int SWYPE_WIDTH_TO_HEIGHT_LIMIT_RATE = 5;
    // Minimum accepted swipe width is clientWidth / MINIMAL_LENGTH_LIMIT_RATE.
    private static final int MINIMAL_LENGTH_LIMIT_RATE = 4;

    private final WindowDelegate windowDelegate;
    private final TouchEventReader touchEventReader;
    private final EventsBus eventsBus;
    private final TouchModel touchModel;
    private final RootPanelDelegate rootPanelDelegate;

    @Inject
    public TouchController(WindowDelegate windowDelegate, TouchEventReader touchEventReader, EventsBus eventsBus, TouchModel touchModel,
            RootPanelDelegate rootPanelDelegate) {
        this.windowDelegate = windowDelegate;
        this.touchEventReader = touchEventReader;
        this.eventsBus = eventsBus;
        this.touchModel = touchModel;
        this.rootPanelDelegate = rootPanelDelegate;
    }

    /** Records the initial touch coordinates and resets the per-gesture state. */
    public void updateOnTouchStart(NativeEvent onTouchStartEvent) {
        int y = touchEventReader.getScreenY(onTouchStartEvent);
        int x = touchEventReader.getX(onTouchStartEvent);
        boolean multiTouch = touchEventReader.isMoreThenOneFingerTouch(onTouchStartEvent);
        touchModel.setStartScrollTopPossition(windowDelegate.getScrollTop());
        touchModel.setStartY(y);
        touchModel.setStartX(x);
        touchModel.setLastEndX(x);
        touchModel.setEndX(-1);
        touchModel.setMultiTouch(multiTouch);
        touchModel.setSwipeStarted(false);
        touchModel.setTouchReservation(false);
    }

    public boolean isSwipeStarted() {
        return touchModel.isSwipeStarted();
    }

    /**
     * A page switch requires a finished gesture (endX set), a sufficiently
     * horizontal angle, and a sufficiently long swipe.
     */
    public boolean canSwitchPage() {
        int swipeWidth = Math.abs(touchModel.getStartX() - touchModel.getEndX());
        int swipeHeight = Math.abs(touchModel.getStartY() - touchModel.getEndY());
        return touchModel.getEndX() > 0 && isCorrectSwypeAngle(swipeWidth, swipeHeight) && isCorrectSwypeWidth(swipeWidth);
    }

    private boolean isCorrectSwypeWidth(int swipeWidth) {
        return swipeWidth > windowDelegate.getClientWidth() / TouchController.MINIMAL_LENGTH_LIMIT_RATE;
    }

    private boolean isCorrectSwypeAngle(int swipeWidth, int swipeHeight) {
        // FIX: a perfectly horizontal swipe has swipeHeight == 0 and the
        // previous integer division threw ArithmeticException. Zero height
        // with any positive width is the flattest possible swipe, so accept it.
        if (swipeHeight == 0) {
            return swipeWidth > 0;
        }
        return swipeWidth / swipeHeight > SWYPE_WIDTH_TO_HEIGHT_LIMIT_RATE;
    }

    public boolean isHorizontalSwipe() {
        int swipeWidth = Math.abs(touchModel.getStartX() - touchModel.getEndX());
        int swipeHeight = Math.abs(touchModel.getStartY() - touchModel.getEndY());
        return swipeHeight < swipeWidth;
    }

    public boolean isTouchReservation() {
        return touchModel.isTouchReservation();
    }

    public boolean isSwypeStarted() {
        return touchModel.getLastEndX() != touchModel.getStartX();
    }

    /** Stores the current touch position as the gesture's end point. */
    public void updateEndPoint(NativeEvent onTouchMoveEvent) {
        int y = touchEventReader.getScreenY(onTouchMoveEvent);
        int x = touchEventReader.getX(onTouchMoveEvent);
        touchModel.setEndX(x);
        touchModel.setEndY(y);
    }

    /** Fires PAGE_SWIPE_STARTED once per gesture and advances lastEndX. */
    public void updateAfterSwypeDetected() {
        if (!touchModel.isSwipeStarted()) {
            eventsBus.fireEvent(new PlayerEvent(PlayerEventTypes.PAGE_SWIPE_STARTED));
        }
        touchModel.setLastEndX(touchModel.getEndX());
        touchModel.setSwipeStarted(true);
    }

    /** Length of the last swipe increment as a percentage of the root panel width. */
    public float getSwypePercentLength() {
        int swypeWidth = Math.abs(touchModel.getLastEndX() - touchModel.getEndX());
        return ((float) swypeWidth / rootPanelDelegate.getOffsetWidth()) * PERCENT_MAX;
    }

    public boolean isSwypeDetected() {
        return touchModel.getLastEndX() != touchModel.getEndX() && touchModel.getLastEndX() > 0;
    }

    public boolean isSwipeRight() {
        return touchModel.getLastEndX() > touchModel.getEndX();
    }

    private boolean isVerticalSwipe() {
        // The page scrolled vertically since touch start -> treat as vertical.
        return Math.abs(windowDelegate.getScrollTop() - touchModel.getStartScrollTopPossition()) > 1;
    }

    public void resetTouchModel() {
        touchModel.setStartX(touchModel.getEndX());
        touchModel.setLastEndX(touchModel.getEndX());
        touchModel.setTouchReservation(false);
    }

    /** Records the gesture end point from the changed-touches list of the end event. */
    public void updateOnTouchEnd(NativeEvent event) {
        int y = touchEventReader.getFromChangedTouchesScreenY(event);
        int x = touchEventReader.getFromChangedTouchesX(event);
        touchModel.setEndX(x);
        touchModel.setEndY(y);
    }

    public void setTouchReservation(boolean touchReservation) {
        touchModel.setTouchReservation(touchReservation);
    }

    /** @return PREVIOUS for a rightward gesture, NEXT for leftward, null when no movement. */
    public NavigationButtonDirection getDirection() {
        NavigationButtonDirection direction = null;
        if (touchModel.getEndX() > touchModel.getStartX()) {
            direction = NavigationButtonDirection.PREVIOUS;
        } else if (touchModel.getStartX() > touchModel.getEndX()) {
            direction = NavigationButtonDirection.NEXT;
        }
        return direction;
    }

    public boolean canSwype(IMultiPageController multiPageController) {
        return !multiPageController.isZoomed() && !multiPageController.isAnimationRunning() && !touchModel.isTouchReservation() && !touchModel.isSwypeLock();
    }

    public boolean canMove(IMultiPageController multiPageController) {
        return canSwype(multiPageController) && !isVerticalSwipe() && !touchModel.isMultiTouch();
    }

    public void setSwypeLock(boolean swypeLock) {
        touchModel.setSwypeLock(swypeLock);
    }

    public void setSwypeStarted(boolean b) {
        touchModel.setSwipeStarted(b);
    }
}
|
package com.company.chapter2;
import java.util.Scanner;
/**
*
* @author BANK.CPE
*/
public class ShowEven {

    /**
     * Reads a size N from stdin, echoes it, and prints every even number in
     * 1..N. The Scanner is now closed via try-with-resources (it was leaked).
     */
    public static void main(String[] args) {
        try (Scanner scanner = new Scanner(System.in)) {
            int size = scanner.nextInt();
            System.out.println("size : " + size);
            // Step by 2 instead of testing i % 2 on every value; same output.
            for (int i = 2; i <= size; i += 2) {
                System.out.println("number : " + i);
            }
        }
    }
}
|
package by.homesite.kpparser.parsers;
import by.homesite.kpparser.model.FileInfo;
import by.homesite.kpparser.model.Film;
import by.homesite.kpparser.model.SearchResultItem;
import by.homesite.kpparser.net.HttpClient;
import by.homesite.kpparser.utils.Constants;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static by.homesite.kpparser.utils.Constants.CHARSET;
import static by.homesite.kpparser.utils.FilenameUtils.cleanHtml;
/**
* @author alex on 9/10/17.
*/
@Component(Constants.INPUT_SYSTEMS_IMDB)
public class IMDBParser implements Parser {
private static final String SEARCH_URL = "http:
private static final String FILM_INFO_URL = "http:
private static final Logger log = LoggerFactory.getLogger(IMDBParser.class);
private static final String TD_COUNTRY = "Country:";
private static final String TD_GENRES = "Genres:";
private static final String TD_DIRECTOR = "Director:";
private static final String TD_ROLES = "Stars:";
@Autowired
private HttpClient httpClient;
@Override
public List<SearchResultItem> searchFilms(FileInfo fileInfo) {
Document doc;
try {
doc = httpClient.get(SEARCH_URL + URLEncoder.encode(fileInfo.getTitle(), CHARSET));
} catch (IOException e) {
log.error("Can't get search results for {}", fileInfo.getName());
return null;
}
if (doc == null) {
return null;
}
Elements blocks = doc.select(".result_text");
if (blocks.size() > 0) {
List<SearchResultItem> result = new ArrayList();
for (Element block : blocks) {
SearchResultItem item = new SearchResultItem();
String blockContent = block.text();
item.setYear(findYear(blockContent));
Element link = block.select("a").first();
item.setTitle(link.text());
item.setUrl(FILM_INFO_URL + link.attr("href"));
result.add(item);
}
return result;
}
return null;
}
private String findYear(String blockContent) {
if (StringUtils.isEmpty(blockContent)) {
return "";
}
int startBracket = blockContent.indexOf("(");
int endBracket = blockContent.indexOf(")");
return blockContent.substring(startBracket + 1, endBracket);
}
@Override
public Film parseFilmInfo(SearchResultItem searchItem, FileInfo inputFile) {
Document doc;
Film film = new Film();
film.setFileName(inputFile.getName());
film.setYear(inputFile.getYear());
if (!StringUtils.isEmpty(searchItem.getUrl())) {
film.setUrl(searchItem.getUrl());
film.setTitle(searchItem.getTitle());
doc = httpClient.get(searchItem.getUrl());
if (doc == null) {
return film;
}
Element img = doc.select(".poster img").first();
if (img != null)
film.setImg(extractBigImg(img.attr("src")));
film.setCountry(extractTag(doc, ".txt-block", TD_COUNTRY));
film.setDirector(extractTag(doc, ".credit_summary_item", TD_DIRECTOR));
film.setGenre(extractTag(doc, ".see-more", TD_GENRES));
film.setRoles(extractTag(doc, ".credit_summary_item", TD_ROLES));
film.setDescription(doc.select("div[itemprop=description]").first().text());
}
return film;
}
private String extractBigImg(String src) {
return src.replaceFirst("([^@]*)\\.([^\\.]+)$", ".$2");
}
private String extractTag(Document doc, String selector, String tag) {
Elements info = doc.select(selector);
for (Element element: info) {
Element blockName = element.select("h4").first();
if (blockName != null && tag.equalsIgnoreCase(blockName.text())) {
Elements countries = element.select("a");
return cleanHtml(countries.stream().map(Element::text).collect(Collectors.joining(", ")));
}
}
return "";
}
}
|
package eu.ydp.empiria.player.client.module.media.progress;
import static eu.ydp.empiria.player.client.util.events.media.MediaEventTypes.ON_DURATION_CHANGE;
import static eu.ydp.empiria.player.client.util.events.media.MediaEventTypes.ON_END;
import static eu.ydp.empiria.player.client.util.events.media.MediaEventTypes.ON_FULL_SCREEN_SHOW_CONTROLS;
import static eu.ydp.empiria.player.client.util.events.media.MediaEventTypes.ON_STOP;
import static eu.ydp.empiria.player.client.util.events.media.MediaEventTypes.ON_TIME_UPDATE;
import static eu.ydp.empiria.player.client.util.events.media.MediaEventTypes.SET_CURRENT_TIME;
import javax.annotation.PostConstruct;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.dom.client.Style;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
import eu.ydp.empiria.player.client.module.media.button.AbstractMediaScroll;
import eu.ydp.empiria.player.client.module.media.button.SimpleMediaButton;
import eu.ydp.empiria.player.client.resources.StyleNameConstants;
import eu.ydp.empiria.player.client.style.ComputedStyle;
import eu.ydp.empiria.player.client.util.events.bus.EventsBus;
import eu.ydp.empiria.player.client.util.events.media.MediaEvent;
import eu.ydp.empiria.player.client.util.events.scope.CurrentPageScope;
import eu.ydp.empiria.player.client.util.position.PositionHelper;
public class MediaProgressBarImpl extends AbstractMediaScroll<MediaProgressBarImpl> implements MediaProgressBar {
interface MediaProgressBarUiBinder extends UiBinder<Widget, MediaProgressBarImpl> {}
private static MediaProgressBarUiBinder uiBinder = GWT.create(MediaProgressBarUiBinder.class);
@UiField(provided = true) protected SimpleMediaButton button;
@UiField protected FlowPanel progressBar;
@UiField protected FlowPanel mainProgressDiv;
@UiField protected FlowPanel beforeButton;
@UiField protected FlowPanel afterButton;
@Inject private StyleNameConstants styleNames;
@Inject protected EventsBus eventsBus;
@Inject private ElementSizeCalculator elementSizeCalculator;
@Inject private PositionHelper positionHelper;
@Inject private ComputedStyle computedStyle;
private ProgressBarUpdateEventHandler progressBarEventHandler;
private MediaProgressBarPositionCalculator progressBarPositionCalculator;
@PostConstruct
public void postConstruct() {
    // The button field is @UiField(provided = true), so it must be created
    // before UiBinder inflates the template.
    button = new SimpleMediaButton(styleNames.QP_MEDIA_CENTER_PROGRESS_BUTTON(), false);
    progressBarPositionCalculator = new MediaProgressBarPositionCalculator(this, computedStyle);
    initWidget(uiBinder.createAndBindUi(this));
}
/**
 * Width of the button displayed on the progress bar.
 *
 * @return the button width in pixels, or 0 when the button does not exist
 */
@Override
public int getButtonWidth() {
    return (button == null) ? 0 : elementSizeCalculator.getWidth(button);
}
@Override
public MediaProgressBarImpl getNewInstance() {
    // Plain, uninitialized instance.
    final MediaProgressBarImpl freshInstance = new MediaProgressBarImpl();
    return freshInstance;
}
@Override
public boolean isSupported() {
    // A draggable progress bar only makes sense when the media supports seeking.
    return getMediaAvailableOptions().isSeekSupported();
}
/**
 * Length of the draggable track: total progress bar width minus the
 * button width.
 *
 * @return the track width in pixels
 */
@Override
public int getScrollWidth() {
    return elementSizeCalculator.getWidth(mainProgressDiv) - getButtonWidth();
}
@Override
public void init() {
super.init();
if (isSupported()) {
progressBarEventHandler = new ProgressBarUpdateEventHandler(this);
CurrentPageScope scope = new CurrentPageScope();
eventsBus.addAsyncHandlerToSource(MediaEvent.getType(ON_TIME_UPDATE), getMediaWrapper(), progressBarEventHandler, scope);
eventsBus.addAsyncHandlerToSource(MediaEvent.getType(ON_DURATION_CHANGE), getMediaWrapper(), progressBarEventHandler, scope);
eventsBus.addAsyncHandlerToSource(MediaEvent.getType(ON_STOP), getMediaWrapper(), progressBarEventHandler, scope);
eventsBus.addAsyncHandlerToSource(MediaEvent.getType(ON_FULL_SCREEN_SHOW_CONTROLS), getMediaWrapper(), progressBarEventHandler, scope);
// nie zawsze zostanie wyzwolony timeupdate ze wzgledu na
// ograniczenie
// na 1s postepu wiec robimy to tu
ProgressBarEndEventHandler handlerForEnd = new ProgressBarEndEventHandler(this, eventsBus);
eventsBus.addHandlerToSource(MediaEvent.getType(ON_END), getMediaWrapper(), handlerForEnd, new CurrentPageScope());
} else {
progressBar.setStyleName(styleNames.QP_MEDIA_PROGRESSBAR() + UNSUPPORTED_SUFFIX);
progressBar.clear();
}
}
/**
* ustawia suwak na odpowiedniej pozycji
*
* @param positionX
*/
@Override
public void moveScroll(int positionX) {// NOPMD
moveScroll(positionX, false);
}
/**
* ustawia suwak na odpowiedniej pozycji
*
* @param positionX
*/
protected void moveScroll(final int positionX, boolean force) {// NOPMD
if (!isPressed() || force) {
setButtonPosition(positionX);
setBeforeButtonPositionAndStyle(positionX);
setAfterButtonPositionAndStyle(positionX);
}
}
private void setAfterButtonPositionAndStyle(final int leftOffsetForProgressButton) {
Style afterButtonStyle = afterButton.getElement().getStyle();
int leftPositionForAfterProgress = progressBarPositionCalculator.getLeftPositionForAfterProgressElement(leftOffsetForProgressButton);
afterButtonStyle.setWidth(progressBarPositionCalculator.getWidthForAfterProgressElement(leftOffsetForProgressButton), Unit.PX);
afterButtonStyle.setLeft(leftPositionForAfterProgress, Unit.PX);
}
private void setBeforeButtonPositionAndStyle(final int leftOffsetForProgressButton) {
Style beforeButtonStyle = beforeButton.getElement().getStyle();
beforeButtonStyle.setWidth(leftOffsetForProgressButton + getHalfOfProgressButtonWidth(), Unit.PX);
beforeButtonStyle.setLeft( progressBarPositionCalculator.getLeftPositionForBeforeProgressElement(leftOffsetForProgressButton), Unit.PX);
}
private void setButtonPosition(final int leftOffsetForProgressButton) {
int leftOffsetForProgress = progressBarPositionCalculator.calculateCurrentPosistionForScroll(leftOffsetForProgressButton);
button.getElement().getStyle().setLeft(leftOffsetForProgress, Unit.PX);
}
private int getHalfOfProgressButtonWidth() {
return getButtonWidth() / 2;
}
/**
* @param positionX
*/
protected void seekInMedia(int positionX) {
if (isAttached()) {
double position = progressBarPositionCalculator.calculateCurrentPosistion(positionX);
fireSetCurrentTimeEvent(position);
}
}
private void fireSetCurrentTimeEvent(double position) {
MediaEvent event = new MediaEvent(SET_CURRENT_TIME, getMediaWrapper());
event.setCurrentTime(position);
eventsBus.fireAsyncEventFromSource(event, getMediaWrapper());
}
protected int getPositionX(NativeEvent event) {
return positionHelper.getPositionX(event, mainProgressDiv.getElement());
}
@Override
protected void setPosition(NativeEvent event) {
if (isPressed() && isAttached()) {
int positionX = getPositionX(event);
int positionNonNegative = Math.max(positionX, 0);
positionNonNegative -= getHalfOfProgressButtonWidth();
seekInMedia(positionNonNegative);
progressBarEventHandler.resetCurrentTime();
}
}
@Override
public void setStyleNames() {
if (isInFullScreen()) {
progressBar.removeStyleName(styleNames.QP_MEDIA_PROGRESSBAR());
progressBar.addStyleName(styleNames.QP_MEDIA_PROGRESSBAR() + FULL_SCREEN_SUFFIX);
}
}
}
|
package com.fishercoder.solutions;
public class _60 {
    public static class Solution1 {
        /**
         * Returns the k-th permutation (1-indexed) of the sequence [1, 2, ..., n].
         *
         * For each output position there are (n-i-1)! permutations per leading
         * digit, so the digit index is k / (n-i-1)! and the remainder carries
         * over to the next position.
         *
         * @param n size of the sequence, n >= 1
         * @param k 1-based rank of the desired permutation, 1 <= k <= n!
         * @return the k-th permutation as a string of digits
         */
        public String getPermutation(int n, int k) {
            // Extra slot so the left-shift below never reads out of bounds.
            int[] nums = new int[n + 1];
            int permCount = 1;
            for (int i = 0; i < n; i++) {
                nums[i] = i + 1; // put 1, 2, 3 ... n into nums[]
                permCount *= (i + 1); // accumulates n!
            }
            // Fix: the original had a bare "k" here (syntax error); the
            // algorithm requires converting the 1-based rank to 0-based.
            k--;
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < n; i++) {
                permCount = permCount / (n - i); // (n-i-1)! permutations per choice
                int idx = k / permCount; // index of the digit for this position
                sb.append(nums[idx]);
                // Remove nums[idx] by shifting the remaining entries left.
                for (int j = idx; j < n - i; j++) {
                    nums[j] = nums[j + 1];
                }
                k %= permCount;
            }
            return sb.toString();
        }
    }
}
|
package ch.tkuhn.nanopub.server;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.GZIPInputStream;
import net.trustyuri.TrustyUriUtils;
import org.apache.commons.lang.time.StopWatch;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClientBuilder;
import org.nanopub.MultiNanopubRdfHandler;
import org.nanopub.MultiNanopubRdfHandler.NanopubHandler;
import org.nanopub.Nanopub;
import org.nanopub.NanopubImpl;
import org.nanopub.extra.server.NanopubServerUtils;
import org.nanopub.extra.server.NanopubSurfacePattern;
import org.openrdf.rio.RDFFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Fetches nanopublications from a single peer server and loads them into the
 * local database. Each {@link #run()} invocation processes at most
 * {@code processPagesPerRun} pages so other peers get a turn;
 * {@link #isFinished()} reports whether this peer is fully synchronized.
 */
public class CollectNanopubs {

    // Maximum number of peer pages handled in one run() invocation.
    private static final int processPagesPerRun = 10;

    private static NanopubDb db = NanopubDb.get();
    // Only nanopub URIs matching this server's pattern are loaded.
    private static NanopubSurfacePattern ourPattern = ServerConf.getInfo().getNanopubSurfacePattern();

    private Logger logger = LoggerFactory.getLogger(this.getClass());

    private ServerInfo peerInfo;   // the peer being scanned
    private ScanPeers parent;      // owner; pinged via stillAlive() to signal liveness
    private int peerPageSize;
    private boolean isFinished = false;
    private int loaded;            // nanopubs loaded while processing the current page
    private StopWatch watch;       // times the current page's download
    private long nextNp;           // absolute position of the next nanopub on the peer

    public CollectNanopubs(ServerInfo peerInfo, ScanPeers parent) {
        this.peerInfo = peerInfo;
        this.parent = parent;
        parent.stillAlive();
    }

    /**
     * Determines which pages of the peer contain unseen nanopubs (based on the
     * last recorded peer state) and processes up to processPagesPerRun of them.
     * Any failure marks this collector finished and deprioritizes the peer.
     */
    public void run() {
        parent.stillAlive();
        try {
            logger.info("Checking if there are new nanopubs at " + peerInfo.getPublicUrl());
            int startFromPage = 1;
            long startFromNp = 0;
            long newNanopubsCount;
            peerPageSize = peerInfo.getPageSize();
            long peerNanopubNo = peerInfo.getNextNanopubNo();
            long peerJid = peerInfo.getJournalId();
            // Pair of (journal id, next nanopub position) last seen for this peer.
            Pair<Long,Long> lastSeenPeerState = db.getLastSeenPeerState(peerInfo.getPublicUrl());
            if (lastSeenPeerState != null) {
                startFromNp = lastSeenPeerState.getRight();
                newNanopubsCount = peerNanopubNo - startFromNp;
                // Positions are only comparable while the peer's journal is unchanged.
                if (lastSeenPeerState.getLeft() == peerJid) {
                    if (startFromNp == peerNanopubNo) {
                        logger.info("Already up-to-date");
                        isFinished = true;
                        return;
                    }
                    startFromPage = (int) (startFromNp/peerPageSize) + 1;
                    logger.info(newNanopubsCount + " new nanopubs");
                } else {
                    logger.info(newNanopubsCount + " nanopubs in total (unknown journal)");
                }
            } else {
                newNanopubsCount = peerNanopubNo;
                logger.info(newNanopubsCount + " nanopubs in total (unknown peer state)");
            }
            int lastPage = (int) (peerNanopubNo/peerPageSize) + 1;
            // Entries before this absolute position were already seen; only
            // relevant for the first, partially-processed page.
            long ignoreBeforePos = startFromNp;
            logger.info("Starting from page " + startFromPage + " of " + lastPage);
            int pageCountThisRun = 0;
            boolean interrupted = false;
            for (int p = startFromPage ; p <= lastPage ; p++) {
                pageCountThisRun++;
                if (pageCountThisRun > processPagesPerRun) {
                    interrupted = true;
                    break;
                }
                processPage(p, p == lastPage, ignoreBeforePos);
                ignoreBeforePos = 0;
            }
            if (interrupted) {
                logger.info("To be continued (see if other peers have new nanopubs)");
            } else {
                logger.info("Done");
                isFinished = true;
            }
        } catch (Exception ex) {
            logger.error(ex.getMessage(), ex);
            isFinished = true;
            // Deprioritize this peer after a failure.
            ScanPeers.lastTimeMeasureMap.put(peerInfo.getPublicUrl(), Float.MAX_VALUE);
        }
    }

    /** @return true when this peer is fully processed (or failed for this run) */
    public boolean isFinished() {
        return isFinished;
    }

    /**
     * Processes one page of the peer's nanopub index: figures out which entries
     * are new, then downloads them either as one gzipped package (when more
     * than 5 are new and the page is complete) or individually. Finally
     * records the new peer position in the local database.
     *
     * @param page 1-based page number on the peer
     * @param isLastPage true for the peer's final (possibly incomplete) page
     * @param ignoreBeforePos absolute positions below this value are skipped
     */
    private void processPage(int page, boolean isLastPage, long ignoreBeforePos) throws Exception {
        parent.stillAlive();
        logger.info("Process page " + page + " from " + peerInfo.getPublicUrl());
        loaded = 0;
        nextNp = (page-1) * peerPageSize;
        List<String> toLoad = new ArrayList<>();
        boolean downloadAsPackage = false;
        for (String nanopubUri : NanopubServerUtils.loadNanopubUriList(peerInfo, page)) {
            parent.stillAlive();
            if (nextNp >= ignoreBeforePos) {
                String ac = TrustyUriUtils.getArtifactCode(nanopubUri);
                if (ac != null && ourPattern.matchesUri(nanopubUri) && !db.hasNanopub(ac)) {
                    toLoad.add(ac);
                    if (!isLastPage && toLoad.size() > 5) {
                        // Download entire package if more than 5 nanopubs are new
                        downloadAsPackage = true;
                        // Rewind: package processing re-counts from the page start.
                        nextNp = (page-1) * peerPageSize;
                        break;
                    }
                }
            }
            nextNp++;
        }
        RequestConfig requestConfig = RequestConfig.custom().setConnectTimeout(5 * 1000).build();
        HttpClient c = HttpClientBuilder.create().setDefaultRequestConfig(requestConfig).build();
        watch = new StopWatch();
        watch.start();
        if (downloadAsPackage) {
            logger.info("Download page " + page + " as compressed package...");
            HttpGet get = new HttpGet(peerInfo.getPublicUrl() + "package.gz?page=" + page);
            get.setHeader("Accept", "application/x-gzip");
            HttpResponse resp = c.execute(get);
            InputStream in = null;
            try {
                if (wasSuccessful(resp)) {
                    in = new GZIPInputStream(resp.getEntity().getContent());
                } else {
                    logger.info("Failed. Trying uncompressed package...");
                    // This is for compability with older versions; to be removed at some point...
                    get = new HttpGet(peerInfo.getPublicUrl() + "package?page=" + page);
                    get.setHeader("Accept", "application/trig");
                    resp = c.execute(get);
                    if (!wasSuccessful(resp)) {
                        logger.error("HTTP request failed: " + resp.getStatusLine().getReasonPhrase());
                        recordTime();
                        throw new RuntimeException(resp.getStatusLine().getReasonPhrase());
                    }
                    in = resp.getEntity().getContent();
                }
                MultiNanopubRdfHandler.process(RDFFormat.TRIG, in, new NanopubHandler() {
                    @Override
                    public void handleNanopub(Nanopub np) {
                        nextNp++;
                        if (watch.getTime() > 5 * 60 * 1000) {
                            // Downloading the whole package should never take more than 5 minutes.
                            logger.error("Downloading package took too long; interrupting");
                            recordTime();
                            throw new RuntimeException("Downloading package took too long; interrupting");
                        }
                        // The package may contain nanopubs this server does not accept.
                        if (!ourPattern.matchesUri(np.getUri().stringValue())) return;
                        try {
                            loadNanopub(np);
                        } catch (Exception ex) {
                            throw new RuntimeException(ex);
                        }
                    }
                });
            } finally {
                if (in != null) in.close();
            }
        } else {
            logger.info("Download " + toLoad.size() + " nanopubs individually...");
            for (String ac : toLoad) {
                parent.stillAlive();
                HttpGet get = new HttpGet(peerInfo.getPublicUrl() + ac);
                get.setHeader("Accept", "application/trig");
                HttpResponse resp = c.execute(get);
                if (!wasSuccessful(resp)) {
                    logger.error("HTTP request failed: " + resp.getStatusLine().getReasonPhrase());
                    recordTime();
                    throw new RuntimeException(resp.getStatusLine().getReasonPhrase());
                }
                InputStream in = null;
                try {
                    in = resp.getEntity().getContent();
                    loadNanopub(new NanopubImpl(in, RDFFormat.TRIG));
                } finally {
                    if (in != null) in.close();
                }
            }
        }
        recordTime();
        logger.info("Update peer state: " + peerInfo.getPublicUrl() + " at position " + nextNp);
        db.updatePeerState(peerInfo, nextNp);
    }

    /**
     * Stops the page stopwatch and records the average load time per nanopub
     * for peer prioritization. Failures here are deliberately ignored (timing
     * is best-effort and must not abort the sync).
     */
    private void recordTime() {
        try {
            watch.stop();
            Float avg = null;
            if (loaded > 0) {
                avg = (float) watch.getTime() / loaded;
                ScanPeers.lastTimeMeasureMap.put(peerInfo.getPublicUrl(), avg);
            }
            logger.info("Time measurement: " + watch.getTime() + " for " + loaded + " nanopubs (average: " + avg + ")");
        } catch (Exception ex) {
            // ignore
        }
    }

    // True for any 2xx HTTP status.
    private boolean wasSuccessful(HttpResponse resp) {
        int c = resp.getStatusLine().getStatusCode();
        return c >= 200 && c < 300;
    }

    // Stores the nanopub locally and counts it for the timing statistics.
    private void loadNanopub(Nanopub np) throws Exception {
        db.loadNanopub(np);
        loaded++;
    }
}
|
package com.github.pagehelper;
import org.apache.ibatis.builder.SqlSourceBuilder;
import org.apache.ibatis.mapping.*;
import org.apache.ibatis.reflection.MetaObject;
import org.apache.ibatis.reflection.factory.DefaultObjectFactory;
import org.apache.ibatis.reflection.factory.ObjectFactory;
import org.apache.ibatis.reflection.wrapper.DefaultObjectWrapperFactory;
import org.apache.ibatis.reflection.wrapper.ObjectWrapperFactory;
import org.apache.ibatis.scripting.xmltags.DynamicContext;
import org.apache.ibatis.scripting.xmltags.DynamicSqlSource;
import org.apache.ibatis.scripting.xmltags.MixedSqlNode;
import org.apache.ibatis.scripting.xmltags.SqlNode;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.type.TypeHandlerRegistry;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Pagination support for MyBatis: derives count and page-limited
 * MappedStatements from an original statement and injects the page bounds as
 * synthetic parameters, using a dialect-specific SQL rewriter.
 */
@SuppressWarnings({"rawtypes", "unchecked"})
public class SqlUtil {
    private static final List<ResultMapping> EMPTY_RESULTMAPPING = new ArrayList<ResultMapping>(0);
    // Suffix appended to a MappedStatement id for the generated paging statement.
    private static final String SUFFIX_PAGE = "_PageHelper";
    // Suffix for the generated count statement id.
    private static final String SUFFIX_COUNT = SUFFIX_PAGE + "_Count";
    // Names of the two synthetic page-bound parameters added to the parameter map.
    private static final String PAGEPARAMETER_FIRST = "First" + SUFFIX_PAGE;
    private static final String PAGEPARAMETER_SECOND = "Second" + SUFFIX_PAGE;
    private static final TypeHandlerRegistry TYPE_HANDLER_REGISTRY = new TypeHandlerRegistry();
    private static final ObjectFactory DEFAULT_OBJECT_FACTORY = new DefaultObjectFactory();
    private static final ObjectWrapperFactory DEFAULT_OBJECT_WRAPPER_FACTORY = new DefaultObjectWrapperFactory();
    /**
     * Wraps an object in a MyBatis MetaObject for reflective property access.
     *
     * @param object object to wrap
     * @return MetaObject view of the object
     */
    private static MetaObject forObject(Object object) {
        return MetaObject.forObject(object, DEFAULT_OBJECT_FACTORY, DEFAULT_OBJECT_WRAPPER_FACTORY);
    }
    // Dialect-specific SQL rewriter chosen in the constructor.
    private SqlUtil.Parser sqlParser;
    /** Supported database dialects. */
    public enum Dialect {
        mysql, mariadb, sqlite, oracle, hsqldb, postgresql
    }
    /**
     * Creates a SqlUtil for the named dialect. Prefers the jsqlparser-based
     * SqlParser when that optional library is on the classpath; otherwise
     * falls back to the built-in SimpleParser implementations.
     *
     * @param strDialect one of the {@link Dialect} names
     * @throws IllegalArgumentException when the dialect is missing or unknown
     */
    public SqlUtil(String strDialect) {
        if (strDialect == null || "".equals(strDialect)) {
            throw new IllegalArgumentException("Mybatisdialect!");
        }
        try {
            Dialect dialect = Dialect.valueOf(strDialect);
            String sqlParserClass = this.getClass().getPackage().getName() + ".SqlParser";
            try {
                // SqlParser requires jsqlparser-x.x.x.jar; probe for it first.
                Class.forName("net.sf.jsqlparser.statement.select.Select");
                sqlParser = (Parser) Class.forName(sqlParserClass).getConstructor(Dialect.class).newInstance(dialect);
            } catch (Exception e) {
                // Deliberately ignored: jsqlparser is optional; fall through to SimpleParser.
            }
            if (sqlParser == null) {
                sqlParser = SimpleParser.newParser(dialect);
            }
        } catch (IllegalArgumentException e) {
            // Unknown dialect: build the list of valid names for the error message.
            String dialects = null;
            for (Dialect d : Dialect.values()) {
                if (dialects == null) {
                    dialects = d.toString();
                } else {
                    dialects += "," + d;
                }
            }
            throw new IllegalArgumentException("Mybatisdialect[" + dialects + "]");
        }
    }
    /**
     * Builds the parameter map carrying the page bounds for the rewritten SQL.
     *
     * @param parameterObject original statement parameter (may be null)
     * @param boundSql bound SQL of the statement
     * @param page page descriptor providing the bounds
     * @return parameter map including the synthetic page parameters
     */
    public Map setPageParameter(Object parameterObject, BoundSql boundSql, Page page) {
        return sqlParser.setPageParameter(parameterObject, boundSql, page);
    }
    /**
     * Returns (creating and caching if needed) the count MappedStatement
     * derived from the given statement.
     *
     * @param ms original statement
     * @param boundSql bound SQL of the original statement
     * @return count statement
     */
    public MappedStatement getCountMappedStatement(MappedStatement ms, BoundSql boundSql) {
        return getMappedStatement(ms, boundSql, SUFFIX_COUNT);
    }
    /**
     * Returns (creating and caching if needed) the paging MappedStatement
     * derived from the given statement.
     *
     * @param ms original statement
     * @param boundSql bound SQL of the original statement
     * @return paging statement
     */
    public MappedStatement getPageMappedStatement(MappedStatement ms, BoundSql boundSql) {
        return getMappedStatement(ms, boundSql, SUFFIX_PAGE);
    }
    /**
     * Dialect-specific SQL rewriter: produces count and page variants of a
     * query and populates the page parameters.
     */
    public static interface Parser {
        void isSupportedSql(String sql);
        String getCountSql(String sql);
        String getPageSql(String sql);
        Map setPageParameter(Object parameterObject, BoundSql boundSql, Page page);
    }
    /** Base implementation shared by the built-in dialect parsers. */
    public static abstract class SimpleParser implements Parser {
        /** Factory: picks the parser implementation for the dialect. */
        public static Parser newParser(Dialect dialect) {
            Parser parser = null;
            switch (dialect) {
                case mysql:
                case mariadb:
                case sqlite:
                    parser = new MysqlParser();
                    break;
                case oracle:
                    parser = new OracleParser();
                    break;
                case hsqldb:
                    parser = new HsqldbParser();
                    break;
                case postgresql:
                default:
                    parser = new PostgreSQLParser();
            }
            return parser;
        }
        // Rejects SQL that cannot be safely wrapped (throws on "... FOR UPDATE").
        public void isSupportedSql(String sql) {
            if (sql.trim().toUpperCase().endsWith("FOR UPDATE")) {
                throw new RuntimeException("for updatesql");
            }
        }
        /**
         * Wraps the query in a SELECT COUNT(*) subquery.
         *
         * @param sql original SQL
         * @return count SQL
         */
        public String getCountSql(final String sql) {
            isSupportedSql(sql);
            StringBuilder stringBuilder = new StringBuilder(sql.length() + 40);
            stringBuilder.append("select count(*) from (");
            stringBuilder.append(sql);
            stringBuilder.append(") tmp_count");
            return stringBuilder.toString();
        }
        /**
         * Appends the dialect's paging clause to the query.
         *
         * @param sql original SQL
         * @return paging SQL with two positional page-bound placeholders
         */
        public abstract String getPageSql(String sql);
        /**
         * Builds the parameter map. Non-map parameters are exploded via their
         * getters (unless a TypeHandler exists for the whole object) so that
         * existing placeholders keep resolving; subclasses then add the two
         * synthetic page parameters in dialect-specific order.
         */
        public Map setPageParameter(Object parameterObject, BoundSql boundSql, Page page) {
            Map paramMap = null;
            if (parameterObject == null) {
                paramMap = new HashMap();
            } else if (parameterObject instanceof Map) {
                paramMap = (Map) parameterObject;
            } else {
                paramMap = new HashMap();
                // If a TypeHandler handles the whole object, MyBatis binds it
                // directly; otherwise copy every getter value into the map.
                boolean hasTypeHandler = TYPE_HANDLER_REGISTRY.hasTypeHandler(parameterObject.getClass());
                if (!hasTypeHandler) {
                    MetaObject metaObject = forObject(parameterObject);
                    for (String name : metaObject.getGetterNames()) {
                        paramMap.put(name, metaObject.getValue(name));
                    }
                }
                if (boundSql.getParameterMappings() != null && boundSql.getParameterMappings().size() > 0) {
                    for (ParameterMapping parameterMapping : boundSql.getParameterMappings()) {
                        String name = parameterMapping.getProperty();
                        if (!name.equals(PAGEPARAMETER_FIRST)
                                && !name.equals(PAGEPARAMETER_SECOND)
                                && paramMap.get(name) == null) {
                            // Fall back to binding the whole parameter object
                            // when the mapping expects its type directly.
                            if (hasTypeHandler
                                    || parameterMapping.getJavaType().isAssignableFrom(parameterObject.getClass())) {
                                paramMap.put(name, parameterObject);
                            }
                        }
                    }
                }
            }
            return paramMap;
        }
    }
    // MySQL family (also MariaDB/SQLite): LIMIT offset,count
    private static class MysqlParser extends SimpleParser {
        @Override
        public String getPageSql(String sql) {
            StringBuilder sqlBuilder = new StringBuilder(sql.length() + 14);
            sqlBuilder.append(sql);
            sqlBuilder.append(" limit ?,?");
            return sqlBuilder.toString();
        }
        @Override
        public Map setPageParameter(Object parameterObject, BoundSql boundSql, Page page) {
            Map paramMap = super.setPageParameter(parameterObject, boundSql, page);
            paramMap.put(PAGEPARAMETER_FIRST, page.getStartRow());
            paramMap.put(PAGEPARAMETER_SECOND, page.getPageSize());
            return paramMap;
        }
    }
    // Oracle: ROWNUM-based double-nested query
    private static class OracleParser extends SimpleParser {
        @Override
        public String getPageSql(String sql) {
            StringBuilder sqlBuilder = new StringBuilder(sql.length() + 120);
            sqlBuilder.append("select * from ( select tmp_page.*, rownum row_id from ( ");
            sqlBuilder.append(sql);
            sqlBuilder.append(" ) tmp_page where rownum <= ? ) where row_id > ?");
            return sqlBuilder.toString();
        }
        @Override
        public Map setPageParameter(Object parameterObject, BoundSql boundSql, Page page) {
            Map paramMap = super.setPageParameter(parameterObject, boundSql, page);
            paramMap.put(PAGEPARAMETER_FIRST, page.getEndRow());
            paramMap.put(PAGEPARAMETER_SECOND, page.getStartRow());
            return paramMap;
        }
    }
    // HSQLDB: LIMIT count OFFSET start
    private static class HsqldbParser extends SimpleParser {
        @Override
        public String getPageSql(String sql) {
            StringBuilder sqlBuilder = new StringBuilder(sql.length() + 20);
            sqlBuilder.append(sql);
            sqlBuilder.append(" limit ? offset ?");
            return sqlBuilder.toString();
        }
        @Override
        public Map setPageParameter(Object parameterObject, BoundSql boundSql, Page page) {
            Map paramMap = super.setPageParameter(parameterObject, boundSql, page);
            paramMap.put(PAGEPARAMETER_FIRST, page.getPageSize());
            paramMap.put(PAGEPARAMETER_SECOND, page.getStartRow());
            return paramMap;
        }
    }
    // PostgreSQL: LIMIT count OFFSET start
    private static class PostgreSQLParser extends SimpleParser {
        @Override
        public String getPageSql(String sql) {
            StringBuilder sqlBuilder = new StringBuilder(sql.length() + 14);
            sqlBuilder.append(sql);
            sqlBuilder.append(" limit ? offset ?");
            return sqlBuilder.toString();
        }
        @Override
        public Map setPageParameter(Object parameterObject, BoundSql boundSql, Page page) {
            Map paramMap = super.setPageParameter(parameterObject, boundSql, page);
            paramMap.put(PAGEPARAMETER_FIRST, page.getPageSize());
            paramMap.put(PAGEPARAMETER_SECOND, page.getStartRow());
            return paramMap;
        }
    }
    /**
     * SqlSource that always returns a fixed, pre-built BoundSql.
     */
    private class BoundSqlSqlSource implements SqlSource {
        BoundSql boundSql;
        public BoundSqlSqlSource(BoundSql boundSql) {
            this.boundSql = boundSql;
        }
        public BoundSql getBoundSql(Object parameterObject) {
            return boundSql;
        }
        public BoundSql getBoundSql() {
            return boundSql;
        }
    }
    /**
     * SqlSource for dynamic statements: re-evaluates the dynamic SQL tree per
     * invocation, then applies the count or page rewrite to the result.
     */
    private class MyDynamicSqlSource implements SqlSource {
        private Configuration configuration;
        private SqlNode rootSqlNode;
        /**
         * true: produce the count variant; false: produce the page variant.
         */
        private Boolean count;
        public MyDynamicSqlSource(Configuration configuration, SqlNode rootSqlNode, Boolean count) {
            this.configuration = configuration;
            this.rootSqlNode = rootSqlNode;
            this.count = count;
        }
        public BoundSql getBoundSql(Object parameterObject) {
            // Evaluate the dynamic SQL tree against the current parameter.
            DynamicContext context = new DynamicContext(configuration, parameterObject);
            rootSqlNode.apply(context);
            SqlSourceBuilder sqlSourceParser = new SqlSourceBuilder(configuration);
            Class<?> parameterType = parameterObject == null ? Object.class : parameterObject.getClass();
            SqlSource sqlSource = sqlSourceParser.parse(context.getSql(), parameterType, context.getBindings());
            BoundSql boundSql = sqlSource.getBoundSql(parameterObject);
            // Carry over <bind> variables so placeholder resolution still works.
            for (Map.Entry<String, Object> entry : context.getBindings().entrySet()) {
                boundSql.setAdditionalParameter(entry.getKey(), entry.getValue());
            }
            BoundSqlSqlSource boundSqlSqlSource = new BoundSqlSqlSource(boundSql);
            if (count) {
                boundSqlSqlSource = getCountSqlSource(boundSqlSqlSource);
            } else {
                boundSqlSqlSource = getPageSqlSource(configuration, boundSqlSqlSource);
            }
            return boundSqlSqlSource.getBoundSql();
        }
    }
    /**
     * Returns the derived MappedStatement (count or page, depending on the
     * suffix), creating and registering it on first use. Registration races
     * are tolerated: a concurrent addMappedStatement failure is ignored.
     *
     * @param ms original statement
     * @param boundSql bound SQL of the original statement
     * @param suffix SUFFIX_COUNT or SUFFIX_PAGE
     * @return derived statement
     */
    private MappedStatement getMappedStatement(MappedStatement ms, BoundSql boundSql, String suffix) {
        MappedStatement qs = null;
        try {
            qs = ms.getConfiguration().getMappedStatement(ms.getId() + suffix);
        } catch (Exception e) {
            //ignore - statement not registered yet
        }
        if (qs == null) {
            // Build and cache the derived MappedStatement.
            qs = newMappedStatement(ms, getNewSqlSource(ms, new BoundSqlSqlSource(boundSql), suffix), suffix);
            try {
                ms.getConfiguration().addMappedStatement(qs);
            } catch (Exception e) {
                //ignore - concurrent registration
            }
        }
        return qs;
    }
    /**
     * Clones the original MappedStatement under a new id with the new
     * SqlSource; the count variant additionally gets an int result map.
     *
     * @param ms original statement
     * @param newSqlSource rewritten SQL source
     * @param suffix SUFFIX_COUNT or SUFFIX_PAGE
     * @return the new statement
     */
    private MappedStatement newMappedStatement(MappedStatement ms, SqlSource newSqlSource, String suffix) {
        String id = ms.getId() + suffix;
        MappedStatement.Builder builder = new MappedStatement.Builder(ms.getConfiguration(), id, newSqlSource, ms.getSqlCommandType());
        builder.resource(ms.getResource());
        builder.fetchSize(ms.getFetchSize());
        builder.statementType(ms.getStatementType());
        builder.keyGenerator(ms.getKeyGenerator());
        if (ms.getKeyProperties() != null && ms.getKeyProperties().length != 0) {
            StringBuilder keyProperties = new StringBuilder();
            for (String keyProperty : ms.getKeyProperties()) {
                keyProperties.append(keyProperty).append(",");
            }
            keyProperties.delete(keyProperties.length() - 1, keyProperties.length());
            builder.keyProperty(keyProperties.toString());
        }
        builder.timeout(ms.getTimeout());
        builder.parameterMap(ms.getParameterMap());
        // NOTE(review): identity comparison on the suffix works only because
        // callers always pass the SUFFIX_* constants; .equals would be safer.
        if (suffix == SUFFIX_PAGE) {
            builder.resultMaps(ms.getResultMaps());
        } else {
            // Count statement always maps to a single int.
            List<ResultMap> resultMaps = new ArrayList<ResultMap>();
            ResultMap resultMap = new ResultMap.Builder(ms.getConfiguration(), id, int.class, EMPTY_RESULTMAPPING).build();
            resultMaps.add(resultMap);
            builder.resultMaps(resultMaps);
        }
        builder.resultSetType(ms.getResultSetType());
        builder.cache(ms.getCache());
        builder.flushCacheRequired(ms.isFlushCacheRequired());
        builder.useCache(ms.isUseCache());
        return builder.build();
    }
    /**
     * Whether the statement uses dynamic SQL.
     *
     * @param ms statement to inspect
     * @return true when the SqlSource is a DynamicSqlSource
     */
    public boolean isDynamic(MappedStatement ms) {
        return ms.getSqlSource() instanceof DynamicSqlSource;
    }
    /**
     * Chooses the SqlSource for the derived statement: dynamic statements get
     * a MyDynamicSqlSource wrapping the original SQL node tree; static ones
     * get their SQL rewritten immediately.
     *
     * @param ms original statement
     * @param newSqlSource fixed-BoundSql source built from the original SQL
     * @param suffix SUFFIX_COUNT or SUFFIX_PAGE
     * @return the SqlSource for the derived statement
     */
    private SqlSource getNewSqlSource(MappedStatement ms, BoundSqlSqlSource newSqlSource, String suffix) {
        // For dynamic SQL (built by XMLLanguageDriver/XMLScriptBuilder) the
        // root SqlNode must be re-applied per invocation, so wrap it.
        if (isDynamic(ms)) {
            MetaObject msObject = forObject(ms);
            SqlNode sqlNode = (SqlNode) msObject.getValue("sqlSource.rootSqlNode");
            MixedSqlNode mixedSqlNode = null;
            if (sqlNode instanceof MixedSqlNode) {
                mixedSqlNode = (MixedSqlNode) sqlNode;
            } else {
                List<SqlNode> contents = new ArrayList<SqlNode>(1);
                contents.add(sqlNode);
                mixedSqlNode = new MixedSqlNode(contents);
            }
            // NOTE(review): same intentional identity comparison as above.
            return new MyDynamicSqlSource(ms.getConfiguration(), mixedSqlNode, suffix == SUFFIX_COUNT);
        }
        // Static SQL, paging variant.
        else if (suffix == SUFFIX_PAGE) {
            return getPageSqlSource(ms.getConfiguration(), newSqlSource);
        }
        // Static SQL, count variant.
        else {
            return getCountSqlSource(newSqlSource);
        }
    }
    /**
     * Rewrites the source's SQL into the paging form and appends the two
     * synthetic page-bound parameter mappings.
     *
     * @param configuration MyBatis configuration
     * @param newSqlSource source to mutate in place
     * @return the same source, mutated
     */
    private BoundSqlSqlSource getPageSqlSource(Configuration configuration, BoundSqlSqlSource newSqlSource) {
        String sql = newSqlSource.getBoundSql().getSql();
        // Replace the SQL and extend the parameter mappings reflectively
        // (BoundSql has no setters for either).
        MetaObject sqlObject = forObject(newSqlSource);
        sqlObject.setValue("boundSql.sql", sqlParser.getPageSql(sql));
        List<ParameterMapping> newParameterMappings = new ArrayList<ParameterMapping>();
        newParameterMappings.addAll(newSqlSource.getBoundSql().getParameterMappings());
        newParameterMappings.add(new ParameterMapping.Builder(configuration, PAGEPARAMETER_FIRST, Integer.class).build());
        newParameterMappings.add(new ParameterMapping.Builder(configuration, PAGEPARAMETER_SECOND, Integer.class).build());
        sqlObject.setValue("boundSql.parameterMappings", newParameterMappings);
        return newSqlSource;
    }
    /**
     * Rewrites the source's SQL into the count form.
     *
     * @param newSqlSource source to mutate in place
     * @return the same source, mutated
     */
    private BoundSqlSqlSource getCountSqlSource(BoundSqlSqlSource newSqlSource) {
        String sql = newSqlSource.getBoundSql().getSql();
        MetaObject sqlObject = forObject(newSqlSource);
        sqlObject.setValue("boundSql.sql", sqlParser.getCountSql(sql));
        return newSqlSource;
    }
    /**
     * Debug helper: prints the count and page rewrites of the given SQL.
     *
     * @param dialet dialect name (sic)
     * @param originalSql SQL to rewrite
     */
    public static void testSql(String dialet, String originalSql) {
        SqlUtil sqlUtil = new SqlUtil(dialet);
        String countSql = sqlUtil.sqlParser.getCountSql(originalSql);
        System.out.println(countSql);
        String pageSql = sqlUtil.sqlParser.getPageSql(originalSql);
        System.out.println(pageSql);
    }
}
|
package checkdep.check;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
import jdepend.framework.DependencyConstraint;
import checkdep.common.JDependDependency;
import checkdep.value.depend.Dependencies;
import checkdep.value.depend.Dependency;
import checkdep.value.depend.PackageName;
import checkdep.value.violation.Violation;
import checkdep.value.violation.Violations;
/**
 * Validates actual package dependencies against a declared constraint set
 * using JDepend. Constraints with no matching real dependency are reported as
 * needless; real dependencies not covered by a constraint become violations.
 */
public class JDependConstraintChecker implements ConstraintChecker {

  private final Constraints constraints;

  public JDependConstraintChecker(Constraints constraints) {
    this.constraints = constraints;
  }

  @Override
  public Violations check(Dependencies dependencies) {
    Dependencies declaredDeps = toDependencies(createJDependConstraint());
    // A declared edge with no real counterpart is a needless constraint.
    Set<Violation> needless = collectViolations(declaredDeps, dependencies);
    if (!needless.isEmpty()) {
      throw new NeedlessConstraintException(needless.toString());
    }
    // A real edge with no declared counterpart is a violation.
    return new Violations(collectViolations(dependencies, declaredDeps));
  }

  // Every efferent of actualDeps that expectDeps does not allow, as a sorted set.
  private Set<Violation> collectViolations(Dependencies actualDeps, Dependencies expectDeps) {
    Set<Violation> found = new TreeSet<>();
    actualDeps.values().forEach(actual -> {
      Dependency expected = expectDeps.get(actual.getName()).orElse(Dependency.NULL);
      found.addAll(disallowedEfferents(actual, expected));
    });
    return found;
  }

  // Efferents of "actual" that are absent from "expect".
  private Set<Violation> disallowedEfferents(Dependency actual, Dependency expect) {
    Set<Violation> result = new TreeSet<>();
    for (PackageName efferent : actual.getEfferents()) {
      if (!expect.getEfferents().contains(efferent)) {
        result.add(new Violation(actual.getName(), efferent));
      }
    }
    return result;
  }

  // Translates the configured constraints into a JDepend DependencyConstraint.
  private DependencyConstraint createJDependConstraint() {
    DependencyConstraint jdependConstraint = new DependencyConstraint();
    for (Constraint rule : constraints) {
      jdependConstraint
          .addPackage(rule.getFrom().getValue())
          .dependsUpon(jdependConstraint.addPackage(rule.getTo().getValue()));
    }
    return jdependConstraint;
  }

  @SuppressWarnings("unchecked")
  private Dependencies toDependencies(DependencyConstraint constraint) {
    return JDependDependency.toDependencies(constraint.getPackages());
  }
}
|
package com.hunantv.fw.view;
import java.io.IOException;
import java.io.Writer;
import com.alibaba.fastjson.JSON;
import com.hunantv.fw.result.Result;
/**
 * View that renders a value as a JSONP expression: {@code callback(<json>)}.
 */
public class JsonPView extends AbsView {

    /** Name of the JavaScript callback wrapped around the JSON payload. */
    protected String callback = "";

    public JsonPView(Object v) {
        this("", v);
    }

    public JsonPView(String callback, Object v) {
        this.callback = callback;
        this.v = v;
    }

    /**
     * Renders the value as {@code callback(<json>)}. A {@link Result} supplies
     * its own JSON via {@code toJson()}; any other value is serialized with
     * fastjson.
     *
     * @return the JSONP expression
     */
    @Override
    public String render() {
        StringBuilder strb = new StringBuilder();
        strb.append(callback).append("(");
        if (v instanceof Result) {
            strb.append(((Result) v).toJson());
        } else {
            // Bug fix: the original appended the fastjson serialization even
            // for Result values, producing invalid output like "cb({..}{..})".
            strb.append(JSON.toJSONString(v));
        }
        strb.append(")");
        return strb.toString();
    }

    @Override
    public void renderTo(Writer out) throws IOException {
        out.write(this.render());
    }
}
|
package cn.momia.mapi.api.v1.course;
import cn.momia.api.course.CourseServiceApi;
import cn.momia.api.course.dto.CourseBookDto;
import cn.momia.api.course.dto.CourseDto;
import cn.momia.api.course.dto.TeacherDto;
import cn.momia.api.user.UserServiceApi;
import cn.momia.api.user.dto.UserDto;
import cn.momia.common.api.dto.PagedList;
import cn.momia.common.api.http.MomiaHttpResponse;
import cn.momia.common.webapp.config.Configuration;
import cn.momia.image.api.ImageFile;
import cn.momia.mapi.api.v1.AbstractV1Api;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.List;
@RestController
@RequestMapping(value = "/v1/course")
public class CourseV1Api extends AbstractV1Api {
@Autowired private CourseServiceApi courseServiceApi;
@Autowired private UserServiceApi userServiceApi;
@RequestMapping(method = RequestMethod.GET)
public MomiaHttpResponse get(@RequestParam(required = false, defaultValue = "") String utoken, @RequestParam long id) {
if (id <= 0) return MomiaHttpResponse.BAD_REQUEST;
CourseDto course = processCourse(courseServiceApi.get(id));
JSONObject courseJson = (JSONObject) JSON.toJSON(course);
if (!StringUtils.isBlank(utoken)) {
UserDto user = userServiceApi.get(utoken);
courseJson.put("favored", courseServiceApi.isFavored(user.getId(), id));
}
List<TeacherDto> teachers = processTeachers(courseServiceApi.queryTeachers(id, 0, Configuration.getInt("PageSize.CourseTeacher")).getList());
if (!teachers.isEmpty()) courseJson.put("teachers", teachers);
return MomiaHttpResponse.SUCCESS(courseJson);
}
protected CourseDto processCourse(CourseDto course) {
course.setCover(ImageFile.largeUrl(course.getCover()));
processLargeImgs(course.getImgs());
processCourseBook(course.getBook());
return course;
}
private CourseBookDto processCourseBook(CourseBookDto book) {
if (book == null) return null;
List<String> imgs = new ArrayList<String>();
List<String> largeImgs = new ArrayList<String>();
for (String img : book.getImgs()) {
imgs.add(ImageFile.smallUrl(img));
largeImgs.add(ImageFile.largeUrl(img));
}
book.setImgs(imgs);
book.setLargeImgs(largeImgs);
return book;
}
private List<TeacherDto> processTeachers(List<TeacherDto> teachers) {
for (TeacherDto teacher : teachers) {
teacher.setAvatar(ImageFile.smallUrl(teacher.getAvatar()));
}
return teachers;
}
@RequestMapping(value = "/detail", method = RequestMethod.GET)
public MomiaHttpResponse detail(@RequestParam long id) {
if (id <= 0) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(courseServiceApi.detail(id));
}
@RequestMapping(value = "/sku/week", method = RequestMethod.GET)
public MomiaHttpResponse listWeekSkus(@RequestParam long id) {
if (id <= 0) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(courseServiceApi.listWeekSkus(id));
}
// NOTE(review): despite its name, this overload lists MONTH skus (it delegates
// to courseServiceApi.listMonthSkus). Consider renaming to listMonthSkus; the
// name is kept here to avoid changing the public Java API.
@RequestMapping(value = "/sku/month", method = RequestMethod.GET)
public MomiaHttpResponse listWeekSkus(@RequestParam long id, @RequestParam int month) {
    // month must be a calendar month in [1, 12]
    if (id <= 0 || month <= 0 || month > 12) return MomiaHttpResponse.BAD_REQUEST;
    return MomiaHttpResponse.SUCCESS(courseServiceApi.listMonthSkus(id, month));
}
/**
 * GET /book — returns one page of a course's book images, rewritten to their
 * large-size URLs.
 *
 * @param id    course id, must be positive
 * @param start page offset, must be non-negative
 */
@RequestMapping(value = "/book", method = RequestMethod.GET)
public MomiaHttpResponse book(@RequestParam long id, @RequestParam int start) {
    if (id <= 0 || start < 0) {
        return MomiaHttpResponse.BAD_REQUEST;
    }

    final int pageSize = Configuration.getInt("PageSize.BookImg");
    PagedList<String> pagedImgs = courseServiceApi.book(id, start, pageSize);
    processLargeImgs(pagedImgs.getList());

    return MomiaHttpResponse.SUCCESS(pagedImgs);
}
/**
 * GET /teacher — returns one page of a course's teachers with avatars
 * rewritten to small-size URLs.
 *
 * @param id    course id, must be positive
 * @param start page offset, must be non-negative
 */
@RequestMapping(value = "/teacher", method = RequestMethod.GET)
public MomiaHttpResponse teacher(@RequestParam long id, @RequestParam int start) {
    if (id <= 0 || start < 0) {
        return MomiaHttpResponse.BAD_REQUEST;
    }

    final int pageSize = Configuration.getInt("PageSize.Teacher");
    PagedList<TeacherDto> pagedTeachers = courseServiceApi.queryTeachers(id, start, pageSize);
    processTeachers(pagedTeachers.getList());

    return MomiaHttpResponse.SUCCESS(pagedTeachers);
}
/**
 * POST /favor — marks a course as favored by the calling user.
 *
 * @param utoken user token; blank is treated as an expired session
 * @param id     course id, must be positive
 */
@RequestMapping(value = "/favor", method = RequestMethod.POST)
public MomiaHttpResponse favor(@RequestParam String utoken, @RequestParam long id) {
    if (StringUtils.isBlank(utoken)) {
        return MomiaHttpResponse.TOKEN_EXPIRED;
    }
    if (id <= 0) {
        return MomiaHttpResponse.BAD_REQUEST;
    }

    UserDto user = userServiceApi.get(utoken);
    boolean ok = courseServiceApi.favor(user.getId(), id);
    return ok ? MomiaHttpResponse.SUCCESS : MomiaHttpResponse.FAILED("");
}
/**
 * POST /unfavor — removes a course from the calling user's favorites.
 *
 * @param utoken user token; blank is treated as an expired session
 * @param id     course id, must be positive
 */
@RequestMapping(value = "/unfavor", method = RequestMethod.POST)
public MomiaHttpResponse unfavor(@RequestParam String utoken, @RequestParam long id) {
    if (StringUtils.isBlank(utoken)) {
        return MomiaHttpResponse.TOKEN_EXPIRED;
    }
    if (id <= 0) {
        return MomiaHttpResponse.BAD_REQUEST;
    }

    UserDto user = userServiceApi.get(utoken);
    boolean ok = courseServiceApi.unfavor(user.getId(), id);
    return ok ? MomiaHttpResponse.SUCCESS : MomiaHttpResponse.FAILED("");
}
}
|
package co.poynt.postman.model;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Resolves POSTMAN {{variable}} expressions against an environment, including
 * the built-in dynamic variables {{$guid}}, {{$timestamp}} and {{$randomInt}}.
 */
public class PostmanVariables {
	/** Regex matching one {{variable}} reference. Kept public for compatibility. */
	public static final String POSTMAN_EXP = "\\{\\{[^\\}]+\\}\\}";
	/** Compiled once: Pattern is immutable, thread-safe, and cheap to reuse. */
	private static final Pattern POSTMAN_PATTERN = Pattern.compile(POSTMAN_EXP);

	public static final String GUID = "{{$guid}}";
	public static final String TIMESTAMP = "{{$timestamp}}";
	public static final String RANDOMINT = "{{$randomInt}}";

	// Fixed seed kept from the original code: it makes {{$randomInt}}
	// reproducible across runs of this (test-oriented) tool.
	private Random r = new Random(1000);

	private PostmanEnvironment env;

	public PostmanVariables(PostmanEnvironment env) {
		this.env = env;
	}

	/**
	 * Resolves one of POSTMAN's built-in dynamic variables.
	 *
	 * @param exp the full expression, e.g. {@code {{$guid}}}
	 * @return the generated value
	 * @throws IllegalArgumentException for an unknown dynamic variable
	 */
	private String getConstantVal(String exp) {
		if (exp.equalsIgnoreCase(GUID)) {
			return UUID.randomUUID().toString();
		} else if (exp.equalsIgnoreCase(TIMESTAMP)) {
			// POSTMAN timestamps are expressed in seconds, not milliseconds.
			return Long.toString(System.currentTimeMillis() / 1000);
		} else if (exp.equalsIgnoreCase(RANDOMINT)) {
			return Integer.toString(r.nextInt(1000));
		} else {
			throw new IllegalArgumentException("Invalid POSTMAN dynamic variable " + exp);
		}
	}

	/**
	 * Resolves a single {{name}} expression. Built-ins start with "{{$";
	 * anything else is looked up in the environment and falls back to the
	 * literal string "UNDEFINED" when absent.
	 */
	private String getVal(String name) {
		if (name.startsWith("{{$")) {
			return getConstantVal(name);
		}
		// Strip the surrounding braces to obtain the environment key.
		String key = name.substring(2, name.length() - 2).trim();
		PostmanEnvValue val = this.env.lookup.get(key);
		if (val == null) {
			return "UNDEFINED";
		}
		return val.value;
	}

	/**
	 * Replace all {{dynamic variable}} in orig string with values found in the
	 * environment. If variable is not found, replace it with constant string
	 * "UNDEFINED".
	 *
	 * @param orig string possibly containing {{...}} references; may be null
	 * @return the input with every reference substituted
	 */
	public String replace(String orig) {
		if (orig == null || orig.isEmpty()) {
			return orig;
		}
		// Collect all {{...}} references first.
		List<String> allMatches = new ArrayList<String>();
		Matcher m = POSTMAN_PATTERN.matcher(orig);
		while (m.find()) {
			allMatches.add(m.group().trim());
		}
		// Repeated String.replace is not the most efficient approach, but it
		// is the simplest, and this is a test tool, not production code.
		String result = orig;
		for (String var : allMatches) {
			result = result.replace(var, getVal(var));
		}
		return result;
	}

	public PostmanEnvironment getEnv() {
		return env;
	}
}
|
package fr.paris.lutece.portal.web.documentation;
import fr.paris.lutece.portal.business.right.FeatureGroup;
import fr.paris.lutece.portal.business.right.FeatureGroupHome;
import fr.paris.lutece.portal.business.right.Right;
import fr.paris.lutece.portal.business.user.AdminUser;
import fr.paris.lutece.portal.service.admin.AccessDeniedException;
import fr.paris.lutece.portal.service.admin.AdminUserService;
import fr.paris.lutece.portal.service.message.AdminMessage;
import fr.paris.lutece.portal.service.message.AdminMessageService;
import fr.paris.lutece.portal.service.template.AppTemplateService;
import fr.paris.lutece.portal.service.util.AppLogService;
import fr.paris.lutece.portal.service.util.AppPathService;
import fr.paris.lutece.util.html.HtmlTemplate;
import fr.paris.lutece.util.xml.XmlUtil;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.xml.transform.stream.StreamSource;
/**
*
* Classe for display the admin features documentation
*
*/
/**
 *
 * Classe for display the admin features documentation
 *
 */
public class AdminDocumentationJspBean
{
    //xsl
    private static final String XSL_PATH = "admin_documentation.xsl";

    //xsl parameters
    private static final String PARAMS_LOCAL = "locale";
    private static final String PARAMS_DEFAULT_LOCAL = "default_locale";

    //parameters
    private static final String PARAMETER_FEATURE_DOC = "doc";

    //jsp
    private static final String JSP_CLOSE = "javascript:window.close()";

    //templates
    private static final String TEMPLATE_ADMIN_SUMMARY_DOCUMENTATION = "admin/documentation/admin_summary_documentation.html";

    //bookmark
    private static final String BOOKMARK_FEATURE_GROUP_LIST = "feature_group_list";
    private static final String BOOKMARK_HELP_ICON = "help_icon";

    //images
    private static final String IMAGE_HELP_PATH = "images/admin/skin/features/admin_help.png";

    //properties
    private static final String PROPERTY_XSL_BASE_PATH = "lutece.documentation.xsl.path";

    //messages
    private static final String MESSAGE_ERROR = "portal.features.documentation.message.error";

    //utils
    private static final String LOCAL_DEFAULT = "en";
    private static final String XML_BASE_PATH = "/doc/xml/";
    private static final String XML_USER_PATH = "/xdoc/user/";
    private static final String FEATURES_GROUP_SYSTEM = "SYSTEM";

    /**
     * Returns the view of features documentation
     *
     * @param request The request
     * @return The HTML documentation, or null when the XML can not be transformed
     * @throws AccessDeniedException If the access is refused to the user
     */
    public String getDocumentation( HttpServletRequest request )
        throws AccessDeniedException
    {
        String strFeature = request.getParameter( PARAMETER_FEATURE_DOC );
        AdminUser user = AdminUserService.getAdminUser( request );
        Locale locale = user.getLocale( );

        // BUGFIX: the original code called locale.toString( ) BEFORE its null
        // check, so a null locale threw a NullPointerException instead of
        // falling back to the default locale.
        String strLocal = ( locale != null ) ? locale.toString( ) : LOCAL_DEFAULT;

        //get the xsl file
        String strXslPath = AppPathService.getPath( PROPERTY_XSL_BASE_PATH, XSL_PATH );
        File fileXsl = new File( strXslPath );
        StreamSource sourceStyleSheet = new StreamSource( fileXsl );

        //get the xml documentation file: default-locale files live at the root,
        //other locales in a sub-directory named after the locale
        File fileXml;

        if ( strLocal.equals( LOCAL_DEFAULT ) )
        {
            String strXmlPath = AppPathService.getWebAppPath( ) + XML_BASE_PATH + XML_USER_PATH + strFeature + ".xml";
            fileXml = new File( strXmlPath );
        }
        else
        {
            String strXmlPath = AppPathService.getWebAppPath( ) + XML_BASE_PATH + strLocal + XML_USER_PATH +
                strFeature + ".xml";
            fileXml = new File( strXmlPath );
        }

        StreamSource sourceXml = new StreamSource( fileXml );

        String strHtmlDoc = null;
        Map<String, String> params = new HashMap<String, String>( );
        params.put( PARAMS_LOCAL, strLocal );
        params.put( PARAMS_DEFAULT_LOCAL, LOCAL_DEFAULT );

        try
        {
            strHtmlDoc = XmlUtil.transform( sourceXml, sourceStyleSheet, params, null );
        }
        catch ( Exception e )
        {
            AppLogService.debug( "Can't parse XML: " + e.getMessage( ), e );
            AppLogService.error( "Can't parse XML: " + e.getMessage( ), e );

            return null;
        }

        return strHtmlDoc;
    }

    /**
     * Returns an error message when an error occured
     *
     * @param request The request
     * @return The URL of message
     */
    public String doAdminMessage( HttpServletRequest request )
    {
        return AdminMessageService.getMessageUrl( request, MESSAGE_ERROR, JSP_CLOSE, AdminMessage.TYPE_ERROR );
    }

    /**
     * Returns the view of summary documentation
     *
     * @param request The request
     * @return The HTML documentation
     */
    public String getSummaryDocumentation( HttpServletRequest request )
    {
        AdminUser user = AdminUserService.getAdminUser( request );
        // Generics added: the original used raw ArrayList / HashMap.
        ArrayList<FeatureGroup> listFeatureGroups = getFeatureGroupsList( user );

        HashMap<String, Object> model = new HashMap<String, Object>( );
        model.put( BOOKMARK_FEATURE_GROUP_LIST, listFeatureGroups );
        model.put( BOOKMARK_HELP_ICON, IMAGE_HELP_PATH );

        HtmlTemplate template = AppTemplateService.getTemplate( TEMPLATE_ADMIN_SUMMARY_DOCUMENTATION,
                user.getLocale( ), model );

        return template.getHtml( );
    }

    /**
     * Returns an array that contains all feature groups corresponding to the user.
     * Each group is populated with the user's rights that carry a documentation
     * URL; rights whose group does not match are carried over to later groups,
     * and any left-over rights end up in the SYSTEM group (or the last group).
     *
     * @param user the current admin user
     * @return the feature groups containing at least one documented feature
     */
    private ArrayList<FeatureGroup> getFeatureGroupsList( AdminUser user )
    {
        // structure that will be returned
        ArrayList<FeatureGroup> aOutFeatureGroupList = new ArrayList<FeatureGroup>( );

        // get the list of user's features
        Map<String, Right> featuresMap = user.getRights( );
        Collection<Right> features = featuresMap.values( );

        // for each group, load the features
        for ( FeatureGroup featureGroup : FeatureGroupHome.getFeatureGroupsList( ) )
        {
            ArrayList<Right> aLeftFeatures = new ArrayList<Right>( );

            for ( Right right : features )
            {
                right.setLocale( user.getLocale( ) );

                String strFeatureGroup = right.getFeatureGroup( );
                String strUrlDocumentation = right.getDocumentationUrl( );

                // documented feature of this group -> attach it; other documented
                // features stay in the left-over list for the following groups
                if ( featureGroup.getId( ).equalsIgnoreCase( strFeatureGroup ) && ( strUrlDocumentation != null ) &&
                        !( strUrlDocumentation.equals( "" ) ) )
                {
                    featureGroup.addFeature( right );
                }
                else if ( ( strUrlDocumentation != null ) && !( strUrlDocumentation.equals( "" ) ) )
                {
                    aLeftFeatures.add( right );
                }
            }

            if ( !featureGroup.isEmpty( ) )
            {
                featureGroup.setLocale( user.getLocale( ) );
                aOutFeatureGroupList.add( featureGroup );
            }

            features = aLeftFeatures;
        }

        FeatureGroup featureGroupSystem = FeatureGroupHome.findByPrimaryKey( FEATURES_GROUP_SYSTEM );

        if ( ( featureGroupSystem != null ) && !features.isEmpty( ) )
        {
            boolean bSystemFeaturesGroupExist = false;

            //Check if the features group system exist in list features group
            for ( FeatureGroup featureGroup : aOutFeatureGroupList )
            {
                //if exist
                if ( featureGroup.getId( ).equalsIgnoreCase( featureGroupSystem.getId( ) ) )
                {
                    // add the features with no group to the list in features group SYSTEM
                    for ( Right right : features )
                    {
                        featureGroup.addFeature( right );
                    }

                    bSystemFeaturesGroupExist = true;

                    break;
                }
            }

            // if not, add features group SYSTEM to the list with the features with no group
            if ( !bSystemFeaturesGroupExist )
            {
                for ( Right right : features )
                {
                    featureGroupSystem.addFeature( right );
                }

                featureGroupSystem.setLocale( user.getLocale( ) );
                aOutFeatureGroupList.add( featureGroupSystem );
            }
        }
        else if ( ( aOutFeatureGroupList.size( ) > 0 ) && !features.isEmpty( ) )
        {
            // no SYSTEM group available: append the left-over features to the
            // last displayed group so they remain reachable
            FeatureGroup lastFeatureGroup = aOutFeatureGroupList.get( aOutFeatureGroupList.size( ) - 1 );

            for ( Right right : features )
            {
                lastFeatureGroup.addFeature( right );
            }
        }

        return aOutFeatureGroupList;
    }
}
|
package com.alan344happyframework.bean;
import com.alan344happyframework.constants.SeparatorConstants;
import com.alan344happyframework.core.Product;
import com.alan344happyframework.util.StringUtils;
import com.alan344happyframework.weixin.entity.WechatPayOrder;
import com.alipay.api.domain.AlipayTradeAppPayModel;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.validator.constraints.Length;
import javax.validation.constraints.Digits;
import javax.validation.constraints.NotBlank;
import java.util.List;
@Getter
@Setter
public class OrderPay extends PayBase {
    /** Merchant-side order id, 1-32 characters. */
    @NotBlank
    @Length(max = 32, min = 1)
    private String orderId;

    /** Order amount as a decimal string with at most 2 fraction digits. */
    @NotBlank
    @Digits(integer = 30, fraction = 2)
    private String price;

    /**
     * Order body text; when blank it is derived from {@link #bodyProducts}
     * in {@link #setBodyProducts(List)}.
     */
    @NotBlank
    private String body;

    /**
     * Products backing the body text.
     */
    private List<? extends Product> bodyProducts;

    @NotBlank
    private String subject;

    private String notifyUrl;

    private AlipayTradeAppPayModel alipayTradeAppPayModel;

    private WechatPayOrder wechatPayOrder = new WechatPayOrder();

    public void setBodyProducts(List<? extends Product> products) {
        // BUGFIX: the original setter never stored the argument, so
        // getBodyProducts() always returned null (this explicit method
        // suppresses Lombok's generated setter for the field).
        this.bodyProducts = products;
        if (StringUtils.isEmpty(this.body)) {
            this.body = Product.getNames(products);
        }
    }

    /**
     * Removes the given substrings from the body.
     *
     * @param replaceStr comma-separated patterns to strip; each entry is passed
     *                   to String#replaceAll and is therefore treated as a REGEX
     */
    public void filterBody(String replaceStr) {
        if (!org.springframework.util.StringUtils.isEmpty(replaceStr) && StringUtils.isNotEmpty(this.body)) {
            String[] split = replaceStr.split(SeparatorConstants.COMMA);
            for (String s : split) {
                this.body = body.replaceAll(s, SeparatorConstants.EMPTY);
            }
        }
    }
}
|
package com.mongodb;
import com.github.fakemongo.FongoException;
import com.github.fakemongo.impl.Aggregator;
import com.github.fakemongo.impl.ExpressionParser;
import com.github.fakemongo.impl.Filter;
import com.github.fakemongo.impl.MapReduce;
import com.github.fakemongo.impl.Tuple2;
import com.github.fakemongo.impl.UpdateEngine;
import com.github.fakemongo.impl.Util;
import com.github.fakemongo.impl.geo.GeoUtil;
import com.github.fakemongo.impl.index.GeoIndex;
import com.github.fakemongo.impl.index.IndexAbstract;
import com.github.fakemongo.impl.index.IndexFactory;
import com.github.fakemongo.impl.text.TextSearch;
import static com.mongodb.assertions.Assertions.isTrueArgument;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import static java.util.Collections.emptyList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.bson.BSON;
import org.bson.BsonArray;
import org.bson.BsonDocument;
import org.bson.BsonDocumentReader;
import org.bson.BsonDocumentWriter;
import org.bson.BsonValue;
import org.bson.codecs.Codec;
import org.bson.codecs.Decoder;
import org.bson.codecs.DecoderContext;
import org.bson.codecs.EncoderContext;
import org.bson.codecs.configuration.CodecRegistry;
import org.bson.io.BasicOutputBuffer;
import org.bson.io.OutputBuffer;
import org.bson.types.Binary;
import org.bson.types.ObjectId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sun.reflect.generics.reflectiveObjects.NotImplementedException;
/**
* fongo override of com.mongodb.DBCollection
* you shouldn't need to use this class directly
*
* @author jon
*/
public class FongoDBCollection extends DBCollection {
private final static Logger LOG = LoggerFactory.getLogger(FongoDBCollection.class);
public static final String FONGO_SPECIAL_ORDER_BY = "$$$$$FONGO_ORDER_BY$$$$$";
private static final String ID_NAME_INDEX = "_id_";
private final FongoDB fongoDb;
private final ExpressionParser expressionParser;
private final UpdateEngine updateEngine;
private final boolean nonIdCollection;
private final ExpressionParser.ObjectComparator objectComparator;
// Fields/Index
private final List<IndexAbstract> indexes = new ArrayList<IndexAbstract>();
private final IndexAbstract _idIndex;
/**
 * Creates a collection whose _id index enforces uniqueness (the normal case).
 */
public FongoDBCollection(FongoDB db, String name) {
    this(db, name, false);
}
/**
 * Creates a collection backed by in-memory indexes.
 *
 * @param db          owning fake database
 * @param name        collection name; "system.*" collections are treated specially
 * @param idIsNotUniq when true the _id index does NOT enforce uniqueness
 */
public FongoDBCollection(FongoDB db, String name, boolean idIsNotUniq) {
    super(db, name);
    this.fongoDb = db;
    // system collections (e.g. system.indexes) get no _id handling of their own
    this.nonIdCollection = name.startsWith("system.");
    this.expressionParser = new ExpressionParser();
    this.updateEngine = new UpdateEngine();
    this.objectComparator = expressionParser.buildObjectComparator(true);
    // every document lives in the _id index; other indexes restrict views of it
    this._idIndex = IndexFactory.create(ID_FIELD_NAME, new BasicDBObject(ID_FIELD_NAME, 1), !idIsNotUniq);
    this.indexes.add(_idIndex);
    if (!this.nonIdCollection) {
        this.createIndex(new BasicDBObject(ID_FIELD_NAME, 1), new BasicDBObject("name", ID_NAME_INDEX));
    }
}
// Builds the WriteResult returned by update/remove; synchronized to match the
// locking discipline of the calling methods.
private synchronized WriteResult updateResult(int updateCount, boolean updatedExisting, final Object upsertedId) {
    return new WriteResult(updateCount, updatedExisting, upsertedId);
}
/**
 * Materializes a LazyDBObject by running it through an encode/decode
 * round-trip; any other document is returned untouched.
 *
 * @param dbObject document to materialize
 * @param encoder  encoder to use, or null for the default one
 */
private DBObject encodeDecode(DBObject dbObject, DBEncoder encoder) {
    if (!(dbObject instanceof LazyDBObject)) {
        return dbObject;
    }
    final DBEncoder effectiveEncoder = (encoder != null) ? encoder : DefaultDBEncoder.FACTORY.create();
    final OutputBuffer buffer = new BasicOutputBuffer();
    effectiveEncoder.writeObject(buffer, dbObject);
    return DefaultDBDecoder.FACTORY.create().decode(buffer.toByteArray(), this);
}
/**
 * Inserts every document of the batch.
 * <p>
 * Each document is cloned (with _id moved first), assigned an ObjectId when it
 * has none, and pushed through the indexes. The generated ObjectId is written
 * back into the caller's object, as the real driver does.
 */
@Override
public synchronized WriteResult insert(final List<? extends DBObject> documents, final InsertOptions insertOptions) {
    WriteConcern writeConcern = insertOptions.getWriteConcern() != null ? insertOptions.getWriteConcern() : getWriteConcern();
    for (final DBObject obj : documents) {
        // clone so the stored document is decoupled from the caller's instance
        DBObject cloned = filterLists(Util.cloneIdFirst(encodeDecode(obj, insertOptions.getDbEncoder())));
        if (LOG.isDebugEnabled()) {
            LOG.debug("insert: " + cloned);
        }
        ObjectId id = putIdIfNotPresent(cloned);
        // Save the id field in the caller.
        if (!(obj instanceof LazyDBObject) && obj.get(ID_FIELD_NAME) == null) {
            obj.put(ID_FIELD_NAME, Util.clone(id));
        }
        putSizeCheck(cloned, writeConcern);
    }
    // Don't know why, but there is not more number of inserted results...
    // return new WriteResult(insertResult(0), concern);
    if (!writeConcern.isAcknowledged()) {
        return WriteResult.unacknowledged();
    }
    return new WriteResult(documents.size(), false, null);
}
/**
 * Tells whether duplicate-key errors should be raised: only acknowledged
 * write concerns surface them, as with the real driver.
 *
 * @param concern concern to inspect, or null to use the collection default
 */
boolean enforceDuplicates(WriteConcern concern) {
    final WriteConcern effective = (concern != null) ? concern : getWriteConcern();
    return effective.isAcknowledged();
}
/**
 * Ensures the document carries an _id, generating and storing a fresh
 * ObjectId when the field is absent.
 *
 * @param obj document to complete (mutated when _id is missing)
 * @return the document's ObjectId, or null when _id exists but is not an ObjectId
 */
public ObjectId putIdIfNotPresent(DBObject obj) {
    final Object current = obj.get(ID_FIELD_NAME);
    if (current instanceof ObjectId) {
        return (ObjectId) current;
    }
    if (current != null) {
        // caller supplied a non-ObjectId _id; keep it untouched
        return null;
    }
    final ObjectId generated = new ObjectId();
    obj.put(ID_FIELD_NAME, generated);
    return generated;
}
/**
 * Stores the document in every index, first enforcing Fongo's hard cap of
 * 100,000 documents per collection (it is a lightweight test tool).
 */
public void putSizeCheck(DBObject obj, WriteConcern concern) {
    if (_idIndex.size() > 100000) {
        throw new FongoException("Whoa, hold up there. Fongo's designed for lightweight testing. 100,000 items per collection max");
    }
    addToIndexes(obj, null, concern);
}
/**
 * Deep-clones a document and normalizes every top-level value via
 * replaceListAndMap (collections/arrays to BasicDBList, maps to
 * BasicDBObject, encoding hooks applied).
 *
 * @param dbo document to normalize, may be null
 * @return a normalized clone, or null for null input
 */
public DBObject filterLists(DBObject dbo) {
    if (dbo == null) {
        return null;
    }
    final DBObject clone = Util.clone(dbo);
    for (Map.Entry<String, Object> entry : Util.entrySet(clone)) {
        final Object normalized = replaceListAndMap(entry.getValue());
        clone.put(entry.getKey(), normalized);
    }
    return clone;
}
/**
 * Normalizes a single value for storage: applies BSON encoding hooks, then
 * converts collections and object/primitive arrays to BasicDBList, plain maps
 * to BasicDBObject, Binary to its byte[] payload, and recurses into nested
 * documents. The returned value is always a clone.
 */
public Object replaceListAndMap(Object value) {
    Object replacementValue = BSON.applyEncodingHooks(value);
    if (ExpressionParser.isDbObject(replacementValue)) {
        replacementValue = filterLists(ExpressionParser.toDbObject(replacementValue));
    } else if (replacementValue instanceof Collection) {
        BasicDBList list = new BasicDBList();
        for (Object listItem : (Collection) replacementValue) {
            list.add(replaceListAndMap(listItem));
        }
        replacementValue = list;
    } else if (replacementValue instanceof DBObject) {
        replacementValue = filterLists((DBObject) replacementValue);
    } else if (replacementValue instanceof Object[]) {
        BasicDBList list = new BasicDBList();
        for (Object listItem : (Object[]) replacementValue) {
            list.add(replaceListAndMap(listItem));
        }
        replacementValue = list;
    } else if (replacementValue instanceof long[]) {
        // primitive arrays each need their own branch: they are not Object[]
        BasicDBList list = new BasicDBList();
        for (Object listItem : (long[]) replacementValue) {
            list.add(replaceListAndMap(listItem));
        }
        replacementValue = list;
    } else if (replacementValue instanceof int[]) {
        BasicDBList list = new BasicDBList();
        for (Object listItem : (int[]) replacementValue) {
            list.add(replaceListAndMap(listItem));
        }
        replacementValue = list;
    } else if (replacementValue instanceof double[]) {
        BasicDBList list = new BasicDBList();
        for (Object listItem : (double[]) replacementValue) {
            list.add(replaceListAndMap(listItem));
        }
        replacementValue = list;
    } else if (replacementValue instanceof float[]) {
        BasicDBList list = new BasicDBList();
        for (Object listItem : (float[]) replacementValue) {
            list.add(replaceListAndMap(listItem));
        }
        replacementValue = list;
    } else if (replacementValue instanceof boolean[]) {
        BasicDBList list = new BasicDBList();
        for (Object listItem : (boolean[]) replacementValue) {
            list.add(replaceListAndMap(listItem));
        }
        replacementValue = list;
    } else if (replacementValue instanceof Map) {
        BasicDBObject newDbo = new BasicDBObject();
        //noinspection unchecked
        for (Map.Entry<String, Object> entry : (Set<Map.Entry<String, Object>>) ((Map) replacementValue).entrySet()) {
            newDbo.put(entry.getKey(), replaceListAndMap(entry.getValue()));
        }
        replacementValue = newDbo;
    } else if (replacementValue instanceof Binary) {
        replacementValue = ((Binary) replacementValue).getData();
    }
    return Util.clone(replacementValue);
}
// Internal insert used by upsert: assigns an _id if needed, then indexes the
// document (which also enforces the collection size cap).
protected synchronized void fInsert(DBObject obj, WriteConcern concern) {
    putIdIfNotPresent(obj);
    putSizeCheck(obj, concern);
}
/**
 * Applies an update (or replacement) to the documents matching the query.
 * <p>
 * Two paths exist: a fast path for a query on _id only with a full
 * replacement document, and a generic path that filters candidates and runs
 * the update engine on each match. An upsert inserts a document derived from
 * the query when nothing matched.
 */
@Override
public synchronized WriteResult update(DBObject q, DBObject o, boolean upsert, boolean multi, WriteConcern concern,
                                       DBEncoder encoder) throws MongoException {
    q = filterLists(q);
    o = filterLists(o);
    if (o == null) {
        throw new IllegalArgumentException("update can not be null");
    }
    if (concern == null) {
        throw new IllegalArgumentException("Write concern can not be null");
    }
    if (!o.keySet().isEmpty()) {
        // if 1st key doesn't start with $, then object will be inserted as is, need to check it
        String key = o.keySet().iterator().next();
        if (!key.startsWith("$")) {
            _checkObject(o, false, false);
        }
    }
    // if (multi) {
    // try {
    // checkMultiUpdateDocument(o);
    // this.fongoDb.notOkErrorResult(9, e.getMessage()).throwOnError();
    if (LOG.isDebugEnabled()) {
        LOG.debug("update(" + q + ", " + o + ", " + upsert + ", " + multi + ")");
    }
    // a replacement/update may never change _id of a matched document
    if (o.containsField(ID_FIELD_NAME) && q.containsField(ID_FIELD_NAME) && objectComparator.compare(o.get(ID_FIELD_NAME), q.get(ID_FIELD_NAME)) != 0) {
        LOG.warn("can not change _id of a document query={}, document={}", q, o);
        throw fongoDb.writeConcernException(16837, "can not change _id of a document " + ID_FIELD_NAME);
    }
    int updatedDocuments = 0;
    boolean idOnlyUpdate = q.containsField(ID_FIELD_NAME) && q.keySet().size() == 1;
    boolean updatedExisting = false;
    Object upsertedId = null;
    if (idOnlyUpdate && isNotUpdateCommand(o)) {
        // fast path: replacement by _id; carry the _id into the new document
        if (!o.containsField(ID_FIELD_NAME)) {
            o.put(ID_FIELD_NAME, Util.clone(q.get(ID_FIELD_NAME)));
        } else {
            o.put(ID_FIELD_NAME, Util.clone(o.get(ID_FIELD_NAME)));
        }
        @SuppressWarnings("unchecked") Iterator<DBObject> oldObjects = _idIndex.retrieveObjects(q).iterator();
        if (oldObjects.hasNext() || upsert) {
            addToIndexes(Util.clone(o), oldObjects.hasNext() ? oldObjects.next() : null, concern);
            updatedDocuments++;
        }
    } else {
        Filter filter = buildFilter(q);
        for (DBObject obj : filterByIndexes(q)) {
            if (filter.apply(obj)) {
                DBObject newObject = Util.clone(obj);
                updateEngine.doUpdate(newObject, o, q, false);
                // Check for uniqueness (throw MongoException if error)
                addToIndexes(newObject, obj, concern);
                updatedDocuments++;
                updatedExisting = true;
                if (!multi) {
                    break;
                }
            }
        }
        if (updatedDocuments == 0 && upsert) {
            BasicDBObject newObject = createUpsertObject(q);
            fInsert(updateEngine.doUpdate(newObject, o, q, true), concern);
            updatedDocuments++;
            updatedExisting = false;
            upsertedId = newObject.get(ID_FIELD_NAME);
        }
    }
    return updateResult(updatedDocuments, updatedExisting, upsertedId);
}
/**
 * Validates a client-supplied document before it is written.
 *
 * @param o         document to validate, possibly null
 * @param canBeNull when false, a null document raises IllegalArgumentException
 * @param query     when true, partial objects and key validation are skipped
 * @return the (unchanged) document, or null when allowed
 */
protected DBObject _checkObject(DBObject o, boolean canBeNull, boolean query) {
    if (o == null) {
        if (!canBeNull) {
            throw new IllegalArgumentException("can't be null");
        }
        return null;
    }
    if (!query) {
        if (o.isPartialObject()) {
            throw new IllegalArgumentException("can't save partial objects");
        }
        _checkKeys(o);
    }
    return o;
}
/**
 * Recursively validates every field name of a document (no '.', no leading
 * '$', no NUL character). Lazy objects are trusted as-is, like the driver does.
 */
private void _checkKeys(DBObject o) {
    if (o instanceof LazyDBObject || o instanceof LazyDBList) {
        return;
    }
    for (String key : o.keySet()) {
        validateKey(key);
        _checkValue(o.get(key));
    }
}
/**
 * Recursively validates every key of a plain map nested inside a document.
 */
private void _checkKeys(Map<String, Object> o) {
    for (Map.Entry<String, Object> entry : o.entrySet()) {
        validateKey(entry.getKey());
        _checkValue(entry.getValue());
    }
}
/** Validates each element of a list (elements may be nested documents/maps). */
private void _checkValues(final List list) {
    for (Object element : list) {
        _checkValue(element);
    }
}
// Dispatches key validation into nested documents, maps and lists.
// NOTE(review): a BasicDBObject is also a java.util.Map, so the DBObject
// branch must stay before the Map branch.
private void _checkValue(final Object value) {
    if (value instanceof DBObject) {
        _checkKeys((DBObject) value);
    } else if (value instanceof Map) {
        _checkKeys((Map<String, Object>) value);
    } else if (value instanceof List) {
        _checkValues((List) value);
    }
}
/**
 * Rejects field names MongoDB forbids: NUL characters, dots, or a leading '$'.
 *
 * @throws IllegalArgumentException when the key is invalid
 */
private void validateKey(String s) {
    if (s.contains("\0")) {
        throw new IllegalArgumentException("Document field names can't have a NULL character. (Bad Key: '" + s + "')");
    }
    if (s.contains(".")) {
        throw new IllegalArgumentException("Document field names can't have a . in them. (Bad Key: '" + s + "')");
    }
    if (s.startsWith("$")) {
        throw new IllegalArgumentException("Document field names can't start with '$' (Bad Key: '" + s + "')");
    }
}
/**
 * Extracts candidate _id values from a query that filters on _id only.
 * Returns an empty list when the query also uses other fields, or when _id is
 * bound to a $-operator other than $in ($in values are returned sorted).
 */
private List idsIn(DBObject query) {
    Object idValue = query != null ? query.get(ID_FIELD_NAME) : null;
    if (idValue == null || query.keySet().size() > 1) {
        return emptyList();
    } else if (ExpressionParser.isDbObject(idValue)) {
        DBObject idDbObject = ExpressionParser.toDbObject(idValue);
        Collection inList = (Collection) idDbObject.get(QueryOperators.IN);
        // I think sorting the inputed keys is a rough
        // approximation of how mongo creates the bounds for walking
        // the index. It has the desired affect of returning results
        // in _id index order, but feels pretty hacky.
        if (inList != null) {
            Object[] inListArray = inList.toArray(new Object[inList.size()]);
            // ids could be DBObjects, so we need a comparator that can handle that
            Arrays.sort(inListArray, objectComparator);
            return Arrays.asList(inListArray);
        }
        if (!isNotUpdateCommand(idValue)) {
            return emptyList();
        }
    }
    return Collections.singletonList(Util.clone(idValue));
}
/**
 * Builds the seed document for an upsert: the plain (non-$-operator) fields
 * of the query are merged into a fresh partial object, flattening any $and
 * clauses, so the inserted document matches the query.
 */
protected BasicDBObject createUpsertObject(DBObject q) {
    BasicDBObject newObject = new BasicDBObject();
    newObject.markAsPartialObject();
    // List idsIn = idsIn(q);
    // if (!idsIn.isEmpty()) {
    // newObject.put(ID_FIELD_NAME, Util.clone(idsIn.get(0)));
    // } else
    BasicDBObject filteredQuery = new BasicDBObject();
    for (String key : q.keySet()) {
        Object value = q.get(key);
        // $-operator conditions (e.g. {$gt: 5}) cannot seed a document
        if (isNotUpdateCommand(value)) {
            if ("$and".equals(key)) {
                List<DBObject> values = (List<DBObject>) value;
                for (DBObject dbObject : values) {
                    filteredQuery.putAll(dbObject);
                }
            } else {
                filteredQuery.put(key, value);
            }
        }
    }
    updateEngine.mergeEmbeddedValueFromQuery(newObject, filteredQuery);
    return newObject;
}
/**
 * Tells whether the value is a plain value/document rather than an update
 * command: false iff it is a document with at least one top-level key
 * starting with '$'.
 *
 * @param value any query/update fragment
 */
public boolean isNotUpdateCommand(Object value) {
    if (ExpressionParser.isDbObject(value)) {
        for (String innerKey : (ExpressionParser.toDbObject(value)).keySet()) {
            if (innerKey.startsWith("$")) {
                // One operator key decides the result: short-circuit instead
                // of scanning the remaining keys as the original did.
                return false;
            }
        }
    }
    return true;
}
// Convenience overload: remove without a custom encoder.
@Override
public WriteResult remove(final DBObject query, final WriteConcern writeConcern) {
    return this.remove(query, writeConcern, null);
}
/**
 * Removes every document matching the query.
 * <p>
 * Two passes: the matches are collected first, then removed from the
 * indexes, so the index collections are not mutated while being iterated.
 */
@Override
public synchronized WriteResult remove(DBObject o, WriteConcern concern, DBEncoder encoder) throws MongoException {
    o = filterLists(o);
    if (LOG.isDebugEnabled()) {
        LOG.debug("remove: " + o);
    }
    int updatedDocuments = 0;
    Collection<DBObject> objectsByIndex = filterByIndexes(o);
    Filter filter = buildFilter(o);
    List<DBObject> ids = new ArrayList<DBObject>();
    // Double pass, objectsByIndex can be not "objects"
    for (DBObject object : objectsByIndex) {
        if (filter.apply(object)) {
            ids.add(object);
        }
    }
    // Real remove.
    for (DBObject object : ids) {
        LOG.debug("remove object : {}", object);
        removeFromIndexes(object);
        updatedDocuments++;
    }
    return updateResult(updatedDocuments, true, null);
}
/**
 * Registers an index in the fake "system.indexes" collection and builds the
 * in-memory index over the existing documents.
 * <p>
 * NOTE(review): {@code options} is dereferenced unconditionally by
 * {@code rec.putAll(options)} below — presumably callers never pass null
 * here; verify before relying on it.
 */
@Override
public synchronized void createIndex(final DBObject keys, final DBObject options) {
    DBCollection indexColl = fongoDb.getCollection("system.indexes");
    BasicDBObject rec = new BasicDBObject();
    rec.append("v", 1);
    rec.append("key", keys);
    rec.append("ns", nsName());
    if (options != null && options.containsField("name")) {
        rec.append("name", options.get("name"));
    } else {
        // default index name: key1_dir1_key2_dir2...
        StringBuilder sb = new StringBuilder();
        boolean firstLoop = true;
        for (String keyName : keys.keySet()) {
            if (!firstLoop) {
                sb.append("_");
            }
            sb.append(keyName).append("_").append(keys.get(keyName));
            firstLoop = false;
        }
        rec.append("name", sb.toString());
    }
    // Ensure index doesn't exist.
    final DBObject oldIndex = indexColl.findOne(rec);
    if (oldIndex != null) {
        // re-creating the same index with different options is an error (code 85)
        for (Map.Entry<String, Object> entry : Util.entrySet(options)) {
            if (!entry.getValue().equals(oldIndex.get(entry.getKey()))) {
                fongoDb.notOkErrorResult(85, String.format("Index with name: %s already exists with different options", nsName())).throwOnError();
            }
        }
        return;
    }
    // Unique index must not be in previous find.
    boolean unique = options != null && options.get("unique") != null && (Boolean.TRUE.equals(options.get("unique")) || "1".equals(options.get("unique")) || Integer.valueOf(1).equals(options.get("unique")));
    if (unique) {
        rec.append("unique", unique);
    }
    rec.putAll(options);
    try {
        IndexAbstract index = IndexFactory.create((String) rec.get("name"), keys, unique);
        @SuppressWarnings("unchecked") List<List<Object>> notUnique = index.addAll(_idIndex.values());
        if (!notUnique.isEmpty()) {
            // Duplicate key.
            if (enforceDuplicates(getWriteConcern())) {
                fongoDb.notOkErrorResult(11000, "E11000 duplicate key error index: " + getFullName() + ".$" + rec.get("name") + " dup key: { : " + notUnique + " }").throwOnError();
            }
            return;
        }
        indexes.add(index);
    } catch (MongoException me) {
        fongoDb.errorResult(me.getCode(), me.getMessage()).throwOnError();
    }
    // Add index if all fine.
    indexColl.insert(rec);
}
// Single-document find: wraps the query in a $query envelope (plus $orderby
// when a sort is given) and delegates to __find with limit 1.
// NOTE(review): maxTime/maxTimeUnit are accepted but not used in this fake.
// @Override
DBObject findOne(final DBObject pRef, final DBObject projection, final DBObject sort,
                 final ReadPreference readPreference, final long maxTime, final TimeUnit maxTimeUnit) {
    final DBObject query = new BasicDBObject("$query", pRef);
    if (sort != null) {
        query.put("$orderby", sort);
    }
    final List<DBObject> objects = __find(query, projection, 0, 1, 1, 0, readPreference, null, null);
    return objects.size() > 0 ? replaceWithObjectClass(objects.get(0)) : null;
}
/**
 * Used for older compatibility.
 * <p/>
 * note: encoder, decoder, readPref, options are ignored — this overload only
 * forwards to the 8-argument variant.
 */
List<DBObject> __find(DBObject ref, DBObject fields, int numToSkip, int batchSize, int limit, int options,
                      ReadPreference readPref, DBDecoder decoder, DBEncoder encoder) {
    return __find(ref, fields, numToSkip, batchSize, limit, options, readPref, decoder);
}
/** Returns a cursor over the whole collection (empty query). */
@Override
public DBCursor find() {
    return find(new BasicDBObject());
}
/** Returns a cursor over the documents matching the query, no projection. */
@Override
public DBCursor find(final DBObject query) {
    return find(query, null);
}
// Creates the Fongo-specific cursor; evaluation is deferred to the cursor.
public DBCursor find(final DBObject query, final DBObject projection) {
    return new FongoDBCursor(this, query, projection);
}
/**
* Used for older compatibility.
* <p/>
* note: decoder, readPref, options are ignored
*/
synchronized List<DBObject> __find(final DBObject pRef, DBObject fields, int numToSkip, int batchSize, int limit,
int options, ReadPreference readPref, DBDecoder decoder) throws MongoException {
DBObject ref = filterLists(pRef == null ? new BasicDBObject() : pRef);
long maxScan = Long.MAX_VALUE;
if (LOG.isDebugEnabled()) {
LOG.debug("find({}, {}).skip({}).limit({})", ref, fields, numToSkip, limit);
LOG.debug("the db {} looks like {}", this.getDB().getName(), _idIndex.size());
}
DBObject orderby = null;
if (ref.containsField("$orderby")) {
orderby = ExpressionParser.toDbObject(ref.get("$orderby"));
}
if (ref.containsField("$maxScan")) {
maxScan = ((Number) ref.get("$maxScan")).longValue();
}
if (ref.containsField("$query")) {
ref = ExpressionParser.toDbObject(ref.get("$query"));
}
Filter filter = buildFilter(ref);
int foundCount = 0;
int upperLimit = Integer.MAX_VALUE;
if (limit > 0) {
upperLimit = limit;
}
Collection<DBObject> objectsFromIndex = filterByIndexes(ref);
List<DBObject> results = new ArrayList<DBObject>();
List objects = idsIn(ref);
if (!objects.isEmpty()) {
// if (!(ref.get(ID_FIELD_NAME) instanceof DBObject)) {
// Special case : find({id:<val}) doesn't handle skip...
// But : find({_id:{$in:[1,2,3]}).skip(3) will return empty list.
// numToSkip = 0;
if (orderby == null) {
orderby = new BasicDBObject(ID_FIELD_NAME, 1);
} else {
// Special case : if order by is wrong (field doesn't exist), the sort must be directed by _id.
objectsFromIndex = sortObjects(new BasicDBObject(ID_FIELD_NAME, 1), objectsFromIndex);
}
}
int seen = 0;
Iterable<DBObject> objectsToSearch = sortObjects(orderby, objectsFromIndex);
for (Iterator<DBObject> iter = objectsToSearch.iterator();
iter.hasNext() && foundCount < upperLimit && maxScan
DBObject dbo = iter.next();
if (filter.apply(dbo)) {
if (seen++ >= numToSkip) {
foundCount++;
DBObject clonedDbo = Util.clone(dbo);
if (nonIdCollection) {
clonedDbo.removeField(ID_FIELD_NAME);
}
clonedDbo.removeField(FONGO_SPECIAL_ORDER_BY);
// handleDBRef(clonedDbo);
results.add(clonedDbo);
}
}
}
if (!Util.isDBObjectEmpty(fields)) {
results = applyProjections(results, fields);
}
LOG.debug("found results {}", results);
return replaceWithObjectClass(results);
}
/**
 * Restrict the candidate documents using the most selective applicable index.
 *
 * @return documents retrieved from a matching index, or every document (the "_id"
 *         index values) when no index can serve the query.
 */
private Collection<DBObject> filterByIndexes(DBObject ref) {
    if (ref != null) {
        final IndexAbstract matchingIndex = searchIndex(ref);
        if (matchingIndex != null) {
            //noinspection unchecked
            final Collection<DBObject> fromIndex = matchingIndex.retrieveObjects(ref);
            if (LOG.isDebugEnabled()) {
                LOG.debug("restrict with index {}, from {} to {} elements", matchingIndex.getName(), _idIndex.size(), fromIndex == null ? 0 : fromIndex.size());
            }
            if (fromIndex != null) {
                return fromIndex;
            }
        }
    }
    //noinspection unchecked
    return _idIndex.values();
}

/**
 * Apply {@code projection} to every result, keeping only documents that survive it.
 */
private List<DBObject> applyProjections(List<DBObject> results, DBObject projection) {
    final List<DBObject> projected = new ArrayList<DBObject>(results.size());
    for (final DBObject candidate : results) {
        final DBObject match = applyProjections(candidate, projection);
        if (match != null) {
            projected.add(match);
        }
    }
    return projected;
}
/**
 * Copy the value found at {@code path} (starting at {@code startIndex}) from {@code dbo}
 * into {@code ret}, materialising intermediate sub-documents/lists along the way.
 * Used to build the result of an inclusion projection such as {@code {"a.b.c": 1}}.
 */
private static void addValuesAtPath(BasicDBObject ret, DBObject dbo, List<String> path, int startIndex) {
    String subKey = path.get(startIndex);
    Object value = dbo.get(subKey);
    if (path.size() > startIndex + 1) {
        // Not at the leaf yet: descend into a sub-document, or into each element of a list.
        if (ExpressionParser.isDbObject(value) && !(value instanceof List)) {
            // Reuse a sub-document already placed in the result by an earlier projection path.
            BasicDBObject nb = (BasicDBObject) ret.get(subKey);
            if (nb == null) {
                nb = new BasicDBObject();
            }
            ret.append(subKey, nb);
            addValuesAtPath(nb, ExpressionParser.toDbObject(value), path, startIndex + 1);
        } else if (value instanceof List) {
            BasicDBList list = getListForKey(ret, subKey);
            // idx tracks the source position so repeated projection paths merge into the
            // same result entry. NOTE(review): alignment assumes list elements are
            // documents; non-document elements advance idx without adding an entry —
            // confirm mixed lists cannot occur here.
            int idx = 0;
            for (Object v : (List) value) {
                if (ExpressionParser.isDbObject(v)) {
                    BasicDBObject nb;
                    if (list.size() > idx) {
                        nb = (BasicDBObject) list.get(idx);
                    } else {
                        nb = new BasicDBObject();
                        list.add(nb);
                    }
                    addValuesAtPath(nb, ExpressionParser.toDbObject(v), path, startIndex + 1);
                }
                idx++;
            }
        }
    } else if (value != null) {
        // Leaf of the projection path: copy the value (absent fields are simply skipped).
        ret.append(subKey, value);
    }
}
/**
 * Fetch the list stored under {@code subKey} in {@code ret}, creating and attaching an
 * empty {@link BasicDBList} when the key is not present yet.
 */
private static BasicDBList getListForKey(BasicDBObject ret, String subKey) {
    if (!ret.containsField(subKey)) {
        final BasicDBList created = new BasicDBList();
        ret.append(subKey, created);
        return created;
    }
    return (BasicDBList) ret.get(subKey);
}
/**
 * Rewrap {@code resultObject} into an instance of this collection's configured object
 * class. When no object class is configured (or the input is {@code null}) the input is
 * returned untouched.
 *
 * @param resultObject the original result value from the command.
 * @return the converted document, or {@code resultObject} itself when no conversion applies.
 */
private DBObject replaceWithObjectClass(DBObject resultObject) {
    if (resultObject == null || getObjectClass() == null) {
        return resultObject;
    }
    final DBObject converted = instantiateObjectClassInstance();
    for (final String fieldName : resultObject.keySet()) {
        converted.put(fieldName, resultObject.get(fieldName));
    }
    return converted;
}

/** List variant of {@link #replaceWithObjectClass(DBObject)}: converts every element. */
private List<DBObject> replaceWithObjectClass(List<DBObject> resultObjects) {
    final List<DBObject> converted = new ArrayList<DBObject>(resultObjects.size());
    for (final DBObject resultObject : resultObjects) {
        converted.add(replaceWithObjectClass(resultObject));
    }
    return converted;
}
/**
 * Create a fresh instance of the configured object class.
 *
 * @return a new, empty instance of the object class.
 * @throws MongoInternalException when the class cannot be instantiated (no accessible
 *                                no-arg constructor, abstract class, etc.).
 */
private DBObject instantiateObjectClassInstance() {
    try {
        return ExpressionParser.toDbObject(getObjectClass().newInstance());
    } catch (InstantiationException | IllegalAccessException e) {
        // Multi-catch: both failure modes previously duplicated the identical handler.
        throw new MongoInternalException("Can't create instance of type: " + getObjectClass(), e);
    }
}
/**
 * Apply a MongoDB projection document to a single result.
 * Handles plain inclusion/exclusion specs (numbers and booleans), the special
 * {@code _id} rules, and the {@code $elemMatch} / {@code $slice} operators.
 *
 * @param result           the document to project; may be {@code null}.
 * @param projectionObject the projection specification; empty means "whole document".
 * @return the projected document, or {@code null} when nothing can be returned.
 * @throws IllegalArgumentException when inclusion and exclusion are mixed (other than on
 *                                  {@code _id}) or when a projection value is not understood.
 */
public static DBObject applyProjections(DBObject result, DBObject projectionObject) {
    LOG.debug("applying projections {}", projectionObject);
    if (Util.isDBObjectEmpty(projectionObject)) {
        // Empty projection: return the whole document (cloned, _id first), or null if empty.
        if (Util.isDBObjectEmpty(result)) {
            return null;
        }
        return Util.cloneIdFirst(result);
    }
    if (result == null) {
        return null; // nothing to project.
    }
    int inclusionCount = 0;
    int exclusionCount = 0;
    // Keys whose spec is a sub-document ($elemMatch / $slice); handled in the second pass.
    List<String> projectionFields = new ArrayList<String>();
    boolean wasIdExcluded = false;
    // (field path, included?) pairs collected from plain 0/1/true/false specs.
    List<Tuple2<List<String>, Boolean>> projections = new ArrayList<Tuple2<List<String>, Boolean>>();
    for (String projectionKey : projectionObject.keySet()) {
        final Object projectionValue = projectionObject.get(projectionKey);
        boolean included = false;
        boolean project = false;
        if (projectionValue instanceof Number) {
            included = ((Number) projectionValue).intValue() > 0;
        } else if (projectionValue instanceof Boolean) {
            included = (Boolean) projectionValue;
        } else if (ExpressionParser.isDbObject(projectionValue)) {
            project = true;
            projectionFields.add(projectionKey);
        } else if (!projectionValue.toString().equals("text")) {
            // NOTE(review): a null projectionValue would NPE on toString() above before the
            // null-aware message below could apply — confirm null cannot reach this point.
            final String msg = "Projection `" + projectionKey
                + "' has a value that Fongo doesn't know how to handle: " + projectionValue
                + " (" + (projectionValue == null ? " " : projectionValue.getClass() + ")");
            throw new IllegalArgumentException(msg);
        }
        List<String> projectionPath = Util.split(projectionKey);
        // _id participates in neither counter: it may be excluded alongside inclusions.
        if (!ID_FIELD_NAME.equals(projectionKey)) {
            if (included) {
                inclusionCount++;
            } else if (!project) {
                exclusionCount++;
            }
        } else {
            wasIdExcluded = !included;
        }
        if (projectionPath.size() > 0) {
            projections.add(new Tuple2<List<String>, Boolean>(projectionPath, included));
        }
    }
    if (inclusionCount > 0 && exclusionCount > 0) {
        throw new IllegalArgumentException(
            "You cannot combine inclusion and exclusion semantics in a single projection with the exception of the _id field: "
                + projectionObject
        );
    }
    // Seed the result: exclusion mode starts from a full clone, inclusion mode from {_id}.
    BasicDBObject ret;
    if (exclusionCount > 0) {
        ret = (BasicDBObject) Util.clone(result);
    } else {
        ret = new BasicDBObject();
        if (!wasIdExcluded) {
            ret.append(ID_FIELD_NAME, Util.clone(result.get(ID_FIELD_NAME)));
        } else if (inclusionCount == 0) {
            // Only {_id: 0} was given: keep everything except _id.
            ret = (BasicDBObject) Util.clone(result);
            ret.removeField(ID_FIELD_NAME);
        }
    }
    // Apply the plain inclusions/exclusions collected above.
    for (Tuple2<List<String>, Boolean> projection : projections) {
        if (projection._1.size() == 1 && !projection._2) {
            ret.removeField(projection._1.get(0));
        } else {
            addValuesAtPath(ret, result, projection._1, 0);
        }
    }
    // Second pass: $elemMatch and $slice sub-projections.
    if (!projectionFields.isEmpty()) {
        for (String projectionKey : projectionObject.keySet()) {
            if (!projectionFields.contains(projectionKey)) {
                continue;
            }
            final Object projectionValue = projectionObject.get(projectionKey);
            final boolean isElemMatch =
                ((BasicDBObject) projectionObject.get(projectionKey)).containsField(QueryOperators.ELEM_MATCH);
            final boolean isSlice =
                ((BasicDBObject) projectionObject.get(projectionKey)).containsField(ExpressionParser.SLICE);
            if (isElemMatch) {
                // $elemMatch keeps only the first array element matching the sub-query.
                ret.removeField(projectionKey);
                List searchIn = ((BasicDBList) result.get(projectionKey));
                DBObject searchFor =
                    (BasicDBObject) ((BasicDBObject) projectionObject.get(projectionKey)).get(QueryOperators.ELEM_MATCH);
                String searchKey = (String) searchFor.keySet().toArray()[0];
                int pos = -1;
                for (int i = 0, length = searchIn.size(); i < length; i++) {
                    boolean matches;
                    DBObject fieldToSearch = (BasicDBObject) searchIn.get(i);
                    if (fieldToSearch.containsField(searchKey)) {
                        // An ObjectId and its String form are considered equal in either direction.
                        if (searchFor.get(searchKey) instanceof ObjectId
                            && fieldToSearch.get(searchKey) instanceof String) {
                            ObjectId m1 = new ObjectId(searchFor.get(searchKey).toString());
                            ObjectId m2 = new ObjectId(String.valueOf(fieldToSearch.get(searchKey)));
                            matches = m1.equals(m2);
                        } else if (searchFor.get(searchKey) instanceof String
                            && fieldToSearch.get(searchKey) instanceof ObjectId) {
                            ObjectId m1 = new ObjectId(String.valueOf(searchFor.get(searchKey)));
                            ObjectId m2 = new ObjectId(fieldToSearch.get(searchKey).toString());
                            matches = m1.equals(m2);
                        } else {
                            matches = fieldToSearch.get(searchKey).equals(searchFor.get(searchKey));
                        }
                        if (matches) {
                            pos = i;
                            break;
                        }
                    }
                }
                if (pos != -1) {
                    BasicDBList append = new BasicDBList();
                    append.add(searchIn.get(pos));
                    ret.append(projectionKey, append);
                    LOG.debug("$elemMatch projection of field \"{}\", gave result: {} ({})", projectionKey, ret, ret.getClass());
                }
            } else if (isSlice) {
                // A failed $slice (field absent) discards the whole document.
                if (!slice(result, projectionObject, projectionKey, projectionValue, ret)) {
                    ret = null;
                }
            } else {
                final String msg = "Projection `" + projectionKey
                    + "' has a value that Fongo doesn't know how to handle: " + projectionValue
                    + " (" + (projectionValue == null ? " " : projectionValue.getClass() + ")");
                throw new IllegalArgumentException(msg);
            }
        }
    }
    return ret;
}
/**
 * Apply a {@code $slice} projection for {@code projectionKey} to {@code result}, writing
 * the sliced array into {@code ret}.
 * Accepts either a single number (count; negative meaning "from the end") or a
 * two-element [skip, limit] array.
 *
 * @return false when the projected field is absent (the caller then discards the document).
 * @throws MongoException when the computed limit is negative.
 */
private static boolean slice(DBObject result, DBObject projectionObject, String projectionKey, Object projectionValue, BasicDBObject ret) throws MongoException {
    ret.removeField(projectionKey);
    List searchIn = ((BasicDBList) result.get(projectionKey));
    if (searchIn == null) {
        // Field is absent: clear the partial projection and signal failure.
        ret.clear();
        return false;
    }
    final BasicDBObject basicDBObject = (BasicDBObject) projectionObject.get(projectionKey);
    int start = 0;
    int limit;
    if (basicDBObject.get(ExpressionParser.SLICE) instanceof Number) {
        limit = ((Number) (basicDBObject.get(ExpressionParser.SLICE))).intValue();
        if (limit < 0) {
            // Negative count: take |limit| elements from the end of the array.
            start = limit;
            limit = -limit;
        }
    } else if (basicDBObject.get(ExpressionParser.SLICE) instanceof List) {
        List range = (List) basicDBObject.get(ExpressionParser.SLICE);
        if (range.size() != 2) {
            throw new IllegalArgumentException("$slice with an Array must have size of 2");
        }
        start = (Integer) range.get(0);
        limit = (Integer) range.get(1);
    } else {
        final String msg = "Projection `" + projectionKey
            + "' has a value that Fongo doesn't know how to handle: " + projectionValue
            + " (" + (projectionValue == null ? " " : projectionValue.getClass() + ")");
        throw new IllegalArgumentException(msg);
    }
    if (limit < 0) {
        throw new MongoException("Can't canonicalize query: BadValue $slice limit must be positive");
    }
    List slice = new BasicDBList();
    // startArray is 1-based (the loop reads searchIn.get(i - 1)); a negative start counts
    // back from the end, and both directions are clamped to the array bounds.
    final int startArray;
    if (start < 0) {
        startArray = Math.max(0, searchIn.size() + start) + 1;
    } else {
        startArray = Math.min(searchIn.size(), start) + 1;
    }
    for (int i = startArray, count = 0; i <= searchIn.size() && count < limit; i++, count++) {
        slice.add(searchIn.get(i - 1));
    }
    ret.put(projectionKey, slice);
    return true;
}
/**
 * Return the documents ordered according to {@code orderby}.
 * A {@code null} order falls back to the internal insertion-order marker
 * ({@code FONGO_SPECIAL_ORDER_BY}); an empty order document leaves the input untouched.
 */
public Collection<DBObject> sortObjects(final DBObject orderby, final Collection<DBObject> objects) {
    if (orderby == null) {
        // No explicit sort requested: order by the special insertion-order field.
        final Collection<DBObject> sorted = sortObjects(new BasicDBObject(FONGO_SPECIAL_ORDER_BY, 1), objects);
        if (LOG.isDebugEnabled()) {
            LOG.debug("sorted objectsToSearch " + sorted);
        }
        return sorted;
    }
    final Set<String> orderbyKeySet = orderby.keySet();
    if (orderbyKeySet.isEmpty()) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("sorted objectsToSearch " + objects);
        }
        return objects;
    }
    final DBObject[] sortable = objects.toArray(new DBObject[objects.size()]);
    Arrays.sort(sortable, new Comparator<DBObject>() {
        @Override
        public int compare(DBObject left, DBObject right) {
            // Compare key by key until one sort field discriminates.
            for (String sortKey : orderbyKeySet) {
                final List<String> path = Util.split(sortKey);
                final int sortDirection = (Integer) orderby.get(sortKey);
                final List<Object> leftValues = expressionParser.getEmbeddedValues(path, left);
                final List<Object> rightValues = expressionParser.getEmbeddedValues(path, right);
                final int compareValue = expressionParser.compareLists(leftValues, rightValues) * sortDirection;
                if (compareValue != 0) {
                    return compareValue;
                }
            }
            return 0;
        }
    });
    final Collection<DBObject> sorted = Arrays.asList(sortable);
    if (LOG.isDebugEnabled()) {
        LOG.debug("sorted objectsToSearch " + sorted);
    }
    return sorted;
}
/**
 * Count the documents matching {@code pQuery}, honouring {@code skip} and {@code limit}.
 * Projection, read preference, max time and hint are accepted for signature
 * compatibility only — the data lives in memory.
 */
// @Override
public synchronized long getCount(final DBObject pQuery, final DBObject projection, final long limit, final long skip,
                                  final ReadPreference readPreference, final long maxTime, final TimeUnit maxTimeUnit,
                                  final BsonValue hint) {
    final DBObject query = filterLists(pQuery);
    final Filter filter = query == null ? ExpressionParser.AllFilter : buildFilter(query);
    final long upperLimit = limit > 0 ? limit : Long.MAX_VALUE;
    long count = 0;
    int seen = 0;
    for (final Iterator<DBObject> iter = filterByIndexes(query).iterator(); iter.hasNext() && count < upperLimit; ) {
        final DBObject candidate = iter.next();
        // seen advances only for matching documents, so skip applies to matches.
        if (filter.apply(candidate) && seen++ >= skip) {
            count++;
        }
    }
    return count;
}

/** Count all matches; read preference is irrelevant for an in-memory collection. */
@Override
public synchronized long getCount(DBObject query, DBObject fields, ReadPreference readPrefs) {
    return getCount(query, fields, 0, 0);
}
/**
 * Atomically find one document matching {@code query} (ordered by {@code sort}) and
 * either remove it or apply {@code update} to it.
 *
 * @param query     selection filter.
 * @param fields    projection applied to the returned document (not applied on remove).
 * @param sort      order used to pick the first matching document.
 * @param remove    when true, the matched document is removed and returned as-is.
 * @param update    update specification applied when {@code remove} is false.
 * @param returnNew when true, return the post-update document; otherwise the pre-update one.
 * @param upsert    when true and nothing matches, insert a document derived from the query.
 * @return the selected document (projected), or {@code null} when nothing matched and no
 *         upsert applied.
 */
@Override
public synchronized DBObject findAndModify(DBObject query, DBObject fields, DBObject sort, boolean remove, DBObject update, boolean returnNew, boolean upsert) {
    LOG.debug("findAndModify({}, {}, {}, {}, {}, {}, {}", query, fields, sort, remove, update, returnNew, upsert);
    query = filterLists(query);
    update = filterLists(update);
    Filter filter = buildFilter(query);
    Iterable<DBObject> objectsToSearch = sortObjects(sort, filterByIndexes(query));
    DBObject beforeObject = null;
    DBObject afterObject = null;
    for (DBObject dbo : objectsToSearch) {
        if (filter.apply(dbo)) {
            beforeObject = dbo;
            if (!remove) {
                // Update path: mutate a clone, refresh the indexes, stop at the first match.
                afterObject = Util.clone(beforeObject);
                updateEngine.doUpdate(afterObject, update, query, false);
                addToIndexes(afterObject, beforeObject, getWriteConcern());
                break;
            } else {
                // Remove path: the removed document is returned without projection.
                remove(dbo);
                return dbo;
            }
        }
    }
    if (beforeObject != null && !returnNew) {
        return replaceWithObjectClass(applyProjections(beforeObject, fields));
    }
    if (beforeObject == null && upsert && !remove) {
        // Upsert path: derive the document from the query, apply the update, then insert.
        beforeObject = new BasicDBObject();
        afterObject = createUpsertObject(query);
        fInsert(updateEngine.doUpdate(afterObject, update, query, upsert), getWriteConcern());
    }
    final DBObject resultObject;
    if (returnNew) {
        resultObject = applyProjections(afterObject, fields);
    } else {
        resultObject = applyProjections(beforeObject, fields);
    }
    return replaceWithObjectClass(resultObject);
}
/**
 * Collect the distinct values of {@code key} across all documents matching {@code pQuery}.
 * List values are flattened one level; first-occurrence order is preserved.
 */
@Override
public synchronized List distinct(final String key, final DBObject pQuery, final ReadPreference readPreference) {
    final DBObject query = filterLists(pQuery);
    final Filter filter = buildFilter(query);
    final Set<Object> distinctValues = new LinkedHashSet<Object>();
    for (final DBObject candidate : filterByIndexes(query)) {
        if (!filter.apply(candidate)) {
            continue;
        }
        for (final Object keyValue : expressionParser.getEmbeddedValues(key, candidate)) {
            if (keyValue instanceof List) {
                distinctValues.addAll((List) keyValue);
            } else {
                distinctValues.add(keyValue);
            }
        }
    }
    //noinspection unchecked
    return new ArrayList(distinctValues);
}

/** Run an aggregation pipeline in memory; the read preference is irrelevant here. */
@Override
public AggregationOutput aggregate(final List<? extends DBObject> pipeline, final ReadPreference readPreference) {
    return new AggregationOutput(new Aggregator(this.fongoDb, this, pipeline).computeResult());
}

/** Emulate parallel scanning by handing out independent full-collection cursors. */
@Override
public List<Cursor> parallelScan(final ParallelScanOptions options) {
    final int cursorCount = options.getNumCursors();
    final List<Cursor> cursors = new ArrayList<Cursor>(cursorCount);
    for (int i = 0; i < cursorCount; i++) {
        cursors.add(new FongoDBCursor(this, new BasicDBObject(), new BasicDBObject()));
    }
    return cursors;
}
/**
 * Execute a batch of write requests (replace/update/remove/insert) sequentially and
 * aggregate the outcome into a single {@link BulkWriteResult}.
 * <p/>
 * Note: requests are always executed in order ({@code ordered == false} is not yet
 * honoured, see TODO below) and {@code bypassDocumentValidation} is ignored.
 *
 * @param writeRequests the batch; must not be empty.
 * @param aWriteConcern write concern to apply; {@code null} falls back to the collection's.
 * @return an acknowledged result carrying the per-kind counters and upserts, or an
 *         unacknowledged result when the effective write concern is unacknowledged.
 */
@Override
BulkWriteResult executeBulkWriteOperation(final boolean ordered, final Boolean bypassDocumentValidation,
                                          final List<WriteRequest> writeRequests,
                                          final WriteConcern aWriteConcern) {
    isTrueArgument("writes is not an empty list", !writeRequests.isEmpty());
    WriteConcern writeConcern = aWriteConcern == null ? getWriteConcern() : aWriteConcern;
    // TODO: unordered
    List<BulkWriteUpsert> upserts = new ArrayList<BulkWriteUpsert>();
    int insertedCount = 0;
    int matchedCount = 0;
    int removedCount = 0;
    int modifiedCount = 0;
    int idx = 0;
    for (WriteRequest req : writeRequests) {
        WriteResult wr;
        if (req instanceof ReplaceRequest) {
            // Whole-document replacement: never multi, may upsert.
            ReplaceRequest r = (ReplaceRequest) req;
            _checkObject(r.getDocument(), false, false);
            wr = update(r.getQuery(), r.getDocument(), r.isUpsert(), /* r.isMulti()*/ false, writeConcern, null);
            matchedCount += wr.getN();
            modifiedCount += wr.getN();
            if (!wr.isUpdateOfExisting()) {
                if (wr.getUpsertedId() != null) {
                    upserts.add(new BulkWriteUpsert(idx, wr.getUpsertedId()));
                }
            }
        } else if (req instanceof UpdateRequest) {
            UpdateRequest r = (UpdateRequest) req;
            // See com.mongodb.DBCollectionImpl.Run.executeUpdates()
            checkMultiUpdateDocument(r.getUpdate());
            wr = update(r.getQuery(), r.getUpdate(), r.isUpsert(), r.isMulti(), writeConcern, null);
            if (wr.isUpdateOfExisting()) {
                matchedCount += wr.getN();
                modifiedCount += wr.getN();
            } else {
                // Upserted documents count as upserts, not as matches.
                if (wr.getUpsertedId() != null) {
                    upserts.add(new BulkWriteUpsert(idx, wr.getUpsertedId()));
                    // insertedCount++;
                }
            }
        } else if (req instanceof RemoveRequest) {
            RemoveRequest r = (RemoveRequest) req;
            wr = remove(r.getQuery(), writeConcern, null);
            matchedCount += wr.getN();
            removedCount += wr.getN();
        } else if (req instanceof InsertRequest) {
            InsertRequest r = (InsertRequest) req;
            wr = insert(r.getDocument());
            insertedCount += wr.getN();
        } else {
            throw new NotImplementedException();
        }
        idx++;
    }
    if (!writeConcern.isAcknowledged()) {
        return new UnacknowledgedBulkWriteResult();
    }
    return new AcknowledgedBulkWriteResult(insertedCount, matchedCount, removedCount, modifiedCount, upserts);
}

/**
 * Legacy overload kept for compatibility: delegates with
 * {@code bypassDocumentValidation = false}.
 */
// @Override
@Deprecated
BulkWriteResult executeBulkWriteOperation(final boolean ordered, final List<WriteRequest> writeRequests,
                                          final WriteConcern aWriteConcern) {
    return executeBulkWriteOperation(ordered, false, writeRequests, aWriteConcern);
}

/**
 * Reject update documents containing plain (non-{@code $}-operator) top-level keys,
 * mirroring the validation referenced at the call site above.
 *
 * @throws IllegalArgumentException when a top-level key is not an operator.
 */
private void checkMultiUpdateDocument(DBObject updateDocument) throws IllegalArgumentException {
    for (String key : updateDocument.keySet()) {
        if (!key.startsWith("$")) {
            throw new IllegalArgumentException("Invalid BSON field name " + key);
        }
    }
}
/** Read this collection's index descriptors from {@code system.indexes}. */
@Override
public List<DBObject> getIndexInfo() {
    final BasicDBObject selector = new BasicDBObject();
    selector.put("ns", getFullName());
    final List<DBObject> indexInfo = new ArrayList<DBObject>();
    final DBCursor cursor = getDB().getCollection("system.indexes").find(selector);
    while (cursor.hasNext()) {
        indexInfo.add(cursor.next());
    }
    return indexInfo;
}

/**
 * Drop the index named {@code indexName}; the special name {@code "*"} drops every
 * non-_id index.
 */
@Override
public void dropIndex(final String indexName) {
    if ("*".equalsIgnoreCase(indexName)) {
        _dropIndexes();
    } else {
        _dropIndex(indexName);
    }
}

/**
 * Remove a single index, both from {@code system.indexes} and from the in-memory list.
 *
 * @throws MongoException when no index with that name exists in either place.
 */
protected synchronized void _dropIndex(String name) throws MongoException {
    final DBCollection indexColl = fongoDb.getCollection("system.indexes");
    final WriteResult wr = indexColl.remove(new BasicDBObject("name", name).append("ns", nsName()), WriteConcern.ACKNOWLEDGED);
    boolean dropped = wr.getN() == 1;
    // Also drop the live index instance; only the first name match is removed.
    final ListIterator<IndexAbstract> it = indexes.listIterator();
    while (it.hasNext()) {
        if (it.next().getName().equals(name)) {
            it.remove();
            dropped = true;
            break;
        }
    }
    if (!dropped) {
        fongoDb.notOkErrorResult("index not found with name [" + name + "]").throwOnError();
    }
}

/** @return the fully qualified namespace, {@code dbName.collectionName}. */
private String nsName() {
    return this.getDB().getName() + "." + this.getName();
}

/** Drop every index of this collection except the mandatory {@code _id_} index. */
protected synchronized void _dropIndexes() {
    // Snapshot the descriptors first to avoid concurrent modification while dropping.
    final List<DBObject> indexes = fongoDb.getCollection("system.indexes").find(new BasicDBObject("ns", nsName())).toArray();
    for (final DBObject index : indexes) {
        final String indexName = index.get("name").toString();
        if (!ID_NAME_INDEX.equals(indexName)) {
            dropIndexes(indexName);
        }
    }
}

/** Remove all documents and secondary indexes, then deregister this collection. */
@Override
public void drop() {
    _idIndex.clear();
    _dropIndexes(); // the _id index itself must stay.
    fongoDb.removeCollection(this);
}
/**
 * Find the most restrictive index able to answer {@code query}.
 *
 * @param query query whose fields drive the index selection.
 * @return the best matching index, or {@code null} when none applies.
 */
private synchronized IndexAbstract searchIndex(DBObject query) {
    IndexAbstract best = null;
    int bestFieldCount = -1;
    for (final IndexAbstract candidate : this.indexes) {
        if (!candidate.canHandle(query)) {
            continue;
        }
        // Prefer the index covering the most fields; a unique index beats a
        // non-unique one at equal or lower coverage.
        final boolean moreFields = candidate.getFields().size() > bestFieldCount;
        final boolean uniquenessUpgrade = best != null && !best.isUnique() && candidate.isUnique();
        if (moreFields || uniquenessUpgrade) {
            best = candidate;
            bestFieldCount = candidate.getFields().size();
        }
    }
    LOG.debug("searchIndex() found index {} for fields {}", best, query.keySet());
    return best;
}

/**
 * Find a geospatial index.
 *
 * @param unique when true, raise a server-style error if more than one 2d index exists.
 * @return the geo index, or {@code null} when none exists.
 */
private synchronized IndexAbstract searchGeoIndex(boolean unique) {
    IndexAbstract found = null;
    for (final IndexAbstract candidate : indexes) {
        if (!candidate.isGeoIndex()) {
            continue;
        }
        if (found != null && unique) {
            this.fongoDb.notOkErrorResult(-5, "more than one 2d index, not sure which to run geoNear on").throwOnError();
        }
        found = candidate;
        if (!unique) {
            break;
        }
    }
    LOG.debug("searchGeoIndex() found index {}", found);
    return found;
}
/**
 * Register {@code object} with every index that can hold it, replacing {@code oldObject}
 * when this is an update.
 * All indexes are checked for duplicate-key violations BEFORE any is modified; a
 * violation either throws (when the write concern enforces duplicates) or silently
 * ignores the write.
 *
 * @param object    new object to insert.
 * @param oldObject null if insert, old object if update.
 * @param concern   write concern deciding whether duplicate keys raise or are ignored.
 */
private void addToIndexes(DBObject object, DBObject oldObject, WriteConcern concern) {
    // Ensure "insert/update" create collection into "fongoDB"
    // First, try to see if index can add the new value.
    for (IndexAbstract index : indexes) {
        @SuppressWarnings("unchecked") List<List<Object>> error = index.checkAddOrUpdate(object, oldObject);
        if (!error.isEmpty()) {
            // TODO formatting : E11000 duplicate key error index: test.zip.$city_1_state_1_pop_1 dup key: { : "BARRE", : "MA", : 4546.0 }
            if (enforceDuplicates(concern)) {
                String err = "E11000 duplicate key error index: " + this.getFullName() + "." + index.getName() + " dup key : {" + error + " }";
                if (oldObject == null) {
                    // insert
                    throw fongoDb.duplicateKeyException(11000, err);
                } else {
                    // update (MongoDB throws a different exception in case of an update, see issue #200)
                    throw fongoDb.mongoCommandException(11000, err);
                }
            }
            return; // silently ignore.
        }
    }
    // Every index accepted the value: apply the change, with _id placed first.
    final DBObject idFirst = Util.cloneIdFirst(object);
    try {
        for (final IndexAbstract index : indexes) {
            if (index.canHandle(object)) {
                index.addOrUpdate(idFirst, oldObject);
            } else if (index.canHandle(oldObject))
                // In case of update and removing a field, we must remove from the index.
                index.remove(oldObject);
        }
    } catch (MongoException e) {
        LOG.info("", e);
        throw this.fongoDb.writeConcernException(e.getCode(), e.getMessage());
    }
    this.fongoDb.addCollection(this);
}
/**
 * Unregister {@code object} from every index currently able to hold it.
 *
 * @param object object to remove.
 */
private synchronized void removeFromIndexes(DBObject object) {
    for (final IndexAbstract index : indexes) {
        if (index.canHandle(object)) {
            index.remove(object);
        }
    }
}

/** @return an unmodifiable view of this collection's indexes. */
public synchronized Collection<IndexAbstract> getIndexes() {
    return Collections.unmodifiableList(indexes);
}

/**
 * In-memory geoNear: delegates to the single 2d index (fails via a server-style error
 * when none exists, or when several exist).
 * Defaults: empty query when {@code query} is null, limit 100 when {@code limit} is null.
 */
public synchronized List<DBObject> geoNear(Coordinate near, DBObject query, Number limit, Number maxDistance, boolean spherical) {
    final IndexAbstract matchingIndex = searchGeoIndex(true);
    if (matchingIndex == null) {
        fongoDb.notOkErrorResult(-5, "no geo indices for geoNear").throwOnError();
    }
    //noinspection ConstantConditions
    LOG.info("geoNear() near:{}, query:{}, limit:{}, maxDistance:{}, spherical:{}, use index:{}", near, query, limit, maxDistance, spherical, matchingIndex.getName());
    final Geometry geometry = GeoUtil.toGeometry(near);
    final DBObject effectiveQuery = query == null ? new BasicDBObject() : query;
    final int effectiveLimit = limit == null ? 100 : limit.intValue();
    return ((GeoIndex) matchingIndex).geoNear(effectiveQuery, geometry, effectiveLimit, spherical);
}

/** Full-text search over this collection; limit defaults to 100, projection to none. */
public synchronized DBObject text(String search, Number limit, DBObject project) {
    final DBObject projection = project == null ? new BasicDBObject() : project;
    final int effectiveLimit = limit == null ? 100 : limit.intValue();
    return new TextSearch(this).findByTextSearch(search, projection, effectiveLimit);
}
// TODO WDEL
// private QueryResultIterator createQueryResultIterator(Iterator<DBObject> values) {
// try {
// QueryResultIterator iterator = new ObjenesisStd().getInstantiatorOf(QueryResultIterator.class).newInstance();
// Field field = QueryResultIterator.class.getDeclaredField("_cur");
// field.setAccessible(true);
// field.set(iterator, values);
// return iterator;
// } catch (Exception e) {
// throw new RuntimeException(e);
/** @return the number of documents, straight from the _id index. */
@Override
public long count() {
    return _idIndex.size();
}

/**
 * Run a map/reduce job in memory, honouring the command's output target, query, sort
 * and limit.
 */
@Override
public MapReduceOutput mapReduce(final MapReduceCommand command) {
    DBObject out = new BasicDBObject();
    if (command.getOutputDB() != null) {
        out.put("db", command.getOutputDB());
    }
    if (command.getOutputType() != null) {
        // BUGFIX: use a locale-independent lowercase. The default-locale toLowerCase()
        // mangles enum names under e.g. the Turkish locale ("INLINE" -> "ınlıne"),
        // producing an invalid output-type key.
        out.put(command.getOutputType().name().toLowerCase(java.util.Locale.ROOT), command.getOutputTarget());
    }
    MapReduce mapReduce = new MapReduce(this.fongoDb.fongo, this, command.getMap(), command.getReduce(),
        command.getFinalize(), command.getScope(), out, command.getQuery(), command.getSort(), command.getLimit());
    return mapReduce.computeResult();
}
/** Decode a {@link BsonDocument} into a {@link DBObject}; {@code null} passes through. */
public static DBObject dbObject(BsonDocument document) {
    if (document == null) {
        return null;
    }
    return defaultDbObjectCodec().decode(new BsonDocumentReader(document), decoderContext());
}

/** Decode every document with {@code resultDecoder}, preserving order. */
public static <T> List<T> decode(final Iterable<DBObject> objects, Decoder<T> resultDecoder) {
    final List<T> decoded = new ArrayList<T>();
    for (final DBObject object : objects) {
        decoded.add(decode(object, resultDecoder));
    }
    return decoded;
}

/** Decode one {@link DBObject} with {@code resultDecoder}, going through its BSON form. */
public static <T> T decode(DBObject object, Decoder<T> resultDecoder) {
    return resultDecoder.decode(new BsonDocumentReader(bsonDocument(object)), decoderContext());
}

/** @return a default (empty) decoder context. */
public static DecoderContext decoderContext() {
    return DecoderContext.builder().build();
}

/** @return a default (empty) encoder context. */
public static EncoderContext encoderContext() {
    return EncoderContext.builder().build();
}

/** @return the driver's default codec registry. */
public static CodecRegistry defaultCodecRegistry() {
    return MongoClient.getDefaultCodecRegistry();
}

/** @return the default codec for {@link DBObject}. */
public static Codec<DBObject> defaultDbObjectCodec() {
    return defaultCodecRegistry().get(DBObject.class);
}

/** @return the default codec registered for {@code clazz}. */
public static <T> Codec<T> codec(Class<T> clazz) {
    return defaultCodecRegistry().get(clazz);
}

/** Extract and decode the sub-document under {@code key}, or {@code null} when absent. */
public static DBObject dbObject(final BsonDocument queryDocument, final String key) {
    if (!queryDocument.containsKey(key)) {
        return null;
    }
    return dbObject(queryDocument.getDocument(key));
}

/** Extract and decode the array under {@code key}, or {@code null} when absent. */
public static List<DBObject> dbObjects(final BsonDocument queryDocument, final String key) {
    if (!queryDocument.containsKey(key)) {
        return null;
    }
    final List<DBObject> decoded = new ArrayList<DBObject>();
    for (final BsonValue value : queryDocument.getArray(key)) {
        decoded.add(dbObject((BsonDocument) value));
    }
    return decoded;
}

/** Encode a {@link DBObject} into a {@link BsonDocument}; {@code null} passes through. */
public static BsonDocument bsonDocument(DBObject dbObject) {
    if (dbObject == null) {
        return null;
    }
    final BsonDocument bsonDocument = new BsonDocument();
    defaultDbObjectCodec().encode(new BsonDocumentWriter(bsonDocument), dbObject, encoderContext());
    return bsonDocument;
}

/** Encode every document; a {@code null} input yields {@code null}. */
public static List<BsonDocument> bsonDocuments(Iterable<DBObject> dbObjects) {
    if (dbObjects == null) {
        return null;
    }
    final List<BsonDocument> encoded = new ArrayList<BsonDocument>();
    for (final DBObject dbObject : dbObjects) {
        encoded.add(bsonDocument(dbObject));
    }
    return encoded;
}
/**
 * Convert a new-driver {@link com.mongodb.bulk.BulkWriteResult} into the legacy
 * {@link com.mongodb.BulkWriteResult} shape.
 */
static com.mongodb.BulkWriteResult translateBulkWriteResult(final com.mongodb.bulk.BulkWriteResult bulkWriteResult,
                                                            final Decoder<DBObject> decoder) {
    if (!bulkWriteResult.wasAcknowledged()) {
        return new UnacknowledgedBulkWriteResult();
    }
    final Integer modifiedCount = bulkWriteResult.isModifiedCountAvailable() ? bulkWriteResult.getModifiedCount() : null;
    return new AcknowledgedBulkWriteResult(bulkWriteResult.getInsertedCount(), bulkWriteResult.getMatchedCount(),
        bulkWriteResult.getDeletedCount(), modifiedCount,
        translateBulkWriteUpserts(bulkWriteResult.getUpserts(), decoder));
}

/** Convert new-driver upsert descriptors into their legacy counterparts. */
public static List<com.mongodb.BulkWriteUpsert> translateBulkWriteUpserts(final List<com.mongodb.bulk.BulkWriteUpsert> upserts,
                                                                          final Decoder<DBObject> decoder) {
    final List<com.mongodb.BulkWriteUpsert> legacy = new ArrayList<com.mongodb.BulkWriteUpsert>(upserts.size());
    for (final com.mongodb.bulk.BulkWriteUpsert upsert : upserts) {
        legacy.add(new com.mongodb.BulkWriteUpsert(upsert.getIndex(), getUpsertedId(upsert, decoder)));
    }
    return legacy;
}

/** Convert legacy upsert descriptors to the new-driver form ({@code decoder} unused). */
public static List<com.mongodb.bulk.BulkWriteUpsert> translateBulkWriteUpsertsToNew(final List<com.mongodb.BulkWriteUpsert> upserts,
                                                                                    final Decoder<BsonValue> decoder) {
    final List<com.mongodb.bulk.BulkWriteUpsert> translated = new ArrayList<com.mongodb.bulk.BulkWriteUpsert>(upserts.size());
    for (final com.mongodb.BulkWriteUpsert upsert : upserts) {
        // Round-trip the id through a BSON document to obtain its BsonValue form.
        final BsonDocument document = bsonDocument(new BasicDBObject("_id", upsert.getId()));
        translated.add(new com.mongodb.bulk.BulkWriteUpsert(upsert.getIndex(), document.get("_id")));
    }
    return translated;
}

/** Decode the upserted id of {@code cur} back into a plain Java value. */
public static Object getUpsertedId(final com.mongodb.bulk.BulkWriteUpsert cur, final Decoder<DBObject> decoder) {
    return decoder.decode(new BsonDocumentReader(new BsonDocument("_id", cur.getId())), decoderContext()).get("_id");
}

/** Translate a new-driver bulk-write exception into the legacy exception type. */
public static BulkWriteException translateBulkWriteException(final MongoBulkWriteException e, final Decoder<DBObject> decoder) {
    return new BulkWriteException(translateBulkWriteResult(e.getWriteResult(), decoder), translateWriteErrors(e.getWriteErrors()),
        translateWriteConcernError(e.getWriteConcernError()), e.getServerAddress());
}

/** Translate a write-concern error, tolerating {@code null}. */
public static WriteConcernError translateWriteConcernError(final com.mongodb.bulk.WriteConcernError writeConcernError) {
    if (writeConcernError == null) {
        return null;
    }
    return new WriteConcernError(writeConcernError.getCode(), writeConcernError.getMessage(),
        dbObject(writeConcernError.getDetails()));
}

/** Translate each new-driver bulk-write error into the legacy error type. */
public static List<BulkWriteError> translateWriteErrors(final List<com.mongodb.bulk.BulkWriteError> errors) {
    final List<BulkWriteError> legacy = new ArrayList<BulkWriteError>(errors.size());
    for (final com.mongodb.bulk.BulkWriteError error : errors) {
        legacy.add(new BulkWriteError(error.getCode(), error.getMessage(), dbObject(error.getDetails()), error.getIndex()));
    }
    return legacy;
}

/** Convert legacy write requests to new-driver requests ({@code objectCodec} unused). */
public static List<com.mongodb.bulk.WriteRequest> translateWriteRequestsToNew(final List<com.mongodb.WriteRequest> writeRequests,
                                                                              final Codec<DBObject> objectCodec) {
    final List<com.mongodb.bulk.WriteRequest> translated = new ArrayList<com.mongodb.bulk.WriteRequest>(writeRequests.size());
    for (final com.mongodb.WriteRequest request : writeRequests) {
        translated.add(request.toNew());
    }
    return translated;
}
/**
 * Compile {@code q} into a {@link Filter}, surfacing parser failures that carry an
 * error code as server-style error results.
 */
private Filter buildFilter(DBObject q) {
    try {
        return expressionParser.buildFilter(q);
    } catch (FongoException e) {
        // Coded failures are reported the way a real server would report them.
        if (e.getCode() != null) {
            this.fongoDb.notOkErrorResult(e.getCode(), e.getMessage()).throwOnError();
        }
        throw e;
    }
}
}
|
package javax.tv.service.selection;

import javax.tv.xlet.XletContext;

import org.bluray.ti.selection.TitleContextImpl;

/**
 * Singleton {@link ServiceContextFactory} for a platform that exposes exactly one
 * service context: the title context.
 */
public class ServiceContextFactoryImpl extends ServiceContextFactory {
    protected ServiceContextFactoryImpl() {
        // Exactly one service context exists on this platform.
        serviceContexts = new ServiceContext[1];
        serviceContexts[0] = new TitleContextImpl();
    }

    /** @return the lazily created singleton factory instance. */
    public static ServiceContextFactory getInstance() {
        synchronized (ServiceContextFactoryImpl.class) {
            if (instance == null) {
                instance = new ServiceContextFactoryImpl();
            }
            return instance;
        }
    }

    /** Drop the singleton so the next {@link #getInstance()} builds a fresh factory. */
    public static void shutdown() {
        synchronized (ServiceContextFactoryImpl.class) {
            instance = null;
        }
    }

    /**
     * Always fails: the single service context is pre-created and no more can be made.
     *
     * @throws InsufficientResourcesException always, once the permission check passes.
     * @throws SecurityException when the caller lacks
     *                           {@code ServiceContextPermission("create", "own")}.
     */
    public ServiceContext createServiceContext()
        throws InsufficientResourcesException, SecurityException
    {
        SecurityManager sec = System.getSecurityManager();
        if (sec != null) {
            sec.checkPermission(new ServiceContextPermission("create", "own"));
        }
        throw new InsufficientResourcesException("Only one ServiceContext allowed");
    }

    /**
     * Return the single service context. The {@code context} argument is not inspected:
     * every caller shares the one title context.
     *
     * @throws SecurityException when the caller lacks
     *                           {@code ServiceContextPermission("access", "own")}.
     */
    public ServiceContext getServiceContext(XletContext context)
        throws SecurityException, ServiceContextException
    {
        SecurityManager sec = System.getSecurityManager();
        if (sec != null) {
            sec.checkPermission(new ServiceContextPermission("access", "own"));
        }
        return serviceContexts[0];
    }

    /**
     * @return the accessible service contexts.
     *         BUGFIX: returns a defensive copy — the previous version handed out the
     *         internal array, letting any caller mutate the singleton's state.
     */
    public ServiceContext[] getServiceContexts() {
        SecurityManager sec = System.getSecurityManager();
        if (sec != null) {
            sec.checkPermission(new ServiceContextPermission("access", "own"));
        }
        return serviceContexts.clone();
    }

    private ServiceContext[] serviceContexts;

    private static ServiceContextFactoryImpl instance = null;
}
|
package com.mongodb.dibs;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.simpleemail.AmazonSimpleEmailServiceClient;
import com.amazonaws.services.simpleemail.model.Content;
import com.amazonaws.services.simpleemail.model.Destination;
import com.amazonaws.services.simpleemail.model.Message;
import com.amazonaws.services.simpleemail.model.SendEmailRequest;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.base.Charsets;
import com.mongodb.dibs.model.Order;
import io.dropwizard.views.View;
import org.bson.types.ObjectId;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.mongodb.morphia.Datastore;
import org.mongodb.morphia.query.MorphiaIterator;
import org.mongodb.morphia.query.Query;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import java.io.IOException;
import java.text.ParseException;
import java.util.*;
@Path("/")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public class DibsResource {
    private static final String OK_RESPONSE = "{\"ok\": 1}";
    private final Datastore ds;
    private DibsConfiguration configuration;
    private AmazonSimpleEmailServiceClient sesClient;
    private JacksonMapper mapper = new JacksonMapper();

    /**
     * @param configuration application configuration; may be null, in which case
     *                      no email client is built and notifications are skipped
     * @param ds            Morphia datastore backing order persistence
     */
    public DibsResource(final DibsConfiguration configuration, final Datastore ds) {
        this.ds = ds;
        if (configuration != null) {
            this.configuration = configuration;
            // Only build an SES client when both halves of the AWS credentials exist.
            if (configuration.getAwsCredentials().getAccessKey() != null &&
                configuration.getAwsCredentials().getSecretKey() != null) {
                final BasicAWSCredentials creds = new BasicAWSCredentials(
                    configuration.getAwsCredentials().getAccessKey(),
                    configuration.getAwsCredentials().getSecretKey());
                final ClientConfiguration awsConf = new ClientConfiguration();
                awsConf.setConnectionTimeout(30000);
                awsConf.setMaxConnections(200);
                awsConf.setMaxErrorRetry(2);
                awsConf.setSocketTimeout(30000);
                this.sesClient = new AmazonSimpleEmailServiceClient(creds, awsConf);
            }
        }
    }

    /** Serves the landing page. */
    @GET
    @Produces("text/html;charset=ISO-8859-1")
    public View index() {
        return new View("/index.ftl", Charsets.ISO_8859_1) {
        };
    }

    /** Serves the dibs page. */
    @GET
    @Path("dibs")
    @Produces("text/html;charset=ISO-8859-1")
    public View dibs() {
        return new View("/dibs.ftl", Charsets.ISO_8859_1) {
        };
    }

    /**
     * Lists orders expected on the given day, filtered by type.
     *
     * @param dateString day in yyyy-MM-dd format
     * @param type       "group", "upForGrabs", or anything else for single orders
     * @return JSON: vendor names for group orders, otherwise the order list
     */
    @GET
    @Path("/orders/{date}/{type}")
    @Produces(MediaType.APPLICATION_JSON)
    public String findOrders(@PathParam("date") String dateString, @PathParam("type") String type) throws IOException, ParseException {
        DateTime dateTime = DateTime.parse(dateString, DateTimeFormat.forPattern("yyyy-MM-dd"));
        DateTime next = dateTime.plusDays(1);
        boolean groupOrder = type.equalsIgnoreCase("group");
        boolean upForGrabs = type.equalsIgnoreCase("upForGrabs");
        Query<Order> query = ds.createQuery(Order.class)
            .filter("group", groupOrder)
            .field("expectedAt").greaterThanOrEq(dateTime.toDate())
            .field("expectedAt").lessThan(next.toDate());
        return groupOrder ? findGroupOrders(query) : (upForGrabs ? findUpForGrabs(query) : findSingleOrders(query));
    }

    /**
     * Notifies everyone with an order from the given vendor on the given day
     * that delivery happened, and stamps each order as delivered.
     */
    @POST
    @Path("/notify/{date}/vendor/")
    @Produces(MediaType.APPLICATION_JSON)
    public String notifyGroup(@PathParam("date") final String dateString, final String vendor) throws ParseException {
        final DateTime dateTime = DateTime.parse(dateString, DateTimeFormat.forPattern("yyyy-MM-dd"));
        final DateTime next = dateTime.plusDays(1);
        final Query<Order> query = ds.createQuery(Order.class)
            .filter("vendor", vendor)
            .field("expectedAt").greaterThanOrEq(dateTime.toDate())
            .field("expectedAt").lessThan(next.toDate());
        for (final Order o : query.fetch()) {
            markDelivered(o);
        }
        return OK_RESPONSE;
    }

    /** Notifies the recipient of a single order and stamps it as delivered. */
    @POST
    @Path("/notify/order")
    @Produces(MediaType.APPLICATION_JSON)
    public String notifyOrder(final String orderId) throws ParseException {
        final Order order = ds.createQuery(Order.class)
            .filter("_id", new ObjectId(orderId)).get();
        if (order != null) {
            markDelivered(order);
        }
        return OK_RESPONSE;
    }

    /** Claims an up-for-grabs order and notifies the claimer. */
    @POST
    @Path("/claim")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public String claim(final Order claimed) {
        final Order order = ds.createQuery(Order.class)
            .filter("_id", claimed.getId()).get();
        if (order != null) {
            // NOTE(review): this condition looks inverted — it notifies only when
            // the order is ALREADY claimed, and never copies the new claimer onto
            // the order (see the commented-out setClaimedBy below). Behavior kept
            // as-is; confirm intent against the frontend before changing.
            if (order.getUpForGrabs() && order.getClaimedBy() != null) {
                // order.setClaimedBy(claimed.ge);
                notifyClaim(order.getClaimedBy(), order);
                ds.save(order);
            } else {
                // TODO throw exception for order being claimed or not being up for grabs
            }
        }
        return OK_RESPONSE;
    }

    /**
     * Emails the order's claimer (preferred) or orderer that delivery happened,
     * stamps the delivery time, and persists the order.
     * BUG FIX: notifyGroup previously stamped setDeliveredAt but never saved the
     * order (notifyOrder did); the save now lives in this shared helper.
     */
    private void markDelivered(final Order order) {
        if (order.getClaimedBy() != null) {
            notifyDelivery(order.getClaimedBy(), order);
        } else if (order.getOrderedBy() != null) {
            notifyDelivery(order.getOrderedBy(), order);
        }
        order.setDeliveredAt(new Date());
        ds.save(order);
    }

    private void notifyDelivery(final String email, final Order order) {
        notify(email, order.getVendor() + " delivered. (EOM)");
    }

    private void notifyClaim(final String email, final Order order) {
        notify(email, "You have claimed the order for " + order.getOrderedBy() + ". (EOM)");
    }

    /** Sends a subject-only email via SES; silently does nothing when no client is configured. */
    private void notify(final String email, final String subject) {
        final SendEmailRequest request = new SendEmailRequest();
        request.setDestination(new Destination(Collections.singletonList(email)));
        request.setSource("donotreply@10gen.com");
        final Message message = new Message();
        message.withSubject(new Content().withData(subject));
        request.setMessage(message);
        if (sesClient != null) {
            sesClient.sendEmail(request);
        }
    }

    /** Returns non-group, non-up-for-grabs orders as JSON, sorted by orderer. */
    private String findSingleOrders(final Query<Order> query) throws JsonProcessingException {
        MorphiaIterator<Order, Order> iterator = query
            .field("group").equal(false)
            .field("upForGrabs").equal(false)
            .order("orderedBy")
            .fetch();
        List<Order> orders = new ArrayList<>();
        try {
            for (Order order : iterator) {
                orders.add(order);
            }
        } finally {
            iterator.close();
        }
        return mapper.writeValueAsString(orders);
    }

    /** Returns up-for-grabs orders as JSON, sorted by orderer. */
    private String findUpForGrabs(final Query<Order> query) throws JsonProcessingException {
        MorphiaIterator<Order, Order> iterator = query
            .field("upForGrabs").equal(true)
            .order("orderedBy").fetch();
        List<Order> orders = new ArrayList<>();
        try {
            for (Order order : iterator) {
                orders.add(order);
            }
        } finally {
            iterator.close();
        }
        return mapper.writeValueAsString(orders);
    }

    /** Returns the sorted, de-duplicated vendor names of group orders as JSON. */
    private String findGroupOrders(final Query<Order> query) throws JsonProcessingException {
        MorphiaIterator<Order, Order> iterator = query
            .field("group").equal(true)
            .field("upForGrabs").equal(false)
            .fetch();
        Set<String> vendors = new TreeSet<>();
        try {
            for (Order order : iterator) {
                vendors.add(order.getVendor());
            }
        } finally {
            iterator.close();
        }
        return mapper.writeValueAsString(vendors);
    }
}
|
package com.mararok.epicwar.control.point.internal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.logging.Level;
import org.bukkit.scheduler.BukkitRunnable;
import com.mararok.epicwar.War;
import com.mararok.epicwar.control.ControlPoint;
import com.mararok.epicwar.control.ControlPointData;
import com.mararok.epicwar.control.ControlPointManager;
public class ControlPointManagerImpl implements ControlPointManager {
  /** Sparse id -> control point lookup; index 0 is unused (ids start at 1). May contain null gaps. */
  private ArrayList<ControlPoint> controlPoints;
  private Collection<ControlPointImpl> occupiedControlPoints;
  private UpdateTask updateTask;
  private ControlPointMapper mapper;
  private War war;

  public ControlPointManagerImpl(ControlPointMapper mapper, War war) throws Exception {
    controlPoints = new ArrayList<ControlPoint>();
    this.mapper = mapper;
    this.war = war;
    loadAll();
  }

  /** Loads all persisted control points and starts the occupation update task unless in edit mode. */
  private void loadAll() throws Exception {
    Collection<ControlPointImpl> collection = mapper.findAll();
    controlPoints.ensureCapacity(collection.size() + 1);
    for (ControlPoint controlPoint : collection) {
      putById(controlPoint);
    }
    if (!war.getSettings().editMode) {
      occupiedControlPoints = new LinkedList<ControlPointImpl>();
      updateTask = this.new UpdateTask();
      updateTask.runTaskTimer(war.getPlugin(), 0, war.getSettings().controlPoint.updateInterval);
    }
  }

  /**
   * Stores a control point at the index matching its id.
   * BUG FIX: the previous code called set(id, ..) right after ensureCapacity, but
   * ArrayList.ensureCapacity only reserves backing storage without growing size(),
   * so set() threw IndexOutOfBoundsException for any id >= size(). Pad with nulls
   * up to the id first so ids can be used directly as indices.
   */
  private void putById(ControlPoint controlPoint) {
    int id = controlPoint.getId();
    while (controlPoints.size() <= id) {
      controlPoints.add(null);
    }
    controlPoints.set(id, controlPoint);
  }

  @Override
  public ControlPoint findById(int id) {
    // Returns null for out-of-range ids and for gaps in the id sequence.
    return (id > 0 && id < controlPoints.size()) ? controlPoints.get(id) : null;
  }

  @Override
  public Collection<ControlPoint> findAll() {
    // NOTE(review): the view may contain null gap entries (index 0, deleted ids) —
    // callers should be prepared to skip nulls; confirm against call sites.
    return Collections.unmodifiableCollection(controlPoints);
  }

  @Override
  public ControlPoint create(ControlPointData data) throws Exception {
    ControlPointImpl controlPoint = mapper.insert(data);
    putById(controlPoint);
    return controlPoint;
  }

  @Override
  public void update(ControlPoint controlPoint) throws Exception {
    ControlPointImpl entity = (ControlPointImpl) controlPoint;
    mapper.update(entity);
    entity.clearChanges();
  }

  @Override
  public War getWar() {
    return war;
  }

  /** Periodic Bukkit task that advances the occupation state of every occupied point. */
  private class UpdateTask extends BukkitRunnable {
    @Override
    public void run() {
      try {
        for (ControlPointImpl controlPoint : occupiedControlPoints) {
          controlPoint.getOccupation().update();
        }
      } catch (Exception e) {
        war.getPlugin().getLogger().log(Level.SEVERE, "Exception in control point occupation update", e);
      }
    }
  }

  /** Registers a point as occupied; idempotent. */
  public void addOccupied(ControlPointImpl controlPoint) {
    if (!occupiedControlPoints.contains(controlPoint)) {
      occupiedControlPoints.add(controlPoint);
    }
  }

  public void removeOccupied(ControlPointImpl controlPoint) {
    occupiedControlPoints.remove(controlPoint);
  }
}
|
package com.disbrain.dbmslayer;
import akka.actor.ActorContext;
import akka.actor.ActorRef;
import akka.actor.Props;
import com.disbrain.dbmslayer.actors.GenericDBMSQueryingActor;
import com.disbrain.dbmslayer.descriptors.QueryGenericArgument;
import com.disbrain.dbmslayer.descriptors.RequestModes;
/**
 * Static helpers for creating and reusing DBMS-query FSM actors.
 * Not instantiable.
 */
public class DbmsQuery {
    private DbmsQuery() {
    }

    /** Creates a named generic querying actor for the given request. */
    public static ActorRef create_generic_fsm(ActorContext ctx, QueryGenericArgument request, String description) {
        return ctx.actorOf(Props.create(GenericDBMSQueryingActor.class, request), description);
    }

    /** Creates an anonymous generic querying actor for the given request. */
    public static ActorRef create_generic_fsm(ActorContext ctx, QueryGenericArgument request) {
        return ctx.actorOf(Props.create(GenericDBMSQueryingActor.class, request));
    }

    /** Sends a new command to an existing FSM actor and hands the actor back. */
    public static ActorRef reuse_fsm(ActorRef target, QueryGenericArgument new_command) {
        target.tell(new_command, ActorRef.noSender());
        return (target);
    }

    /**
     * Re-sends the given command to an existing FSM actor with its typology
     * switched to the asynchronous variant; all other fields are carried over.
     */
    public static ActorRef async_reuse_fsm(ActorRef target, QueryGenericArgument old_command) {
        QueryGenericArgument async_command = new QueryGenericArgument(old_command.real_requester,
                                                                      old_command.deathPolicy,
                                                                      old_command.query,
                                                                      new RequestModes(asyncTypologyOf(old_command), old_command.request_properties.behaviour),
                                                                      old_command.autocommit,
                                                                      old_command.reply_type,
                                                                      old_command.arg_array);
        target.tell(async_command, ActorRef.noSender());
        return (target);
    }

    /**
     * Maps a synchronous request typology to its async counterpart; an already
     * asynchronous typology is reported on stderr and returned unchanged.
     */
    private static RequestModes.RequestTypology asyncTypologyOf(QueryGenericArgument command) {
        switch (command.request_properties.typology) {
            case READ_ONLY:
                return RequestModes.RequestTypology.ASYNC_READ_ONLY;
            case READ_WRITE:
                return RequestModes.RequestTypology.ASYNC_READ_WRITE;
            case WRITE:
                return RequestModes.RequestTypology.ASYNC_WRITE;
            default:
                System.err.println("ASYNCING AN ALREADY ASYNC REQUEST: " + command.query);
                return command.request_properties.typology;
        }
    }
}
|
package com.mysema.maven.apt;
import java.io.File;
import java.io.IOException;
import java.util.Set;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import org.apache.commons.io.FileUtils;
public class FileSync {
    /**
     * Recursively synchronizes {@code target} with {@code source}: entries present
     * only in target are deleted, files are copied when changed (or moved when the
     * target file does not yet exist), and subdirectories are synced depth-first.
     *
     * @param source directory to read from
     * @param target directory to make identical to source
     * @throws IOException if either path cannot be listed or a copy/delete fails
     */
    public static void syncFiles(File source, File target) throws IOException {
        Set<String> sourceFiles = listOrFail(source);
        Set<String> targetFiles = listOrFail(target);
        // remove files from target that are not in source
        for (String targetFile : targetFiles) {
            if (!sourceFiles.contains(targetFile)) {
                deleteFile(new File(target, targetFile));
            }
        }
        for (String sourceFile : sourceFiles) {
            File file = new File(source, sourceFile);
            File file2 = new File(target, sourceFile);
            if (file.isFile()) {
                copyIfChanged(file, file2);
            } else {
                file2.mkdir();
                syncFiles(file, file2);
            }
        }
    }

    /**
     * Lists a directory's entries, failing with a descriptive IOException instead
     * of the NullPointerException the previous code produced.
     * BUG FIX: File.list() returns null when the path is not a directory or an
     * I/O error occurs; feeding that null to Sets.newHashSet threw an opaque NPE.
     */
    private static Set<String> listOrFail(File dir) throws IOException {
        String[] names = dir.list();
        if (names == null) {
            throw new IOException("Unable to list directory: " + dir);
        }
        return Sets.newHashSet(names);
    }

    /** Moves source into a missing target, or copies it when size/CRC32 differ. */
    private static void copyIfChanged(File source, File target) throws IOException {
        if (!target.exists()) {
            // Cheap same-filesystem rename first; fall back to Guava's move.
            if (!source.renameTo(target)) {
                Files.move(source, target);
            }
        } else {
            boolean changed = source.length() != target.length()
                || FileUtils.checksumCRC32(source) != FileUtils.checksumCRC32(target);
            if (changed) {
                Files.copy(source, target);
            }
        }
    }

    /** Deletes a file or an entire directory tree. */
    private static void deleteFile(File file) throws IOException {
        if (file.isDirectory()) {
            FileUtils.deleteDirectory(file);
        } else {
            // NOTE(review): delete() result is intentionally ignored, matching the
            // original best-effort behavior — confirm whether failures should throw.
            file.delete();
        }
    }

    private FileSync() {}
}
|
package com.conveyal.taui.models;
import com.conveyal.r5.analyst.scenario.AddTrips;
import java.util.ArrayList;
import java.util.List;
/**
 * Modification that adds a new trip pattern, described as a list of segments
 * plus one or more timetables.
 */
public class AddTripPattern extends Modification {
    public static final String type = "add-trip-pattern";

    public String getType() {
        return type;
    }

    /** Geometry of the pattern, one segment per inter-stop hop. */
    public List<Segment> segments;
    public boolean bidirectional;
    public List<Timetable> timetables;

    /** A timetable for the new pattern, with per-segment speeds and dwell times. */
    public static class Timetable extends AbstractTimetable {
        /** Default dwell time, seconds */
        public int dwellTime;
        /** Speed, kilometers per hour, for each segment */
        public int[] segmentSpeeds;
        /** Dwell times at adjusted stops, seconds */
        // using Integer not int because dwell times can be null
        public Integer[] dwellTimes;

        /** Converts this timetable to its R5 representation using the given stop list. */
        public AddTrips.PatternTimetable toR5 (List<ModificationStop> stops) {
            AddTrips.PatternTimetable pt = this.toBaseR5Timetable();
            // Get hop times
            pt.dwellTimes = ModificationStop.getDwellTimes(stops);
            pt.hopTimes = ModificationStop.getHopTimes(stops);
            return pt;
        }
    }

    /** Builds the R5 AddTrips modification corresponding to this pattern. */
    public AddTrips toR5 () {
        AddTrips result = new AddTrips();
        result.comment = name;
        result.bidirectional = bidirectional;
        result.frequencies = new ArrayList<>();
        // Iterate over the timetables generating hopTimes and dwellTimes from the segments and segment speeds
        for (Timetable timetable : timetables) {
            // Stop distance calculations are repeated but this is a short term fix until the models are updated.
            List<ModificationStop> timetableStops =
                ModificationStop.getStopsFromSegments(segments, timetable.dwellTimes, timetable.dwellTime, timetable.segmentSpeeds);
            result.frequencies.add(timetable.toR5(timetableStops));
        }
        // Values for stop spec are not affected by time table segment speeds or dwell times
        result.stops = ModificationStop.toStopSpecs(ModificationStop.getStopsFromSegments(segments, null, 0, new int[0]));
        return result;
    }
}
|
package com.pjbollinger.guilds;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.plugin.java.JavaPlugin;
public class Guilds extends JavaPlugin {
    @Override
    public void onEnable(){
        getLogger().info("Guilds has started.");
    }

    @Override
    public void onDisable(){
        getLogger().info("Guilds has stopped.");
    }

    /**
     * Handles the /judy command and the /gs &lt;subcommand&gt; family.
     * BUG FIX: args[0] and args[1] were read without length checks, so a bare
     * "/gs" (or "/gs create" without a guild name) crashed with
     * ArrayIndexOutOfBoundsException. Missing arguments now return false so
     * Bukkit shows the usage message instead.
     */
    @Override
    public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args){
        if(cmd.getName().equalsIgnoreCase("Judy")){
            getLogger().info("Judy says, 'Hi!'");
            return true;
        }
        else if(cmd.getName().equalsIgnoreCase("gs")){
            if(args.length == 0){
                // No sub-command supplied; let Bukkit print the usage string.
                return false;
            }
            if(args[0].equalsIgnoreCase("create")){
                //Stuff for creating a faction will go here
                //Need to specify <name of Guild>
                if(args.length < 2){
                    return false;
                }
                getLogger().info("You want to make a Guild with the name: " + args[1]);
            }
            else if(args[0].equalsIgnoreCase("invite")){
                //Stuff for inviting a player will go here
                //Need to specify <name>
            }
            else if(args[0].equalsIgnoreCase("invitations")){
                //Stuff for viewing which Guild(s) have invited a player to join will go here
            }
            else if(args[0].equalsIgnoreCase("join")){
                //Stuff for if invited OR if desire to join an OPEN Guild
                //Need to specify <name of Guild>
            }
            else if(args[0].equalsIgnoreCase("leave")){
                //Stuff for leaving the Guild
            }
            else if(args[0].equalsIgnoreCase("kick")){
                //Stuff for getting rid of a troublemaker
                //Need to specify <name>
            }
            else if(args[0].equalsIgnoreCase("moderator")){
                //Stuff for designating a Guild member as moderator of the Guild
                //Need to specify <name>
            }
            else if(args[0].equalsIgnoreCase("member")){
                //Stuff for removing Guild mod status, returning to member only status
                //Need to specify <name>
            }
            else if(args[0].equalsIgnoreCase("leader")){
                //Stuff for reassigning role of Guild leader
                // MUST be approved by SO or SAdmin before going into effect
                //Need to specify <name>
            }
            else if(args[0].equalsIgnoreCase("list")){
                //Stuff for creating/showing list of Guilds
                //page # is optional
            }
            else if(args[0].equalsIgnoreCase("show")){
                //Stuff for showing info about a Guild
            }
            else if(args[0].equalsIgnoreCase("player")){
                //Stuff for showing info about a single person
            }
            else if(args[0].equalsIgnoreCase("home")){
                //Stuff for going to home set point
            }
            else if(args[0].equalsIgnoreCase("set")){
                //Stuff for setting the value, property, etc.
                //examples: property = home, value = open (is Guild open or closed)
                //examples: name = change name of Guild, description = motto for Guild
                //Need to specify <name, property> and [value(s), description]
            }
            else if(args[0].equalsIgnoreCase("ally")){
                //Stuff for inviting another Guild to be allied with yours
                //Need to specify <name>
            }
            else if(args[0].equalsIgnoreCase("neutral")){
                //Stuff for returning to neutral with a Guild
                //Need to specify <name>
            }
        }
        return false;
    }
}
|
package ca.corefacility.bioinformatics.irida.ria.web;
import java.security.Principal;
import java.util.*;
import java.util.stream.Collectors;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.*;
import ca.corefacility.bioinformatics.irida.exceptions.EntityExistsException;
import ca.corefacility.bioinformatics.irida.exceptions.UserGroupWithoutOwnerException;
import ca.corefacility.bioinformatics.irida.model.user.Role;
import ca.corefacility.bioinformatics.irida.model.user.User;
import ca.corefacility.bioinformatics.irida.model.user.group.UserGroup;
import ca.corefacility.bioinformatics.irida.model.user.group.UserGroupJoin;
import ca.corefacility.bioinformatics.irida.model.user.group.UserGroupJoin.UserGroupRole;
import ca.corefacility.bioinformatics.irida.repositories.specification.UserGroupSpecification;
import ca.corefacility.bioinformatics.irida.ria.web.components.datatables.DataTablesParams;
import ca.corefacility.bioinformatics.irida.ria.web.components.datatables.DataTablesResponse;
import ca.corefacility.bioinformatics.irida.ria.web.components.datatables.config.DataTablesRequest;
import ca.corefacility.bioinformatics.irida.ria.web.components.datatables.models.DataTablesResponseModel;
import ca.corefacility.bioinformatics.irida.ria.web.models.datatables.DTGroupMember;
import ca.corefacility.bioinformatics.irida.ria.web.models.datatables.DTUserGroup;
import ca.corefacility.bioinformatics.irida.service.user.UserGroupService;
import ca.corefacility.bioinformatics.irida.service.user.UserService;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
/**
* Controller for interacting with {@link UserGroup}.
*/
@Controller
@RequestMapping(value = "/groups")
public class GroupsController {
private static final Logger logger = LoggerFactory.getLogger(GroupsController.class);
private static final String GROUPS_LIST = "groups/list";
private static final String GROUPS_CREATE = "groups/create";
private static final String GROUPS_EDIT = "groups/edit";
private static final String GROUP_DETAILS = "groups/details";
private static final String GROUPS_REMOVE_MODAL = "groups/remove-group-modal";
private static final String GROUPS_USER_MODAL = "groups/remove-user-modal";
private final UserGroupService userGroupService;
private final UserService userService;
private final MessageSource messageSource;
/**
* Create a new groups controller.
*
* @param userGroupService
* the {@link UserGroupService}.
* @param userService
* the {@link UserService}.
* @param messageSource
* the {@link MessageSource}.
*/
@Autowired
public GroupsController(final UserGroupService userGroupService, final UserService userService,
final MessageSource messageSource) {
this.userGroupService = userGroupService;
this.messageSource = messageSource;
this.userService = userService;
}
/**
* Get the default index page for listing groups.
*
* @return the route to the index page.
*/
@RequestMapping
public String getIndex() {
return GROUPS_LIST;
}
/**
* Get the page to create a new group.
*
* @return the route to the creation page.
*/
@RequestMapping("/create")
public String getCreatePage() {
return GROUPS_CREATE;
}
/**
* Create a new {@link UserGroup}.
*
* @param userGroup
* the {@link UserGroup} from the request.
* @param model
* the model to add violation constraints to.
* @param locale
* the locale used by the browser.
* @param principal
* The logged in user
* @return the route back to the creation page on validation failure, or the
* destails page on success.
*/
@RequestMapping(path = "/create", method = RequestMethod.POST)
public String createGroup(final @ModelAttribute UserGroup userGroup, final Model model, final Locale locale, final Principal principal) {
logger.debug("Creating group: [ " + userGroup + "]");
final Map<String, String> errors = new HashMap<>();
try {
userGroupService.create(userGroup);
return "redirect:/groups/" + userGroup.getId();
} catch (final ConstraintViolationException e) {
for (final ConstraintViolation<?> v : e.getConstraintViolations()) {
errors.put(v.getPropertyPath().toString(), v.getMessage());
}
} catch (final EntityExistsException | DataIntegrityViolationException e) {
errors.put("name", messageSource.getMessage("group.name.exists", null, locale));
}
model.addAttribute("errors", errors);
model.addAttribute("given_name", userGroup.getName());
model.addAttribute("given_description", userGroup.getDescription());
return GROUPS_CREATE;
}
/**
* Search/filter/page with datatables for {@link UserGroup}.
* @param params {@link DataTablesParams} for the current DataTable
* @param principal Currently logged in user
* @return {@link DataTablesResponse} for the current table base on the parameters.
*/
@RequestMapping("/ajax/list")
@ResponseBody
public DataTablesResponse getGroups(final @DataTablesRequest DataTablesParams params, final Principal principal) {
Page<UserGroup> groups = userGroupService.search(
UserGroupSpecification.searchUserGroup(params.getSearchValue()),
new PageRequest(params.getCurrentPage(), params.getLength(), params.getSort()));
User currentUser = userService.getUserByUsername(principal.getName());
List<DataTablesResponseModel> groupsWithOwnership = groups.getContent()
.stream()
.map(ug -> new DTUserGroup(ug, isGroupOwner(currentUser, ug),
currentUser.getSystemRole().equals(Role.ROLE_ADMIN)))
.collect(Collectors.toList());
return new DataTablesResponse(params, groups, groupsWithOwnership);
}
/**
* Convenience method for checking whether or not the specified user is an
* owner of the group.
*
* @param user
* the {@link User} to check.
* @param group
* the {@link UserGroup} to check.
* @return true if owner, false otherwise.
*/
private boolean isGroupOwner(final User user, final UserGroup group) {
final Collection<UserGroupJoin> groupUsers = userGroupService.getUsersForGroup(group);
final Optional<UserGroupJoin> currentUserGroup = groupUsers.stream().filter(j -> j.getSubject().equals(user))
.findAny();
if (currentUserGroup.isPresent()) {
final UserGroupJoin j = currentUserGroup.get();
return j.getRole().equals(UserGroupRole.GROUP_OWNER);
} else {
return false;
}
}
/**
* Get the details page for a {@link UserGroup}.
*
* @param userGroupId
* the {@link UserGroup} to retrieve.
* @param principal
* the user that's currently logged in.
* @param model
* the model to write attributes to.
* @return the route to the group details page.
*/
@RequestMapping("/{userGroupId}")
public String getDetailsPage(final @PathVariable Long userGroupId, final Principal principal, final Model model) {
final UserGroup group = userGroupService.read(userGroupId);
final Collection<UserGroupJoin> groupUsers = userGroupService.getUsersForGroup(group);
final User currentUser = userService.getUserByUsername(principal.getName());
final boolean isOwner = isGroupOwner(currentUser, group);
model.addAttribute("group", group);
model.addAttribute("isAdmin", currentUser.getSystemRole().equals(Role.ROLE_ADMIN));
model.addAttribute("isOwner", isOwner);
model.addAttribute("users", groupUsers);
model.addAttribute("groupRoles", ImmutableList.of(UserGroupRole.GROUP_MEMBER, UserGroupRole.GROUP_OWNER));
return GROUP_DETAILS;
}
/**
* Delete the specified {@link UserGroup}.
*
* @param userGroupId
* the group to delete.
* @param locale
* the locale of the browser
* @return a message indicating success.
*/
@RequestMapping(path = "/{userGroupId}", method = RequestMethod.DELETE)
public @ResponseBody Map<String, String> deleteGroup(final @PathVariable Long userGroupId, final Locale locale) {
final UserGroup userGroup = userGroupService.read(userGroupId);
userGroupService.delete(userGroupId);
return ImmutableMap.of("result", messageSource.getMessage("group.remove.notification.success",
new Object[] { userGroup.getName() }, locale));
}
/**
* Get the group editing page.
*
* @param userGroupId
* the group id to edit.
* @param model
* the model to write attributes to.
* @return the route to the editing page.
*/
@RequestMapping(path = "/{userGroupId}/edit")
public String getEditPage(final @PathVariable Long userGroupId, final Model model) {
final UserGroup group = userGroupService.read(userGroupId);
model.addAttribute("group", group);
model.addAttribute("given_name", group.getName());
model.addAttribute("given_description", group.getDescription());
return GROUPS_EDIT;
}
/**
* Submit changes to the {@link UserGroup}.
*
* @param userGroupId
* the group ID to edit.
* @param name
* the new name of the group.
* @param description
* the new description of the group.
* @param principal
* the currently logged in user.
* @param model
* the model to add attributes to.
* @param locale
* the locale of the browser.
* @return the route to the editing page on validation failure, or the
* details page on success.
*/
@RequestMapping(path = "/{userGroupId}/edit", method = RequestMethod.POST)
public String editGroup(final @PathVariable Long userGroupId, final @RequestParam String name,
final @RequestParam String description, final Principal principal, final Model model, final Locale locale) {
logger.debug("Editing group: [" + userGroupId + "]");
final Map<String, String> errors = new HashMap<>();
UserGroup group = userGroupService.read(userGroupId);
try {
group.setName(name);
group.setDescription(description);
userGroupService.update(group);
return getDetailsPage(userGroupId, principal, model);
} catch (final ConstraintViolationException e) {
for (final ConstraintViolation<?> v : e.getConstraintViolations()) {
errors.put(v.getPropertyPath().toString(), v.getMessage());
}
} catch (final EntityExistsException | DataIntegrityViolationException e) {
errors.put("name", messageSource.getMessage("group.name.exists", null, locale));
}
model.addAttribute("errors", errors);
model.addAttribute("group", userGroupService.read(userGroupId));
model.addAttribute("given_name", name);
model.addAttribute("given_description", description);
return GROUPS_EDIT;
}
/**
* List the members in the group.
*
* @param params
* the datatables parameters to search for.
* @param userGroupId
* the group ID to get members for.
* @return the datatables-formatted response with filtered users.
*/
@RequestMapping("/{userGroupId}/ajax/list")
@ResponseBody
public DataTablesResponse getGroupUsers(@DataTablesRequest DataTablesParams params,
@PathVariable Long userGroupId) {
final UserGroup group = userGroupService.read(userGroupId);
final Page<UserGroupJoin> page = userGroupService.filterUsersByUsername(params.getSearchValue(), group,
params.getCurrentPage(), params.getLength(), params.getSort());
List<DataTablesResponseModel> members = page.getContent()
.stream()
.map(DTGroupMember::new)
.collect(Collectors.toList());
return new DataTablesResponse(params, page, members);
}
/**
* Get a list of the users that are not currently members of this group.
*
* @param userGroupId
* the group ID to use as a negative filter.
* @param term
* a filter on username to filter on.
* @return the collection of users that match the query.
*/
@RequestMapping("/{userGroupId}/ajax/availablemembers")
public @ResponseBody Collection<User> getUsersNotInGroup(final @PathVariable Long userGroupId,
final @RequestParam String term) {
final UserGroup group = userGroupService.read(userGroupId);
logger.debug("Loading users not in group [" + userGroupId + "]");
final Collection<User> usersNotInGroup = userGroupService.getUsersNotInGroup(group);
return usersNotInGroup.stream().filter(u -> u.getLabel().toLowerCase().contains(term.toLowerCase()))
.collect(Collectors.toList());
}
/**
* Add a new user to the group.
*
* @param userGroupId
* the group to add to.
* @param userId
* the new member.
* @param groupRole
* the role this user should have.
* @param locale
* the locale of the browser.
* @return a message indicating success.
*/
@RequestMapping(path = "/{userGroupId}/members", method = RequestMethod.POST)
public @ResponseBody Map<String, String> addUserToGroup(final @PathVariable Long userGroupId,
final @RequestParam Long userId, @RequestParam String groupRole, Locale locale) {
final User user = userService.read(userId);
final UserGroup group = userGroupService.read(userGroupId);
final UserGroupRole role = UserGroupRole.valueOf(groupRole);
userGroupService.addUserToGroup(user, group, role);
return ImmutableMap.of("result", messageSource.getMessage("group.users.add.notification.success",
new Object[] { user.getLabel() }, locale));
}
/**
* Remove a user from a group.
*
* @param userGroupId
* the group to remove from.
* @param userId
* the user to remove.
* @param locale
* the locale of the browser.
* @return a message indicating success.
*/
@RequestMapping(path = "/{userGroupId}/members/{userId}", method = RequestMethod.DELETE)
public @ResponseBody Map<String, String> removeUserFromGroup(final @PathVariable Long userGroupId,
final @PathVariable Long userId, Locale locale) {
final User user = userService.read(userId);
final UserGroup group = userGroupService.read(userGroupId);
try {
userGroupService.removeUserFromGroup(user, group);
return ImmutableMap.of("success", messageSource.getMessage("group.users.remove.notification.success",
new Object[] { user.getLabel() }, locale));
} catch (final UserGroupWithoutOwnerException e) {
return ImmutableMap.of("failure", messageSource.getMessage("group.users.remove.notification.failure",
new Object[] { user.getLabel() }, locale));
}
}
/**
* Get a string to tell the user which group they're going to delete.
*
* @param userGroupId
* the user group that's about to be deleted.
* @param model
* model for rendering view
* @return a message indicating which group is going to be deleted.
*/
@RequestMapping(path = "/deleteConfirmModal", method = RequestMethod.POST)
public String getDeleteGroupText(final @RequestParam Long userGroupId, final Model model) {
final UserGroup group = userGroupService.read(userGroupId);
model.addAttribute("group", group);
return GROUPS_REMOVE_MODAL;
}
/**
 * Populate the confirmation modal identifying which user is about to be
 * removed from a group.
 *
 * @param userId
 *            the user that's about to be removed from the group.
 * @param model
 *            model for the view to render
 * @return the view name of the user-removal modal.
 */
@RequestMapping(path = "/removeUserModal", method = RequestMethod.POST)
public String getRemoveUserModal(final @RequestParam Long userId, final Model model) {
    final User user = userService.read(userId);
    model.addAttribute("user", user);
    return GROUPS_USER_MODAL;
}
/**
 * Update a user's role on a group
 *
 * @param groupId
 *            The ID of the group
 * @param userId
 *            The ID of the user
 * @param groupRole
 *            The role to set
 * @param locale
 *            Locale of the logged in user
 *
 * @return message indicating update result
 */
@RequestMapping(path = "/{groupId}/members/editrole/{userId}", method = RequestMethod.POST)
@ResponseBody
public Map<String, String> updateUserRole(final @PathVariable Long groupId, final @PathVariable Long userId,
        final @RequestParam String groupRole, final Locale locale) {
    final User member = userService.read(userId);
    final UserGroup targetGroup = userGroupService.read(groupId);
    final UserGroupRole requestedRole = UserGroupRole.fromString(groupRole);
    // Localized role name used in both the success and failure messages.
    final String roleName = messageSource.getMessage("group.users.role." + groupRole, new Object[] {}, locale);
    try {
        userGroupService.changeUserGroupRole(member, targetGroup, requestedRole);
        return ImmutableMap.of("success", messageSource.getMessage("group.members.edit.role.success",
                new Object[] { member.getLabel(), roleName }, locale));
    } catch (final UserGroupWithoutOwnerException e) {
        // Demoting the last owner is rejected by the service layer.
        return ImmutableMap.of("failure", messageSource.getMessage("group.members.edit.role.failure",
                new Object[] { member.getLabel(), roleName }, locale));
    }
}
}
|
package com.epimorphics.dclib.values;
import com.epimorphics.dclib.framework.ConverterProcess;
import com.epimorphics.util.NameUtils;
import java.util.ArrayList;
import org.apache.jena.datatypes.RDFDatatype;
import org.apache.jena.datatypes.TypeMapper;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.vocabulary.XSD;
/**
* Wraps an array of strings, e.g. from a split operation. This allows
* a pattern to return multiple results.
*
* @author <a href="mailto:dave@epimorphics.com">Dave Reynolds</a>
*/
public class ValueArray extends ValueBase<Value[]> implements Value {

    /** Wrap an existing array of values. */
    public ValueArray(Value[] values) {
        super(values);
    }

    /** Wrap an array of plain strings, each element becoming a {@link ValueString}. */
    public ValueArray(String[] values) {
        super(wrapStrings(values));
    }

    private static Value[] wrapStrings(String[] values) {
        Value[] wrapped = new Value[values.length];
        for (int i = 0; i < values.length; i++) {
            wrapped[i] = new ValueString(values[i]);
        }
        return wrapped;
    }

    @Override
    public boolean isNull() {
        return value == null || value.length == 0;
    }

    @Override
    public boolean isMulti() {
        return true;
    }

    @Override
    public Value[] getValues() {
        return value;
    }

    /**
     * Append a value to every element. When the argument is itself
     * multi-valued the result is the cross product of the two arrays.
     */
    @Override
    public Value append(Value app) {
        if (app.isMulti()) {
            Value[] apps = app.getValues();
            int len = apps.length;
            Value[] results = new Value[value.length * len];
            for (int i = 0; i < value.length; i++) {
                for (int j = 0; j < len; j++) {
                    results[i * len + j] = value[i].append(apps[j]);
                }
            }
            return new ValueArray(results);
        } else {
            String[] results = new String[value.length];
            for (int i = 0; i < value.length; i++) {
                // String concatenation relies on each element's toString form.
                results[i] = value[i] + app.toString();
            }
            return new ValueArray(results);
        }
    }

    @Override
    public Value asString() {
        return this;
    }

    /** An array has no single node representation. */
    @Override
    public Node asNode() {
        return null;
    }

    @Override
    public String getDatatype() {
        return null;
    }

    /** Access the i'th element of the array. */
    public Value get(int i) {
        return value[i];
    }

    // Value methods applicable to any type

    /** Convert every element to an RDF literal with the given datatype. */
    public Object datatype(final String typeURI) {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return new ValueNode(NodeFactory.createLiteral(value.toString(), typeFor(typeURI)));
            }
        });
    }

    protected RDFDatatype typeFor(String typeURI) {
        return TypeMapper.getInstance().getSafeTypeByName(expandTypeURI(typeURI));
    }

    /** Expand a (possibly prefixed) datatype URI via the global data context. */
    protected String expandTypeURI(String typeURI) {
        typeURI = ConverterProcess.getGlobalDataContext().expandURI(typeURI);
        if (typeURI.startsWith("xsd:")) {
            // Hardwired xsd: even if the prefix mapping doesn't have it
            typeURI = typeURI.replace("xsd:", XSD.getURI());
        }
        return typeURI;
    }

    /** Apply {@link String#format} with the given format string to every element. */
    public Object format(final String fmtstr) {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return new ValueString(String.format(fmtstr, value));
            }
        });
    }

    public boolean isString() {
        return false;
    }

    public boolean isNumber() {
        return false;
    }

    public boolean isDate() {
        return false;
    }

    /** Convert every element to a number, reporting an error for non-numeric elements. */
    public Value asNumber() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                ValueNumber v = new ValueNumber(value.toString());
                if (v.isNull()) {
                    reportError("Could not convert " + value + " to a number");
                }
                return v;
            }
        });
    }

    @Override
    public Value map(final String mapsource, final boolean matchRequired) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).map(mapsource, matchRequired);
            }
        });
    }

    public Value map(final String mapsource) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).map(mapsource);
            }
        });
    }

    public Value map(final String[] mapsources, final Object deflt) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).map(mapsources, deflt);
            }
        });
    }

    /** Parse every element as a date using an explicit format and datatype. */
    public Value asDate(final String format, final String typeURI) {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return ValueDate.parse(value.toString(), format, expandTypeURI(typeURI));
            }
        });
    }

    /** Parse every element as a date with the given datatype. */
    public Value asDate(final String typeURI) {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return ValueDate.parse(value.toString(), expandTypeURI(typeURI));
            }
        });
    }

    /** Reference time of each element; errors on elements that are not dates. */
    public Value referenceTime() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                if (value instanceof ValueDate) {
                    return ((ValueDate) value).referenceTime();
                }
                reportError("Could not generate reference time for " + value + " not a ValueDate");
                return new ValueNull();
            }
        });
    }

    public Value toLowerCase() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return wrap(value.toString().toLowerCase());
            }
        });
    }

    public Value toUpperCase() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return wrap(value.toString().toUpperCase());
            }
        });
    }

    /** Convert every element to a URI-safe segment. */
    public Value toSegment() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                // Bug fix: previously called the anonymous object's own
                // toString() rather than the element's, so every element was
                // replaced by a segment of the wrong string.
                return wrap(NameUtils.safeName(value.toString()));
            }
        });
    }

    public Value toCleanSegment() {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).toCleanSegment();
            }
        });
    }

    public Value toSegment(final String repl) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).toSegment(repl);
            }
        });
    }

    public Value trim() {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).trim();
            }
        });
    }

    public Value substring(final int offset) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).substring(offset);
            }
        });
    }

    public Value substring(final int start, final int end) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).substring(start, end);
            }
        });
    }

    public Value replaceAll(final String regex, final String replacement) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).replaceAll(regex, replacement);
            }
        });
    }

    public Value regex(final String regex) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).regex(regex);
            }
        });
    }

    public Value lastSegment() {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase) value).lastSegment();
            }
        });
    }

    /** Element-wise transform applied by {@link #applyFunction(MapValue)}. */
    public interface MapValue {
        public Value map(Value value);
    }

    /** Apply the transform to every element, yielding a new array. */
    public ValueArray applyFunction(MapValue map) {
        Value[] result = new Value[value.length];
        for (int i = 0; i < value.length; i++) {
            result[i] = map.map(value[i]);
        }
        return new ValueArray(result);
    }

    /** Render as "[a | b | c]"; null for a null underlying array. */
    public String toString() {
        if (value == null)
            return null;
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        sb.append('[');
        for (Value v : value) {
            sb.append(first ? "" : " | ");
            sb.append(v.toString());
            first = false;
        }
        sb.append(']');
        return sb.toString();
    }
}
|
package com.softala.dao;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import javax.inject.Inject;

import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;

import com.softala.bean.Weight;
import com.softala.dao.WeightDao;
import com.softala.dao.WeightRowMapper;
/**
* Class implementing the WeightDAO interface
*
* @author InkaH and SoftwareSpot
*/
@Component
public class WeightDaoImpl implements WeightDao {

    @Inject
    private JdbcTemplate jdbcTemplate;

    /**
     * Get the JDBC template object reference
     *
     * @return JDBC template object reference
     */
    public JdbcTemplate getJdbcTemplate() {
        return jdbcTemplate;
    }

    /**
     * Get a list of unique usernames
     *
     * @return A list of all unique usernames
     */
    public List<String> getUsersAll() {
        String sql = "SELECT DISTINCT username FROM Weights";
        List<Map<String, Object>> rows = getJdbcTemplate().queryForList(sql);
        List<String> usernames = new ArrayList<String>();
        for (Map<String, Object> row : rows) {
            usernames.add((String) row.get("username"));
        }
        return usernames;
    }

    /**
     * Returns a list of Weight objects of all database records that match the
     * condition given as a parameter
     *
     * @param username
     *            Specifies the condition by which the database records are
     *            filtered
     * @return A list of Weight objects, or null when the query fails
     */
    public List<Weight> getUserWeights(String username) {
        String sql = "SELECT * FROM Weights WHERE username = ?";
        Object[] parameters = new Object[] { username };
        RowMapper<Weight> mapper = new WeightRowMapper();
        try {
            return getJdbcTemplate().query(sql, parameters, mapper);
        } catch (DataAccessException e) {
            // return null, controller handles the setting of http status to
            // NOT_FOUND when it gets a null returned
            return null;
        }
    }

    /**
     * Get a Weight object based on an id
     *
     * @param id
     *            Id of the Weight object
     *
     * @return Weight object reference; otherwise, null
     */
    public Weight getWeightById(int id) {
        String sql = "SELECT * FROM Weights WHERE id = ?";
        Object[] parameters = new Object[] { id };
        RowMapper<Weight> mapper = new WeightRowMapper();
        Weight weight = null;
        try {
            // Redundant cast removed: queryForObject already returns Weight.
            weight = getJdbcTemplate().queryForObject(sql, parameters, mapper);
        } catch (DataAccessException e) {
            // Narrowed from Exception: DataAccessException covers query errors
            // and EmptyResultDataAccessException (no matching row); diagnostics
            // belong on stderr, not stdout.
            System.err.println(e.getMessage());
        }
        return weight;
    }

    /**
     * Returns a list of Weight objects of all database records
     *
     * @return An list of Weight objects
     */
    public List<Weight> getWeightsAll() {
        String sql = "SELECT * FROM Weights";
        RowMapper<Weight> mapper = new WeightRowMapper();
        return getJdbcTemplate().query(sql, mapper);
    }

    /**
     * Inserts the value, time and username attributes of the Weight object into
     * the database
     *
     * @param weight
     *            Weight object
     */
    public void saveWeight(Weight weight) {
        // Note that weight.getTime() is ignored and the SQL function NOW() is
        // used instead.
        // Bug fix: standard SQL spells the keyword VALUES (MySQL merely
        // tolerates the non-standard VALUE).
        String sql = "INSERT INTO Weights(value, time, username) VALUES(?, NOW(), ?)";
        Object[] parameters = new Object[] { weight.getValue(), weight.getUsername() };
        getJdbcTemplate().update(sql, parameters);
    }

    /**
     * Deletes a weight record from database based on its id.
     *
     * @param id
     *            Id of the Weight object
     */
    public void deleteWeight(int id) {
        String sql = "DELETE FROM Weights WHERE id = ?";
        Object[] parameters = new Object[] { id };
        getJdbcTemplate().update(sql, parameters);
    }

    /**
     * Set the JDBC template object reference
     *
     * @param jdbcTemplate
     *            JDBC template object reference to set with
     */
    public void setJdbcTemplate(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }
}
|
package com.tinkerpop.gremlin;
import com.tinkerpop.gremlin.compiler.GremlinEvaluator;
import com.tinkerpop.gremlin.compiler.context.GremlinScriptContext;
import jline.ConsoleReader;
import jline.History;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
/**
* @author Pavel A. Yaskevich
*/
public class Console {
private static final String PROMPT = "gremlin> ";
private static final String QUIT = "quit";
private static final String INDENT = "\t ";
private static final String THREE_SPACES = " ";
private static final String HISTORY_FILE = ".gremlin_history";
private static final String HISTORY_ERROR = "Error: Can't set history file to " + HISTORY_FILE;
private static final String[] compoundStatements = {"if", "while", "repeat", "foreach", "func", "path"};
public static void main(String[] args) throws Exception {
// debug mode
GremlinEvaluator.DEBUG = true;
final PrintStream output = System.out;
final ConsoleReader reader = new ConsoleReader();
reader.setBellEnabled(false);
reader.setUseHistory(true);
try {
History history = new History();
history.setHistoryFile(new File(HISTORY_FILE));
reader.setHistory(history);
} catch (IOException e) {
System.err.println(HISTORY_ERROR);
}
output.println();
output.println(" \\,,,/");
output.println(" (o o)");
output.println("
String line = "";
String prompt;
int codeDepth = 0;
String compoundStatementParts = "";
boolean inCompoundStatement = false;
final GremlinScriptEngine engine = new GremlinScriptEngine();
final GremlinScriptContext context = new GremlinScriptContext();
while (line != null) {
// set appropriate prompt
if (inCompoundStatement) {
prompt = INDENT;
if (codeDepth > 1) {
for (int i = 0; i < codeDepth; i++) {
prompt += THREE_SPACES;
}
}
} else {
prompt = PROMPT;
}
// read console line
line = reader.readLine(prompt).trim();
if (line.isEmpty()) continue;
// analyze current statement
for (final String statement : compoundStatements) {
if (line.indexOf(statement + " ") == 0) {
inCompoundStatement = true;
codeDepth++;
}
}
if (inCompoundStatement)
compoundStatementParts += line + "\n";
// break on quit
if (null == line || line.equalsIgnoreCase(QUIT)) {
break;
}
if (inCompoundStatement && line.equals("end"))
codeDepth
if (inCompoundStatement && codeDepth > 0)
continue;
if (codeDepth == 0 && inCompoundStatement) {
line = compoundStatementParts;
compoundStatementParts = "";
inCompoundStatement = false;
}
try {
engine.eval(line, context);
} catch (Exception e) {
context.getErrorWriter().flush();
System.err.println(e.getMessage());
//e.printStackTrace();
}
}
}
}
|
package com.feed_the_beast.ftbcurseappbot.runnables;
import com.beust.jcommander.internal.Lists;
import com.feed_the_beast.ftbcurseappbot.Config;
import com.feed_the_beast.ftbcurseappbot.Main;
import com.feed_the_beast.ftbcurseappbot.persistence.MongoConnection;
import com.feed_the_beast.ftbcurseappbot.persistence.data.MongoCurseforgeCheck;
import com.feed_the_beast.javacurselib.addondumps.Addon;
import com.feed_the_beast.javacurselib.addondumps.AddonDatabase;
import com.feed_the_beast.javacurselib.addondumps.Bz2Data;
import com.feed_the_beast.javacurselib.addondumps.DatabaseType;
import com.feed_the_beast.javacurselib.addondumps.Filtering;
import com.feed_the_beast.javacurselib.addondumps.MergedDatabase;
import com.feed_the_beast.javacurselib.addondumps.ReleaseType;
import com.feed_the_beast.javacurselib.service.contacts.contacts.ContactsResponse;
import com.feed_the_beast.javacurselib.utils.ChatFormatter;
import com.feed_the_beast.javacurselib.utils.CurseGUID;
import com.feed_the_beast.javacurselib.websocket.WebSocket;
import lombok.extern.slf4j.Slf4j;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nonnull;
@Slf4j
public class CurseforgeChecker implements Runnable {

    // Socket used to push update notifications to chat channels.
    private WebSocket webSocket;
    // True once the initial full addon database has been fetched.
    private boolean initialized = false;
    // Channels (as "group.channel" names) that receive the general update feed.
    private Optional<List<String>> channelsEnabled;//TODO make sure this gets updates!!
    // Space-separated database types seen in the latest change set (logging only).
    private String types = "";

    public CurseforgeChecker (@Nonnull WebSocket webSocket) {
        this.webSocket = webSocket;
        this.channelsEnabled = Optional.of(Lists.newArrayList());
        // NOTE(review): hard-coded default channel — presumably a development
        // default; confirm whether this should come from configuration.
        channelsEnabled.get().add("Progwml6's mods.curseforge-updates");
    }

    /**
     * Human-readable suffix (with leading space) for a file release type.
     * NOTE(review): a null r throws NPE at the switch itself, so the null
     * guard in the default branch is unreachable — confirm intent.
     */
    public static String getFeed (ReleaseType r) {
        switch (r) {
        case ALPHA:
            return " alpha";
        case BETA:
            return " beta";
        case RELEASE:
            return " release";
        default:
            return " UNKNOWN " + (r == null ? "null" : r.getValue());
        }
    }

    /**
     * Build the change text for one addon: its name, the release type of its
     * newest file, and the MC versions that file supports.
     */
    private static String getChangeTextForAddon (@Nonnull Addon a) {
        String ret = "";
        int i = 0;
        // find the index of the most recent file by date
        for (int j = 0; j < a.latestFiles.size(); j++) {
            if (a.latestFiles.get(j).fileDate.getTime() > a.latestFiles.get(i).fileDate.getTime()) {
                i = j;
            }
        }
        ret += a.name + getFeed(a.latestFiles.get(i).releaseType) + " for MC: ";
        for (String s : a.latestFiles.get(i).gameVersion) {
            // comma-separate every version after the first
            if (!ret.endsWith(", ") && !ret.endsWith(": ")) {
                ret += ", ";
            }
            ret += s;
        }
        /*if (!ret.endsWith(", ") && !ret.endsWith(": ")) {
            ret += " ";
        }*/
        return ret;
    }

    /**
     * Build notification text for the addons in lst under a bolded type
     * heading; empty string when the list is empty.
     */
    private static String getTextForType (@Nonnull String type, @Nonnull List<Addon> lst) {
        String result = "";
        if (!lst.isEmpty()) {
            result += ChatFormatter.bold(type) + ": ";
            for (Addon a : lst) {
                // comma-separate every addon after the first
                if (!result.endsWith(": ") && !result.endsWith(", ")) {
                    result += ", ";
                }
                result += getChangeTextForAddon(a);
            }
            if (!result.endsWith(" ")) {
                result += " ";
            }
        }
        return result;
    }

    /**
     * As {@link #getTextForType(String, List)} but first filtering the
     * database by category section.
     */
    private static String getTextForType (@Nonnull String type, @Nonnull AddonDatabase db) {
        List<Addon> lst = Filtering.byCategorySection(type, db);
        return getTextForType(type, lst);
    }

    /**
     * Periodic check. On the first run the complete addon database is loaded
     * into the cache; on later runs incremental changes are merged in, a
     * notification message is built, and it is sent both to the general
     * channels and to any per-author/per-type subscriptions stored in Mongo.
     * All failures are caught and logged so the scheduled task never dies.
     */
    @Override
    public void run () {
        try {
            boolean changed = false;
            Thread.currentThread().setName("curseforgecheckthread");
            String result = "";
            types = "";
            String base = ChatFormatter.underscore(ChatFormatter.bold("Curse Updates")) + ": ";
            if (!initialized) {
                // first run: seed the cache with the full database dump
                initialized = true;
                Main.getCacheService().setAddonDatabase(Bz2Data.getInitialDatabase(Bz2Data.MC_GAME_ID));
                String size = "";
                long timestamp = -1;
                if (Main.getCacheService().getAddonDatabase() == null || Main.getCacheService().getAddonDatabase().data == null) {
                    size = "null";
                } else {
                    size = String.valueOf(Main.getCacheService().getAddonDatabase().data.size());
                }
                if (Main.getCacheService().getAddonDatabase() != null) {
                    timestamp = Main.getCacheService().getAddonDatabase().timestamp;
                }
                log.info("Curseforge Checker Initialized with " + size + " entries timestamp: " + timestamp);
            } else {
                Bz2Data.debug = false;//when true this produces log spam
                // merge incremental changes into the cached database
                MergedDatabase db = Bz2Data.updateCompleteDatabaseIfNeeded(Main.getCacheService().getAddonDatabase(), Bz2Data.MC_GAME_ID);
                if (db.changes != null && !db.changes.data.isEmpty()) {
                    Main.getCacheService().setAddonDatabase(db.currentDatabase);
                    Optional<List<MongoCurseforgeCheck>> extraChecksList = Optional.of(Lists.newArrayList());
                    if (MongoConnection.isPersistanceEnabled()) {
                        extraChecksList = MongoConnection.getCurseChecks();
                    }
                    String dbt = "";
                    for (DatabaseType d : db.newDBTypes) {
                        dbt += d.getStringForUrl() + " ";
                    }
                    types = dbt;
                    log.debug(db.changes.data.size() + " curseforge changes detected " + dbt);
                    changed = true;
                    result = base;
                    result += getTextForType("Mods", db.changes);
                    result += getTextForType("Addons", db.changes);
                    result += getTextForType("ModPacks", db.changes);
                    result += getTextForType("Texture Packs", db.changes);
                    if (extraChecksList.isPresent()) {
                        // per-subscription checks: either author+type or author-only
                        for (MongoCurseforgeCheck m : extraChecksList.get()) {
                            if (m.getType() != null) {
                                List<Addon> ret = Filtering.byAuthorAndCategorySection(m.getAuthor(), m.getType(), db.changes);
                                String toSend = base + getTextForType(m.getType(), ret);
                                if (ret.size() > 0) {
                                    log.debug("sending {} {} to {} using {}", m.getAuthor(), m.getType(), m.getChannelID(), dbt);
                                    webSocket.sendMessage(m.getChannelIDAsGUID(), toSend);
                                }
                            } else {
                                if (m.getAuthor() != null) {
                                    // author-only subscription: build a temporary
                                    // database restricted to that author's addons
                                    List<Addon> ret = Filtering.byAuthor(m.getAuthor(), db.changes);
                                    AddonDatabase d = new AddonDatabase();
                                    d.data = ret;
                                    d.timestamp = db.changes.timestamp;
                                    String toSend = base + getTextForType("Mods", d) + getTextForType("Addons", d) + getTextForType("Modpacks", d) + getTextForType("Texture Packs", d);
                                    if (ret.size() > 0) {
                                        log.debug("sending {} to {} using {}", m.getAuthor(), m.getChannelID(), dbt);
                                        webSocket.sendMessage(m.getChannelIDAsGUID(), toSend);
                                    }
                                }
                            }
                        }
                    }
                }
            }
            if (changed) {
                sendServiceStatusNotifications(Main.getCacheService().getContacts().get(), webSocket, result, this.channelsEnabled);
            } else {
                if (Config.isDebugEnabled()) {
                    long now = new Date().getTime();
                    log.debug("No curseforge change detected db_timestamp: " + Main.getCacheService().getAddonDatabase().timestamp + " Now: " + now + " Diff: " + (now - Main.getCacheService()
                            .getAddonDatabase().timestamp));
                }
            }
        } catch (Exception e) {
            // never let an exception kill the scheduled task
            log.error("curseforge checker exception", e);
        }
    }

    /**
     * Send the combined update message to each enabled channel. Entries of the
     * form "group.channel" resolve to a channel id; bare names resolve to a
     * group id.
     */
    private void sendServiceStatusNotifications (@Nonnull ContactsResponse cr, @Nonnull WebSocket ws, @Nonnull String message, @Nonnull Optional<List<String>> channelsEnabled) {
        if (message.isEmpty()) {
            if (Config.isDebugEnabled()) {
                log.debug("no CurseForge Updates");
            }
            return;
        }
        if (channelsEnabled.isPresent()) {
            log.info("curseforge has had an update");
            for (String s : channelsEnabled.get()) {
                if (s.contains(".")) {
                    String[] g = s.split("\\.");
                    Optional<CurseGUID> ci = Main.getCacheService().getContacts().get().getChannelIdbyNames(g[0], g[1], true);
                    if (ci.isPresent()) {
                        log.debug("sending status change for {} to {} guid: {} types {}", "CurseForge", s, ci.get().serialize(), types);
                        ws.sendMessage(ci.get(), message);
                    } else {
                        log.error("no channel id exists for {} {}", g[0], g[1]);
                    }
                } else {
                    Optional<CurseGUID> ci = Main.getCacheService().getContacts().get().getGroupIdByName(s, String::equalsIgnoreCase);
                    if (ci.isPresent()) {
                        ws.sendMessage(ci.get(), message);
                    } else {
                        log.error("no channel id exists for {}", s);
                    }
                }
            }
        }
    }
}
|
package com.github.ansell.csv.util;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import javax.script.Bindings;
import javax.script.Compilable;
import javax.script.CompiledScript;
import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineFactory;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
/**
* A mapping definition from an original CSV field to an output CSV field.
*
* @author Peter Ansell p_ansell@yahoo.com
*/
public class ValueMapping {
// Case-insensitive marker in the Shown column that hides an output field.
private static final String NO = "no";
/**
 * The supported mapping languages, each carrying the mapping expression
 * used when the mapping cell is empty.
 */
public enum ValueMappingLanguage {
    DEFAULT(ValueMapping.DEFAULT_MAPPING),
    JAVASCRIPT("return inputValue;"),
    GROOVY("inputValue"),
    LUA("return inputValue"),
    // ACCESS and CSVMERGE are handled before these mappings run (see
    // apply()), so they carry no default expression.
    ACCESS(""),
    CSVMERGE("");

    // The expression applied when no mapping is given explicitly.
    private final String defaultMapping;

    ValueMappingLanguage(String defaultMapping) {
        this.defaultMapping = defaultMapping;
    }

    /** The expression applied when the mapping cell is empty. */
    public String getDefaultMapping() {
        return this.defaultMapping;
    }

    /** True when the given mapping is exactly this language's default mapping. */
    public boolean matchesDefaultMapping(String mapping) {
        return this.getDefaultMapping().equals(mapping);
    }
}
/**
 * The default mapping if none is specified in the mapping file.
 */
protected static final String DEFAULT_MAPPING = "inputValue";

// Column headers expected in the mapping definition CSV.
public static final String OLD_FIELD = "OldField";
public static final String NEW_FIELD = "NewField";
public static final String SHOWN = "Shown";
public static final String LANGUAGE = "Language";
public static final String MAPPING = "Mapping";

// Shared manager used to look up the script engine for each language.
private static final ScriptEngineManager SCRIPT_MANAGER = new ScriptEngineManager();

// When true, all installed script engines are listed at class-load time.
private static final boolean DEBUG = false;

static {
    if (DEBUG) {
        System.out.println("Installed script engines:");
        SCRIPT_MANAGER.getEngineFactories().stream().map(ScriptEngineFactory::getEngineName)
                .forEach(System.out::println);
    }
}
/**
 * Read a mapping definition CSV and build one ValueMapping per data line.
 *
 * @param input reader over the mapping definition CSV.
 * @return the mappings in file order.
 * @throws IOException if the CSV cannot be read.
 */
public static List<ValueMapping> extractMappings(Reader input) throws IOException {
    final List<ValueMapping> mappings = new ArrayList<>();
    CSVUtil.streamCSV(
            input,
            headers -> { /* header line needs no extra validation here */ },
            (headers, fields) -> newMapping(
                    fields.get(headers.indexOf(LANGUAGE)),
                    fields.get(headers.indexOf(OLD_FIELD)),
                    fields.get(headers.indexOf(NEW_FIELD)),
                    fields.get(headers.indexOf(MAPPING)),
                    fields.get(headers.indexOf(SHOWN))),
            nextMapping -> mappings.add(nextMapping));
    return mappings;
}
/**
 * Apply every mapping to one input line and assemble the output line in the
 * order of the shown output fields.
 *
 * @throws LineFilteredException when a mapping filters this line out.
 */
public static List<String> mapLine(List<String> inputHeaders, List<String> line, List<String> previousLine,
        List<String> previousMappedLine, List<ValueMapping> map) throws LineFilteredException {
    final HashMap<String, String> mappedValues = new HashMap<>(map.size(), 0.75f);

    // Output headers are the shown fields, preserving mapping order.
    final List<String> outputHeaders = new ArrayList<>();
    for (final ValueMapping candidate : map) {
        if (candidate.getShown()) {
            outputHeaders.add(candidate.getOutputField());
        }
    }

    // Every mapping runs (shown or not) so later mappings can see earlier results.
    for (final ValueMapping nextMapping : map) {
        final String mappedValue = nextMapping.apply(inputHeaders, line, previousLine, previousMappedLine,
                outputHeaders, mappedValues);
        mappedValues.put(nextMapping.getOutputField(), mappedValue);
    }

    final List<String> result = new ArrayList<>(outputHeaders.size());
    for (final String nextOutput : outputHeaders) {
        result.add(mappedValues.getOrDefault(nextOutput, ""));
    }
    mappedValues.clear();
    return result;
}
/**
 * Create and initialise a ValueMapping.
 *
 * @param language the mapping language name; unknown or missing values fall
 *            back to {@link ValueMappingLanguage#DEFAULT}.
 * @param input the input (source) field name.
 * @param output the output (destination) field name; must not be empty.
 * @param mapping the mapping expression; empty or missing means the
 *            language's default mapping.
 * @param shownString "no" (case-insensitive) hides the output field.
 * @return the initialised mapping.
 * @throws IllegalArgumentException if output is null or empty.
 */
public static final ValueMapping newMapping(String language, String input, String output, String mapping,
        String shownString) {
    if (output == null || output.isEmpty()) {
        throw new IllegalArgumentException("Output field must not be empty");
    }
    ValueMappingLanguage nextLanguage;
    try {
        // Robustness: a missing language cell previously caused a
        // NullPointerException; treat it like an unrecognised language.
        nextLanguage = language == null ? ValueMappingLanguage.DEFAULT
                : ValueMappingLanguage.valueOf(language.toUpperCase());
    } catch (IllegalArgumentException e) {
        nextLanguage = ValueMappingLanguage.DEFAULT;
    }
    String nextMapping;
    // By default empty (or missing) mappings do not change the input, and are
    // efficiently dealt with as such
    if (mapping != null && !mapping.isEmpty()) {
        nextMapping = mapping;
    } else {
        nextMapping = nextLanguage.getDefaultMapping();
    }
    boolean shown = !NO.equalsIgnoreCase(shownString);
    ValueMapping result = new ValueMapping(nextLanguage, input, output, nextMapping, shown);
    result.init();
    return result;
}
// Language used to interpret the mapping expression.
private final ValueMappingLanguage language;
// Name of the input (source) CSV field.
private final String input;
// Name of the output (destination) CSV field.
private final String output;
// The mapping expression, in the syntax of the selected language.
private final String mapping;
// Whether the output field appears in the generated CSV.
private final boolean shown;
// Script engine for JAVASCRIPT/GROOVY/LUA mappings; set up by init().
private transient ScriptEngine scriptEngine;
// Precompiled mapping when the engine supports compilation; set up by init().
private transient CompiledScript compiledScript;
/**
 * All creation of ValueMapping objects must be done through the
 * {@link #newMapping(String, String, String, String, String)} method.
 */
private ValueMapping(ValueMappingLanguage language, String input, String output, String mapping, boolean shown) {
    this.language = language;
    // Field names and expressions recur for every mapped line, so intern them.
    this.input = input.intern();
    this.output = output.intern();
    this.mapping = mapping.intern();
    this.shown = shown;
}
/**
 * Apply this mapping to one input line, producing the value for the output
 * field. Script-language mappings are dispatched to the precompiled
 * function/script; ACCESS and CSVMERGE are no-ops here because they are
 * handled before these mappings run.
 */
private String apply(List<String> inputHeaders, List<String> line, List<String> previousLine,
        List<String> previousMappedLine, List<String> outputHeaders, Map<String, String> mappedLine) {
    int indexOf = inputHeaders.indexOf(getInputField());
    String nextInputValue;
    if (indexOf >= 0) {
        nextInputValue = line.get(indexOf);
    } else {
        // Provide a default input value for these cases. Likely the input
        // field in this case was a set of fields and won't be directly
        // relied upon
        nextInputValue = "";
    }

    // Short circuit if the mapping is a default mapping
    if (this.language == ValueMappingLanguage.DEFAULT || this.language.matchesDefaultMapping(this.mapping)) {
        return nextInputValue;
    }

    if (this.language == ValueMappingLanguage.JAVASCRIPT || this.language == ValueMappingLanguage.GROOVY
            || this.language == ValueMappingLanguage.LUA) {
        try {
            if (scriptEngine instanceof Invocable) {
                // evaluate script code and access the variable that results
                // from the mapping
                return (String) ((Invocable) scriptEngine).invokeFunction("mapFunction", inputHeaders,
                        this.getInputField(), nextInputValue, outputHeaders, this.getOutputField(), line,
                        mappedLine, previousLine, previousMappedLine);
            } else if (compiledScript != null) {
                Bindings bindings = scriptEngine.createBindings();
                // inputHeaders, inputField, inputValue, outputField, line
                bindings.put("inputHeaders", inputHeaders);
                bindings.put("inputField", this.getInputField());
                bindings.put("inputValue", nextInputValue);
                bindings.put("outputHeaders", outputHeaders);
                bindings.put("outputField", this.getOutputField());
                bindings.put("line", line);
                bindings.put("mapLine", mappedLine);
                bindings.put("previousLine", previousLine);
                bindings.put("previousMappedLine", previousMappedLine);
                return (String) compiledScript.eval(bindings);
            } else {
                throw new UnsupportedOperationException(
                        "Cannot handle results from ScriptEngine.eval that are not Invocable or CompiledScript");
            }
        } catch (ScriptException e) {
            // A LineFilteredException thrown inside the script surfaces as the
            // cause of a ScriptException; translate it back so callers can
            // drop the line.
            if (e.getCause() != null) {
                if (e.getCause().getMessage().contains(LineFilteredException.class.getCanonicalName())) {
                    throw new LineFilteredException(e);
                }
            }
            throw new RuntimeException(e);
        } catch (NoSuchMethodException e) {
            throw new RuntimeException(e);
        }
    } else if (this.language == ValueMappingLanguage.ACCESS) {
        // Access is currently handled separately, before these mappings are
        // applied, so make this a noop
        return nextInputValue;
    } else if (this.language == ValueMappingLanguage.CSVMERGE) {
        // CSVMerge is currently handled separately, before these mappings
        // are applied, so make this a noop
        return nextInputValue;
    } else {
        throw new UnsupportedOperationException("Mapping language not supported: " + this.language);
    }
}
/**
 * Two mappings are equal when their language, fields, expression and
 * shown flag all match.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so this also covers the null check.
    if (!(obj instanceof ValueMapping)) {
        return false;
    }
    final ValueMapping that = (ValueMapping) obj;
    if (language != that.language || shown != that.shown) {
        return false;
    }
    if (input == null ? that.input != null : !input.equals(that.input)) {
        return false;
    }
    if (mapping == null ? that.mapping != null : !mapping.equals(that.mapping)) {
        return false;
    }
    return output == null ? that.output == null : output.equals(that.output);
}
/** @return the input (source) CSV field name. */
public String getInputField() {
    return this.input;
}

/** @return the mapping language. */
public ValueMappingLanguage getLanguage() {
    return this.language;
}

/** @return the mapping expression. */
public String getMapping() {
    return this.mapping;
}

/** @return the output (destination) CSV field name. */
public String getOutputField() {
    return this.output;
}

/** @return whether the output field appears in the generated CSV. */
public boolean getShown() {
    return this.shown;
}
/**
 * Hash over the same fields that {@link #equals(Object)} compares, using
 * the conventional 31-multiplier accumulation.
 */
@Override
public int hashCode() {
    int h = 1;
    h = 31 * h + (input == null ? 0 : input.hashCode());
    h = 31 * h + (language == null ? 0 : language.hashCode());
    h = 31 * h + (mapping == null ? 0 : mapping.hashCode());
    h = 31 * h + (output == null ? 0 : output.hashCode());
    h = 31 * h + (shown ? 1231 : 1237);
    return h;
}
/**
 * One-time setup: pre-compiles this mapping's script for its language so
 * repeated applications are cheap. DEFAULT mappings (or mappings matching the
 * default) need no engine; ACCESS and CSVMERGE are handled elsewhere and are
 * no-ops here; any other language throws UnsupportedOperationException.
 * Compilation failures surface as RuntimeException wrapping ScriptException.
 */
private void init() {
// Short circuit if the mapping is the default mapping and avoid
// creating an instance of nashorn/groovy/etc. for this mapping
if (this.language == ValueMappingLanguage.DEFAULT || this.language.matchesDefaultMapping(this.mapping)) {
return;
}
// precompile the function for this mapping for efficiency
if (this.language == ValueMappingLanguage.JAVASCRIPT) {
try {
scriptEngine = SCRIPT_MANAGER.getEngineByName("javascript");
// Build a prelude that exposes selected Java types (exception type used by
// filter(), boxed numerics, java.time classes) plus helper functions
// (dateMatches/dateConvert/filter/col/outCol) to the user's script, then
// wraps the user-supplied mapping body inside mapFunction(...).
StringBuilder javascriptFunction = new StringBuilder();
javascriptFunction
.append("var LFE = Java.type(\"com.github.ansell.csv.util.LineFilteredException\"); \n");
javascriptFunction.append("var Integer = Java.type('java.lang.Integer'); \n");
javascriptFunction.append("var Double = Java.type('java.lang.Double'); \n");
javascriptFunction.append("var Long = Java.type('java.lang.Long'); \n");
javascriptFunction.append("var LocalDate = Java.type('java.time.LocalDate'); \n");
javascriptFunction.append("var LocalDateTime = Java.type('java.time.LocalDateTime'); \n");
javascriptFunction.append("var LocalTime = Java.type('java.time.LocalTime'); \n");
javascriptFunction.append("var Format = Java.type('java.time.format.DateTimeFormatter'); \n");
javascriptFunction.append("var ChronoUnit = Java.type('java.time.temporal.ChronoUnit'); \n");
javascriptFunction.append(
"var dateMatches = function(dateValue, format) { try {\n format.parse(dateValue); \n return true; \n } catch(e) { } \n return false; }; \n");
javascriptFunction.append(
"var dateConvert = function(dateValue, inputFormat, outputFormat, parseClass) { if(!parseClass) { parseClass = LocalDate; } return parseClass.parse(dateValue, inputFormat).format(outputFormat); }; \n");
javascriptFunction.append("var filter = function() { throw new LFE(); }; \n");
javascriptFunction.append(
"var columnFunction = function(searchHeader, inputHeaders, line) { return line.get(inputHeaders.indexOf(searchHeader)); };\n");
javascriptFunction.append(
"var columnFunctionMap = function(searchHeader, mapLine) { return mapLine.get(searchHeader); };\n");
javascriptFunction.append(
"var mapFunction = function(inputHeaders, inputField, inputValue, outputHeaders, outputField, line, mapLine, previousLine, previousMappedLine) { ");
javascriptFunction.append(
" var col = function(searchHeader) { \n return columnFunction(searchHeader, inputHeaders, line); }; \n ");
javascriptFunction.append(
" var outCol = function(searchHeader) { \n return columnFunctionMap(searchHeader, mapLine); }; \n ");
javascriptFunction.append(this.mapping);
javascriptFunction.append(" }; \n");
// Evaluating defines mapFunction inside the engine for later invocation.
scriptEngine.eval(javascriptFunction.toString());
} catch (ScriptException e) {
throw new RuntimeException(e);
}
} else if (this.language == ValueMappingLanguage.GROOVY) {
try {
scriptEngine = SCRIPT_MANAGER.getEngineByName("groovy");
// Same mapFunction contract as the Javascript path, minus the helpers.
scriptEngine
.eval("def mapFunction(inputHeaders, inputField, inputValue, outputHeaders, outputField, line, mapLine, previousLine, previousMappedLine) { "
+ this.mapping + " }");
} catch (ScriptException e) {
throw new RuntimeException(e);
}
} else if (this.language == ValueMappingLanguage.LUA) {
try {
scriptEngine = SCRIPT_MANAGER.getEngineByName("lua");
// Lua scripts are compiled whole rather than wrapped in a function.
compiledScript = ((Compilable) scriptEngine).compile(this.mapping);
} catch (ScriptException e) {
throw new RuntimeException(e);
}
} else if (this.language == ValueMappingLanguage.ACCESS) {
// Handled separately at apply time; nothing to precompile.
} else if (this.language == ValueMappingLanguage.CSVMERGE) {
// Handled separately before these mappings run; nothing to precompile.
} else {
throw new UnsupportedOperationException("Mapping language not supported: " + this.language);
}
}
/** Human-readable dump of this mapping's fields; not intended for parsing. */
@Override
public String toString() {
    final StringBuilder buf = new StringBuilder("ValueMapping [language=");
    buf.append(language).append(", input=").append(input);
    buf.append(", output=").append(output).append(", mapping=").append(mapping);
    buf.append(", shown=").append(shown).append("]");
    return buf.toString();
}
}
|
package com.untamedears.humbug;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import net.minecraft.server.v1_8_R2.EntityTypes;
import net.minecraft.server.v1_8_R2.Item;
import net.minecraft.server.v1_8_R2.ItemEnderPearl;
import net.minecraft.server.v1_8_R2.MinecraftKey;
import net.minecraft.server.v1_8_R2.RegistryID;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.World.Environment;
import org.bukkit.block.Biome;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.block.BlockState;
import org.bukkit.block.Hopper;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.command.ConsoleCommandSender;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.entity.Arrow;
import org.bukkit.entity.Boat;
import org.bukkit.entity.Damageable;
import org.bukkit.entity.Enderman;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Horse;
import org.bukkit.entity.HumanEntity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Minecart;
import org.bukkit.entity.Player;
import org.bukkit.entity.Skeleton;
import org.bukkit.entity.Skeleton.SkeletonType;
import org.bukkit.entity.Vehicle;
import org.bukkit.entity.minecart.HopperMinecart;
import org.bukkit.entity.minecart.StorageMinecart;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockFromToEvent;
import org.bukkit.event.block.BlockPhysicsEvent;
import org.bukkit.event.block.BlockPistonExtendEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.enchantment.EnchantItemEvent;
import org.bukkit.event.enchantment.PrepareItemEnchantEvent;
import org.bukkit.event.entity.CreatureSpawnEvent;
import org.bukkit.event.entity.EntityChangeBlockEvent;
import org.bukkit.event.entity.EntityCreatePortalEvent;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
import org.bukkit.event.entity.EntityDeathEvent;
import org.bukkit.event.entity.EntityExplodeEvent;
import org.bukkit.event.entity.EntityPortalEvent;
import org.bukkit.event.entity.EntityShootBowEvent;
import org.bukkit.event.entity.ExpBottleEvent;
import org.bukkit.event.entity.FoodLevelChangeEvent;
import org.bukkit.event.entity.PlayerDeathEvent;
import org.bukkit.event.entity.PotionSplashEvent;
import org.bukkit.event.entity.SheepDyeWoolEvent;
import org.bukkit.event.inventory.InventoryMoveItemEvent;
import org.bukkit.event.inventory.InventoryOpenEvent;
import org.bukkit.event.player.PlayerBucketEmptyEvent;
import org.bukkit.event.player.PlayerExpChangeEvent;
import org.bukkit.event.player.PlayerInteractEntityEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.event.player.PlayerItemConsumeEvent;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerKickEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.event.player.PlayerRespawnEvent;
import org.bukkit.event.player.PlayerTeleportEvent;
import org.bukkit.event.player.PlayerTeleportEvent.TeleportCause;
import org.bukkit.event.vehicle.VehicleDestroyEvent;
import org.bukkit.event.vehicle.VehicleEnterEvent;
import org.bukkit.event.vehicle.VehicleExitEvent;
import org.bukkit.event.vehicle.VehicleMoveEvent;
import org.bukkit.event.world.ChunkLoadEvent;
import org.bukkit.event.world.PortalCreateEvent;
import org.bukkit.event.world.StructureGrowEvent;
import org.bukkit.inventory.EntityEquipment;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.InventoryHolder;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.PlayerInventory;
import org.bukkit.inventory.Recipe;
import org.bukkit.inventory.ShapedRecipe;
import org.bukkit.inventory.meta.BookMeta;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.metadata.FixedMetadataValue;
import org.bukkit.metadata.MetadataValue;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import org.bukkit.scheduler.BukkitScheduler;
import org.bukkit.scheduler.BukkitTask;
import org.bukkit.util.Vector;
import com.untamedears.humbug.annotations.BahHumbug;
import com.untamedears.humbug.annotations.BahHumbugs;
import com.untamedears.humbug.annotations.ConfigOption;
import com.untamedears.humbug.annotations.OptType;
public class Humbug extends JavaPlugin implements Listener {
/** Logs {@code message} at SEVERE level with the "[Humbug] " prefix. */
public static void severe(String message) {
log_.severe("[Humbug] " + message);
}
/** Logs {@code message} at WARNING level with the "[Humbug] " prefix. */
public static void warning(String message) {
log_.warning("[Humbug] " + message);
}
/** Logs {@code message} at INFO level with the "[Humbug] " prefix. */
public static void info(String message) {
log_.info("[Humbug] " + message);
}
/** Logs at INFO level, but only when debug mode is enabled in the config. */
public static void debug(String message) {
if (config_.getDebug()) {
log_.info("[Humbug] " + message);
}
}
/** @return the global plugin instance (assigned elsewhere, presumably on enable). */
public static Humbug getPlugin() {
return global_instance_;
}
private static final Logger log_ = Logger.getLogger("Humbug");
// Singleton instance and shared config; both assigned outside this excerpt.
private static Humbug global_instance_ = null;
private static Config config_ = null;
// Largest golden-apple stack handed out by replaceEnchantedGoldenApple,
// taken from the material's max stack size and clamped to a vanilla 64.
private static int max_golden_apple_stack_ = 1;
static {
max_golden_apple_stack_ = Material.GOLDEN_APPLE.getMaxStackSize();
if (max_golden_apple_stack_ > 64) {
max_golden_apple_stack_ = 64;
}
}
// RNG for drop/spawn rolls; combat-tag integration for pearl throttling.
private Random prng_ = new Random();
private CombatTagManager combatTag_ = new CombatTagManager();
public Humbug() {}
// Reduce registered PlayerInteractEvent count. onPlayerInteractAll handles
// cancelled events.
@EventHandler(priority = EventPriority.LOW, ignoreCancelled = true)
public void onPlayerInteract(PlayerInteractEvent event) {
    // Dispatch to each sub-handler in order; stop as soon as one cancels.
    onAnvilOrEnderChestUse(event);
    if (event.isCancelled()) {
        return;
    }
    onCauldronInteract(event);
    if (event.isCancelled()) {
        return;
    }
    onRecordInJukebox(event);
    if (event.isCancelled()) {
        return;
    }
    onEnchantingTableUse(event);
}
// Runs at LOWEST priority and, unlike onPlayerInteract, also receives
// already-cancelled events (ignoreCancelled defaults to false) so both
// sub-handlers see every interaction.
@EventHandler(priority = EventPriority.LOWEST) // ignoreCancelled=false
public void onPlayerInteractAll(PlayerInteractEvent event) {
onPlayerEatGoldenApple(event);
throttlePearlTeleport(event);
}
// Stops people from dyeing sheep when the option is turned off.
@BahHumbug(opt="allow_dye_sheep", def="true")
@EventHandler
public void onDyeWool(SheepDyeWoolEvent event) {
    final boolean dyeingAllowed = config_.get("allow_dye_sheep").getBool();
    if (!dyeingAllowed) {
        event.setCancelled(true);
    }
}
// Configurable bow buff: tag every bow-fired projectile with the bow's
// Power (ARROW_DAMAGE) enchant level so onArrowHitEntity can scale damage.
@EventHandler
public void onEntityShootBowEventAlreadyIntializedSoIMadeThisUniqueName(EntityShootBowEvent event) {
    // getBow() can be null for some shooters; the old code dereferenced it
    // unconditionally and threw NPE. Skipping the tag leaves the arrow with
    // an implicit power of 0, same as an unenchanted bow.
    final ItemStack bow = event.getBow();
    if (bow == null) {
        return;
    }
    final Integer power = bow.getEnchantmentLevel(Enchantment.ARROW_DAMAGE);
    final MetadataValue metadata = new FixedMetadataValue(this, power);
    event.getProjectile().setMetadata("power", metadata);
}
/**
 * Scales arrow damage against living entities by the configured bow_buff
 * multiplier and by 1.25^(power - 5), so a Power V bow is the neutral point.
 */
@BahHumbug(opt="bow_buff", type=OptType.Double, def="1.000000")
@EventHandler
public void onArrowHitEntity(EntityDamageByEntityEvent event) {
    // Read the multiplier once; the old code queried the config twice per hit.
    final double multiplier = config_.get("bow_buff").getDouble();
    // Values within 1e-6 of 1.0 mean the buff is effectively disabled.
    if (multiplier <= 1.000001 && multiplier >= 0.999999) {
        return;
    }
    if (!(event.getEntity() instanceof LivingEntity)) {
        return;
    }
    final Entity damager = event.getDamager();
    if (!(damager instanceof Arrow)) {
        return;
    }
    final Arrow arrow = (Arrow) damager;
    // Power level is attached by the shoot-bow handler; missing metadata = 0.
    int power = 0;
    if (arrow.hasMetadata("power")) {
        power = arrow.getMetadata("power").get(0).asInt();
    }
    double damage = event.getDamage() * multiplier;
    damage *= Math.pow(1.25, power - 5); // f(x) = 1.25^(x - 5)
    event.setDamage(damage);
}
// Fixes Teleporting through walls and doors
// ** and **
// Ender Pearl Teleportation disabling
// ** and **
// Ender pearl cooldown timer
// Per-player throttle state: when the last pearl teleport happened and when
// the player was last sent a cooldown chat message.
private class PearlTeleportInfo {
public long last_teleport;
public long last_notification;
}
// Keyed by player name; populated lazily in throttlePearlTeleport.
private Map<String, PearlTeleportInfo> pearl_teleport_info_
= new TreeMap<String, PearlTeleportInfo>();
private final static int PEARL_THROTTLE_WINDOW = 10000; // 10 sec
private final static int PEARL_NOTIFICATION_WINDOW = 1000; // 1 sec
// EventHandler registered in onPlayerInteractAll
/**
 * Throttles ender pearl throws to one per PEARL_THROTTLE_WINDOW per player,
 * combat-tagging the player on each allowed throw, and chat-notifying the
 * player at most once per PEARL_NOTIFICATION_WINDOW. Right-clicks that open
 * an inventory (chest, anvil, enchanting table, ender chest, workbench) are
 * exempt so container access never costs a pearl cooldown.
 */
@BahHumbug(opt="ender_pearl_teleportation_throttled", def="true")
public void throttlePearlTeleport(PlayerInteractEvent event) {
if (!config_.get("ender_pearl_teleportation_throttled").getBool()) {
return;
}
// Only right-clicks while holding an ender pearl count as a pearl throw.
if (event.getItem() == null || !event.getItem().getType().equals(Material.ENDER_PEARL)) {
return;
}
final Action action = event.getAction();
if (action != Action.RIGHT_CLICK_AIR && action != Action.RIGHT_CLICK_BLOCK) {
return;
}
final Block clickedBlock = event.getClickedBlock();
BlockState clickedState = null;
Material clickedMaterial = null;
if (clickedBlock != null) {
clickedState = clickedBlock.getState();
clickedMaterial = clickedState.getType();
}
if (clickedState != null && (
clickedState instanceof InventoryHolder
|| clickedMaterial.equals(Material.ANVIL)
|| clickedMaterial.equals(Material.ENCHANTMENT_TABLE)
|| clickedMaterial.equals(Material.ENDER_CHEST)
|| clickedMaterial.equals(Material.WORKBENCH))) {
// Prevent Combat Tag/Pearl cooldown on inventory access
return;
}
final long current_time = System.currentTimeMillis();
final Player player = event.getPlayer();
final String player_name = player.getName();
PearlTeleportInfo teleport_info = pearl_teleport_info_.get(player_name);
long time_diff = 0;
if (teleport_info == null) {
// New pearl thrown outside of throttle window
teleport_info = new PearlTeleportInfo();
teleport_info.last_teleport = current_time;
teleport_info.last_notification =
current_time - (PEARL_NOTIFICATION_WINDOW + 100); // Force notify
combatTag_.tagPlayer(player);
} else {
time_diff = current_time - teleport_info.last_teleport;
if (PEARL_THROTTLE_WINDOW > time_diff) {
// Pearl throw throttled
event.setCancelled(true);
} else {
// New pearl thrown outside of throttle window
combatTag_.tagPlayer(player);
teleport_info.last_teleport = current_time;
teleport_info.last_notification =
current_time - (PEARL_NOTIFICATION_WINDOW + 100); // Force notify
time_diff = 0;
}
}
// Rate-limit chat feedback to one message per notification window.
final long notify_diff = current_time - teleport_info.last_notification;
if (notify_diff > PEARL_NOTIFICATION_WINDOW) {
teleport_info.last_notification = current_time;
Integer tagCooldown = combatTag_.remainingSeconds(player);
if (tagCooldown != null) {
player.sendMessage(String.format(
"Pearl in %d seconds. Combat Tag in %d seconds.",
(PEARL_THROTTLE_WINDOW - time_diff + 500) / 1000,
tagCooldown));
} else {
player.sendMessage(String.format(
"Pearl Teleport Cooldown: %d seconds",
(PEARL_THROTTLE_WINDOW - time_diff + 500) / 1000));
}
}
pearl_teleport_info_.put(player_name, teleport_info);
return;
}
/**
 * Handles ender-pearl teleports: cancels them entirely when
 * ender_pearl_teleportation is off; otherwise (fix_teleport_glitch) snaps the
 * destination to the centre of the target block with a per-material height
 * offset, and cancels teleports that would put the player inside solid blocks
 * (i.e. the through-wall/door pearl glitch). A narrow band of the Nether roof
 * (y 125-128) is deliberately exempt from the final solidity check.
 */
@BahHumbugs({
@BahHumbug(opt="ender_pearl_teleportation", def="true"),
@BahHumbug(opt="fix_teleport_glitch", def="true")
})
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
public void onTeleport(PlayerTeleportEvent event) {
TeleportCause cause = event.getCause();
if (cause != TeleportCause.ENDER_PEARL) {
return;
} else if (!config_.get("ender_pearl_teleportation").getBool()) {
event.setCancelled(true);
return;
}
if (!config_.get("fix_teleport_glitch").getBool()) {
return;
}
Location to = event.getTo();
World world = to.getWorld();
// From and To are feet positions. Check and make sure we can teleport to a location with air
// above the To location.
Block toBlock = world.getBlockAt(to);
Block aboveBlock = world.getBlockAt(to.getBlockX(), to.getBlockY()+1, to.getBlockZ());
Block belowBlock = world.getBlockAt(to.getBlockX(), to.getBlockY()-1, to.getBlockZ());
boolean lowerBlockBypass = false;
double height = 0.0;
// Per-material feet offset so the player stands ON partial blocks rather
// than inside them; lowerBlockBypass marks blocks that are safe to occupy.
switch( toBlock.getType() ) {
case CHEST: // Probably never will get hit directly
case ENDER_CHEST: // Probably never will get hit directly
height = 0.875;
break;
case STEP:
lowerBlockBypass = true;
height = 0.5;
break;
case WATER_LILY:
height = 0.016;
break;
case ENCHANTMENT_TABLE:
lowerBlockBypass = true;
height = 0.75;
break;
case BED:
case BED_BLOCK:
// This one is tricky, since even with a height offset of 2.5, it still glitches.
//lowerBlockBypass = true;
//height = 0.563;
// Disabling teleporting on top of beds for now by leaving lowerBlockBypass false.
break;
case FLOWER_POT:
case FLOWER_POT_ITEM:
height = 0.375;
break;
case SKULL: // Probably never will get hit directly
height = 0.5;
break;
default:
break;
}
// Check if the below block is difficult
// This is added because if you face downward directly on a gate, it will
// teleport your feet INTO the gate, thus bypassing the gate until you leave that block.
switch( belowBlock.getType() ) {
case FENCE:
case FENCE_GATE:
case NETHER_FENCE:
case COBBLE_WALL:
height = 0.5;
break;
default:
break;
}
boolean upperBlockBypass = false;
if( height >= 0.5 ) {
// A raised feet position needs head room one block higher than usual.
Block aboveHeadBlock = world.getBlockAt(aboveBlock.getX(), aboveBlock.getY()+1, aboveBlock.getZ());
if( false == aboveHeadBlock.getType().isSolid() ) {
height = 0.5;
} else {
upperBlockBypass = true; // Cancel this event. What's happening is the user is going to get stuck due to the height.
}
}
// Normalize teleport to the center of the block. Feet ON the ground, plz.
// Leave Yaw and Pitch alone
to.setX(Math.floor(to.getX()) + 0.5000);
to.setY(Math.floor(to.getY()) + height);
to.setZ(Math.floor(to.getZ()) + 0.5000);
if(aboveBlock.getType().isSolid() ||
(toBlock.getType().isSolid() && !lowerBlockBypass) ||
upperBlockBypass ) {
// One last check because I care about Top Nether. (someone build me a shrine up there)
boolean bypass = false;
if ((world.getEnvironment() == Environment.NETHER) &&
(to.getBlockY() > 124) && (to.getBlockY() < 129)) {
bypass = true;
}
if (!bypass) {
event.setCancelled(true);
}
}
}
// Villager Trading: blocks opening the trade GUI when villager_trades is off.
@BahHumbug(opt="villager_trades")
@EventHandler(priority = EventPriority.LOW, ignoreCancelled = true)
public void onPlayerInteractEntity(PlayerInteractEntityEvent event) {
    if (config_.get("villager_trades").getBool()) {
        return; // trading allowed, nothing to do
    }
    final Entity clicked = event.getRightClicked();
    if (clicked != null && clicked.getType() == EntityType.VILLAGER) {
        event.setCancelled(true);
    }
}
// Anvil and Ender Chest usage
// EventHandler registered in onPlayerInteract
/**
 * Cancels right-clicks on anvils and/or ender chests when the corresponding
 * option is disabled.
 */
@BahHumbugs({
    @BahHumbug(opt="anvil"),
    @BahHumbug(opt="ender_chest")
})
public void onAnvilOrEnderChestUse(PlayerInteractEvent event) {
    final boolean anvilAllowed = config_.get("anvil").getBool();
    final boolean enderChestAllowed = config_.get("ender_chest").getBool();
    if (anvilAllowed && enderChestAllowed) {
        return;
    }
    if (event.getAction() != Action.RIGHT_CLICK_BLOCK) {
        return;
    }
    // getClickedBlock() is null for *_CLICK_AIR actions; the old code
    // dereferenced it before checking the action and threw NPE on air clicks.
    final Block clicked = event.getClickedBlock();
    if (clicked == null) {
        return;
    }
    final Material material = clicked.getType();
    if ((!anvilAllowed && material == Material.ANVIL)
            || (!enderChestAllowed && material == Material.ENDER_CHEST)) {
        event.setCancelled(true);
    }
}
/**
 * Cancels right-clicks on enchanting tables. Note the inverted sense: tables
 * are blocked when the option is TRUE (the default false leaves them usable).
 */
@BahHumbug(opt="enchanting_table", def = "false")
public void onEnchantingTableUse(PlayerInteractEvent event) {
    if (!config_.get("enchanting_table").getBool()) {
        return;
    }
    if (event.getAction() != Action.RIGHT_CLICK_BLOCK) {
        return;
    }
    // getClickedBlock() is null for *_CLICK_AIR actions; the old code
    // dereferenced it before checking the action and threw NPE on air clicks.
    final Block clicked = event.getClickedBlock();
    if (clicked == null) {
        return;
    }
    if (clicked.getType() == Material.ENCHANTMENT_TABLE) {
        event.setCancelled(true);
    }
}
// Blocks placing ender chests when ender_chests_placeable is off.
@BahHumbug(opt="ender_chests_placeable", def="true")
@EventHandler(ignoreCancelled=true)
public void onEnderChestPlace(BlockPlaceEvent e) {
    final boolean placeable = config_.get("ender_chests_placeable").getBool();
    if (!placeable && e.getBlock().getType() == Material.ENDER_CHEST) {
        e.setCancelled(true);
    }
}
/**
 * Spills the player's ender chest contents at their feet when the
 * ender_backpacks option is enabled (used on quit/kick/death).
 */
public void EmptyEnderChest(HumanEntity human) {
    if (!config_.get("ender_backpacks").getBool()) {
        return;
    }
    dropInventory(human.getLocation(), human.getEnderChest());
}
/**
 * Drops every stack in {@code inv} naturally at {@code loc}, clearing each
 * slot as it is dropped. Best-effort: a failure on one slot does not stop
 * the remaining slots from being processed.
 */
public void dropInventory(Location loc, Inventory inv) {
    final World world = loc.getWorld();
    final int end = inv.getSize();
    for (int i = 0; i < end; ++i) {
        try {
            final ItemStack item = inv.getItem(i);
            if (item != null) {
                world.dropItemNaturally(loc, item);
                inv.clear(i);
            }
        } catch (Exception ex) {
            // The old empty catch hid failures entirely; keep the best-effort
            // semantics but record what went wrong.
            warning("dropInventory: failed to drop slot " + i + ": " + ex.getMessage());
        }
    }
}
// Unlimited Cauldron water
// EventHandler registered in onPlayerInteract
/**
 * Keeps cauldrons topped up: when a bottle is filled from a cauldron, bump
 * the fill level by one to offset vanilla's decrement.
 */
@BahHumbug(opt="unlimitedcauldron")
public void onCauldronInteract(PlayerInteractEvent e) {
    if (!config_.get("unlimitedcauldron").getBool()) {
        return;
    }
    // Check the action first: getClickedBlock() is null for *_CLICK_AIR
    // actions and the old code dereferenced it before any check, throwing
    // NPE whenever a player clicked air with this option enabled.
    if (e.getAction() != Action.RIGHT_CLICK_BLOCK) {
        return;
    }
    final Block block = e.getClickedBlock();
    if (block == null || block.getType() != Material.CAULDRON
            || e.getMaterial() != Material.GLASS_BOTTLE) {
        return;
    }
    // block water going down on cauldrons: pre-compensate the level that
    // vanilla is about to remove for the filled bottle.
    if (block.getData() > 0) {
        block.setData((byte) (block.getData() + 1));
    }
}
// Quartz from Gravel
/**
 * With probability quartz_gravel_percentage (percent), replaces a broken
 * gravel block's vanilla drop with a single quartz item.
 */
@BahHumbug(opt="quartz_gravel_percentage", type=OptType.Int)
@EventHandler(ignoreCancelled=true, priority = EventPriority.HIGHEST)
public void onGravelBreak(BlockBreakEvent e) {
    // Read the percentage once; the old code hit the config twice per break.
    final int percentage = config_.get("quartz_gravel_percentage").getInt();
    if (e.getBlock().getType() != Material.GRAVEL || percentage <= 0) {
        return;
    }
    if (prng_.nextInt(100) < percentage) {
        // Cancel the vanilla break, remove the block ourselves, drop quartz.
        e.setCancelled(true);
        e.getBlock().setType(Material.AIR);
        e.getBlock().getWorld().dropItemNaturally(
                e.getBlock().getLocation(), new ItemStack(Material.QUARTZ, 1));
    }
}
// Portals: block portal creation entirely when portalcreate is off.
@BahHumbug(opt="portalcreate", def="true")
@EventHandler(ignoreCancelled=true)
public void onPortalCreate(PortalCreateEvent e) {
    final boolean portalsAllowed = config_.get("portalcreate").getBool();
    if (!portalsAllowed) {
        e.setCancelled(true);
    }
}
// Same portalcreate option also covers entity-created portals.
@EventHandler(ignoreCancelled=true)
public void onEntityPortalCreate(EntityCreatePortalEvent e) {
    final boolean portalsAllowed = config_.get("portalcreate").getBool();
    if (!portalsAllowed) {
        e.setCancelled(true);
    }
}
// EnderDragon: suppress dragon spawns when the enderdragon option is off.
@BahHumbug(opt="enderdragon", def="true")
@EventHandler(ignoreCancelled=true)
public void onDragonSpawn(CreatureSpawnEvent e) {
    final boolean dragonsAllowed = config_.get("enderdragon").getBool();
    if (!dragonsAllowed && e.getEntityType() == EntityType.ENDER_DRAGON) {
        e.setCancelled(true);
    }
}
// Join/Quit/Kick messages
// Suppresses the broadcast join message when joinquitkick is disabled.
@BahHumbug(opt="joinquitkick", def="true")
@EventHandler(priority=EventPriority.HIGHEST)
public void onJoin(PlayerJoinEvent e) {
if (!config_.get("joinquitkick").getBool()) {
e.setJoinMessage(null);
}
}
// On quit: spill the ender chest first (ender_backpacks), then maybe mute
// the broadcast message.
@EventHandler(priority=EventPriority.HIGHEST)
public void onQuit(PlayerQuitEvent e) {
EmptyEnderChest(e.getPlayer());
if (!config_.get("joinquitkick").getBool()) {
e.setQuitMessage(null);
}
}
// On kick: same treatment as quit.
@EventHandler(priority=EventPriority.HIGHEST)
public void onKick(PlayerKickEvent e) {
EmptyEnderChest(e.getPlayer());
if (!config_.get("joinquitkick").getBool()) {
e.setLeaveMessage(null);
}
}
// Death Messages: optionally log deaths with coordinates, echo them to the
// dying player, suppress or colour the public announcement, and spill the
// ender chest (ender_backpacks).
@BahHumbugs({
    @BahHumbug(opt="deathannounce", def="true"),
    @BahHumbug(opt="deathlog"),
    @BahHumbug(opt="deathpersonal"),
    @BahHumbug(opt="deathred"),
    @BahHumbug(opt="ender_backpacks")
})
@EventHandler(priority=EventPriority.HIGHEST)
public void onDeath(PlayerDeathEvent e) {
    final Player victim = e.getEntity();
    EmptyEnderChest(victim);
    final boolean logToConsole = config_.get("deathlog").getBool();
    final boolean tellPlayer = config_.get("deathpersonal").getBool();
    if (logToConsole || tellPlayer) {
        final Location where = victim.getLocation();
        final String annotated = String.format(
            "%s ([%s] %d, %d, %d)", e.getDeathMessage(), where.getWorld().getName(),
            where.getBlockX(), where.getBlockY(), where.getBlockZ());
        if (logToConsole) {
            info(annotated);
        }
        if (tellPlayer) {
            victim.sendMessage(ChatColor.RED + annotated);
        }
    }
    if (!config_.get("deathannounce").getBool()) {
        e.setDeathMessage(null);
    } else if (config_.get("deathred").getBool()) {
        e.setDeathMessage(ChatColor.RED + e.getDeathMessage());
    }
}
// Endermen Griefing: stop endermen picking up / placing blocks when off.
@BahHumbug(opt="endergrief", def="true")
@EventHandler(ignoreCancelled=true)
public void onEndermanGrief(EntityChangeBlockEvent e)
{
    if (config_.get("endergrief").getBool()) {
        return; // griefing allowed
    }
    if (e.getEntity() instanceof Enderman) {
        e.setCancelled(true);
    }
}
// Wither Insta-breaking and Explosions
// Stops withers from insta-breaking blocks when wither_insta_break is off.
@BahHumbug(opt="wither_insta_break")
@EventHandler(priority = EventPriority.LOW, ignoreCancelled = true)
public void onEntityChangeBlock(EntityChangeBlockEvent event) {
    if (config_.get("wither_insta_break").getBool()) {
        return;
    }
    final Entity changer = event.getEntity();
    if (changer != null && changer.getType() == EntityType.WITHER) {
        event.setCancelled(true);
    }
}
// Strips terrain damage from wither / wither-skull explosions when the
// wither_explosions option is off (the blast itself still happens).
@BahHumbug(opt="wither_explosions")
@EventHandler(priority = EventPriority.LOW, ignoreCancelled = true)
public void onEntityExplode(EntityExplodeEvent event) {
    if (config_.get("wither_explosions").getBool()) {
        return;
    }
    final Entity exploder = event.getEntity();
    if (exploder == null) {
        return;
    }
    final EntityType type = exploder.getType();
    if (type == EntityType.WITHER || type == EntityType.WITHER_SKULL) {
        event.blockList().clear();
    }
}
// Suppresses wither spawns entirely when the wither option is off.
@BahHumbug(opt="wither", def="true")
@EventHandler(priority = EventPriority.LOW, ignoreCancelled = true)
public void onWitherSpawn(CreatureSpawnEvent event) {
    final boolean withersAllowed = config_.get("wither").getBool();
    if (!withersAllowed && event.getEntityType() == EntityType.WITHER) {
        event.setCancelled(true);
    }
}
// Prevent specified items from dropping off mobs
/**
 * Deletes configured item ids from a (non-player) mob's death drops.
 */
public void removeItemDrops(EntityDeathEvent event) {
    if (!config_.doRemoveItemDrops()) {
        return;
    }
    if (event.getEntity() instanceof Player) {
        return; // player drops are never filtered here
    }
    final Set<Integer> removeIds = config_.getRemoveItemDrops();
    final List<ItemStack> drops = event.getDrops();
    // Walk backwards so removal never disturbs indices not yet visited.
    for (int i = drops.size() - 1; i >= 0; --i) {
        if (removeIds.contains(drops.get(i).getTypeId())) {
            drops.remove(i);
        }
    }
}
// Spawn more Wither Skeletons and Ghasts
/**
 * Adjusts Nether spawning: thins portal-frame pig zombie spawns via
 * portal_pig_spawn_multiplier, converts a configured fraction of pig zombie
 * spawns into skeletons (upgraded to wither skeletons by the SKELETON/CUSTOM
 * branch below) or ghasts (relocated to an open-air column), with separate
 * rates for portal-block spawns. Rates are per-million rolls.
 */
@BahHumbugs ({
@BahHumbug(opt="extra_ghast_spawn_rate", type=OptType.Int),
@BahHumbug(opt="extra_wither_skele_spawn_rate", type=OptType.Int),
@BahHumbug(opt="portal_extra_ghast_spawn_rate", type=OptType.Int),
@BahHumbug(opt="portal_extra_wither_skele_spawn_rate", type=OptType.Int),
@BahHumbug(opt="portal_pig_spawn_multiplier", type=OptType.Int)
})
@EventHandler(ignoreCancelled=true)
public void spawnMoreHellMonsters(CreatureSpawnEvent e) {
final Location loc = e.getLocation();
final World world = loc.getWorld();
boolean portalSpawn = false;
// Block ids 90 (portal) and 49 (obsidian) mark a portal-area spawn.
final int blockType = world.getBlockTypeIdAt(loc);
if (blockType == 90 || blockType == 49) {
// >= because we are preventing instead of spawning
if(prng_.nextInt(1000000) >= config_.get("portal_pig_spawn_multiplier").getInt()) {
e.setCancelled(true);
return;
}
portalSpawn = true;
}
if (config_.get("extra_wither_skele_spawn_rate").getInt() <= 0
&& config_.get("extra_ghast_spawn_rate").getInt() <= 0) {
return;
}
if (e.getEntityType() == EntityType.PIG_ZOMBIE) {
int adjustedwither;
int adjustedghast;
if (portalSpawn) {
adjustedwither = config_.get("portal_extra_wither_skele_spawn_rate").getInt();
adjustedghast = config_.get("portal_extra_ghast_spawn_rate").getInt();
} else {
adjustedwither = config_.get("extra_wither_skele_spawn_rate").getInt();
adjustedghast = config_.get("extra_ghast_spawn_rate").getInt();
}
if(prng_.nextInt(1000000) < adjustedwither) {
// Replace the pig zombie with a skeleton; the CUSTOM-spawn branch
// below turns it into a wither skeleton with a stone sword.
e.setCancelled(true);
world.spawnEntity(loc, EntityType.SKELETON);
} else if(prng_.nextInt(1000000) < adjustedghast) {
e.setCancelled(true);
// Find air pockets at least 11 blocks tall in this column and spawn
// the ghast two blocks above a supporting block in one of them.
int x = loc.getBlockX();
int z = loc.getBlockZ();
List<Integer> heights = new ArrayList<Integer>(16);
int lastBlockHeight = 2;
int emptyCount = 0;
int maxHeight = world.getMaxHeight();
for (int y = 2; y < maxHeight; ++y) {
Block block = world.getBlockAt(x, y, z);
if (block.isEmpty()) {
++emptyCount;
if (emptyCount == 11) {
heights.add(lastBlockHeight + 2);
}
} else {
lastBlockHeight = y;
emptyCount = 0;
}
}
if (heights.size() <= 0) {
return;
}
loc.setY(heights.get(prng_.nextInt(heights.size())));
world.spawnEntity(loc, EntityType.GHAST);
}
} else if (e.getEntityType() == EntityType.SKELETON
&& e.getSpawnReason() == CreatureSpawnEvent.SpawnReason.CUSTOM) {
// Our own spawnEntity calls arrive here: upgrade to a wither skeleton
// armed with a stone sword that never drops.
Entity entity = e.getEntity();
if (entity instanceof Skeleton) {
Skeleton skele = (Skeleton)entity;
skele.setSkeletonType(SkeletonType.WITHER);
EntityEquipment entity_equip = skele.getEquipment();
entity_equip.setItemInHand(new ItemStack(Material.STONE_SWORD));
entity_equip.setItemInHandDropChance(0.0F);
}
}
}
// Wither Skull drop rate
// SKULL_ITEM id plus data value 1 identifies a wither skeleton skull.
public static final int skull_id_ = Material.SKULL_ITEM.getId();
public static final byte wither_skull_data_ = 1;
/**
 * Replaces the vanilla wither-skull drop chance with the configured
 * wither_skull_drop_rate (per million): strips any skull the wither skeleton
 * would have dropped, then re-adds exactly one skull when the roll succeeds.
 * Rates outside [0, 1000000] leave vanilla behaviour untouched.
 */
@BahHumbug(opt="wither_skull_drop_rate", type=OptType.Int)
public void adjustWitherSkulls(EntityDeathEvent event) {
Entity entity = event.getEntity();
if (!(entity instanceof Skeleton)) {
return;
}
int rate = config_.get("wither_skull_drop_rate").getInt();
if (rate < 0 || rate > 1000000) {
return;
}
Skeleton skele = (Skeleton)entity;
if (skele.getSkeletonType() != SkeletonType.WITHER) {
return;
}
// Remove every vanilla-dropped wither skull (iterate backwards so
// removal does not shift unvisited indices).
List<ItemStack> drops = event.getDrops();
ItemStack item;
int i = drops.size() - 1;
while (i >= 0) {
item = drops.get(i);
if (item.getTypeId() == skull_id_
&& item.getData().getData() == wither_skull_data_) {
drops.remove(i);
}
--i;
}
// Roll the configured chance; on success add back a single skull.
if (rate - prng_.nextInt(1000000) <= 0) {
return;
}
item = new ItemStack(Material.SKULL_ITEM);
item.setAmount(1);
item.setDurability((short)wither_skull_data_);
drops.add(item);
}
// Generic mob drop rate adjustment
/**
 * Scales non-player mob drops by a per-mob-type loot multiplier (falling back
 * to the "generic" multiplier), zeroes dropped XP when disable_xp_orbs is on,
 * and caps drops that were part of the mob's own equipment (armor or held
 * item) at a single item so equipment is never multiplied.
 */
@BahHumbug(opt="disable_xp_orbs", type=OptType.Bool, def = "true")
public void adjustMobItemDrops(EntityDeathEvent event){
Entity mob = event.getEntity();
if (mob instanceof Player){
return;
}
// Try specific multiplier, if that doesn't exist use generic
EntityType mob_type = mob.getType();
int multiplier = config_.getLootMultiplier(mob_type.toString());
if (multiplier < 0) {
multiplier = config_.getLootMultiplier("generic");
}
//set entity death xp to zero so they don't drop orbs
if(config_.get("disable_xp_orbs").getBool()){
event.setDroppedExp(0);
}
//if a dropped item was in the mob's inventory, drop only one, otherwise drop the amount * the multiplier
LivingEntity liveMob = (LivingEntity) mob;
EntityEquipment mobEquipment = liveMob.getEquipment();
ItemStack[] eeItem = mobEquipment.getArmorContents();
for (ItemStack item : event.getDrops()) {
boolean armor = false;
boolean hand = false;
// isSimilar compares type/meta but ignores stack size.
for(ItemStack i : eeItem){
if(i.isSimilar(item)){
armor = true;
item.setAmount(1);
}
}
if(item.isSimilar(mobEquipment.getItemInHand())){
hand = true;
item.setAmount(1);
}
if(!hand && !armor){
int amount = item.getAmount() * multiplier;
item.setAmount(amount);
}
}
}
// Single registered death handler; runs the three adjustments in a fixed
// order: filter configured drops, then fix wither skulls, then apply loot
// multipliers / XP suppression.
@EventHandler(priority = EventPriority.LOW, ignoreCancelled = true)
public void onEntityDeathEvent(EntityDeathEvent event) {
removeItemDrops(event);
adjustWitherSkulls(event);
adjustMobItemDrops(event);
}
// Enchanted Golden Apple
/**
 * True when {@code item} is an Enchanted Golden Apple: GOLDEN_APPLE material
 * with durability 1 (plain golden apples carry durability 0).
 */
public boolean isEnchantedGoldenApple(ItemStack item) {
    return item != null
        && item.getDurability() == 1
        && item.getType() == Material.GOLDEN_APPLE;
}
/**
 * Swaps an Enchanted Golden Apple stack for a stack of plain golden apples,
 * sized to the smaller of the configured cap and the inventory's own limit,
 * and logs the replacement.
 */
public void replaceEnchantedGoldenApple(
    String player_name, ItemStack item, int inventory_max_stack_size) {
    if (!isEnchantedGoldenApple(item)) {
        return;
    }
    // Cap the replacement stack at whatever the inventory permits.
    final int stack_size = Math.min(max_golden_apple_stack_, inventory_max_stack_size);
    info(String.format(
        "Replaced %d Enchanted with %d Normal Golden Apples for %s",
        item.getAmount(), stack_size, player_name));
    // Durability 0 turns the item back into the plain golden apple variant.
    item.setDurability((short)0);
    item.setAmount(stack_size);
}
// Strips every server recipe whose result is an Enchanted Golden Apple,
// unless crafting them is explicitly allowed.
@BahHumbug(opt="ench_gold_app_craftable")
public void removeRecipies() {
    if (config_.get("ench_gold_app_craftable").getBool()) {
        return;
    }
    final Iterator<Recipe> recipes = getServer().recipeIterator();
    while (recipes.hasNext()) {
        final ItemStack result = recipes.next().getResult();
        if (isEnchantedGoldenApple(result)) {
            recipes.remove();
            info("Enchanted Golden Apple Recipe disabled");
        }
    }
}
// EventHandler registered in onPlayerInteractAll
// Downgrades an enchanted golden apple in hand before it can be eaten.
@BahHumbug(opt="ench_gold_app_edible")
public void onPlayerEatGoldenApple(PlayerInteractEvent event) {
    // The event when eating is cancelled before even LOWEST fires when the
    // player clicks on AIR, hence registration without ignoreCancelled.
    if (config_.get("ench_gold_app_edible").getBool()) {
        return;
    }
    final Player eater = event.getPlayer();
    final Inventory inventory = eater.getInventory();
    replaceEnchantedGoldenApple(
        eater.getName(), event.getItem(), inventory.getMaxStackSize());
}
// Fix entities going through portals
// This needs to be removed when updated to citadel 3.0
@BahHumbug(opt="disable_entities_portal", type = OptType.Bool, def = "true")
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled=true)
public void entityPortalEvent(EntityPortalEvent event){
    final boolean blockEntityPortals = config_.get("disable_entities_portal").getBool();
    event.setCancelled(blockEntityPortals);
}
// Enchanted Book
/** True when {@code item} is a plain (unenchanted) BOOK item. */
public boolean isNormalBook(ItemStack item) {
    return item != null && item.getType() == Material.BOOK;
}
@BahHumbug(opt="ench_book_craftable")
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled=true)
public void onPrepareItemEnchantEvent(PrepareItemEnchantEvent event) {
if (config_.get("ench_book_craftable").getBool()) {
return;
}
ItemStack item = event.getItem();
if (isNormalBook(item)) {
event.setCancelled(true);
}
}
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled=true)
public void onEnchantItemEvent(EnchantItemEvent event) {
if (config_.get("ench_book_craftable").getBool()) {
return;
}
ItemStack item = event.getItem();
if (isNormalBook(item)) {
event.setCancelled(true);
Player player = event.getEnchanter();
warning(
"Prevented book enchant. This should not trigger. Watch player " +
player.getName());
}
}
  // Stop Cobble generation from lava+water
  // All six orthogonal neighbor directions, shared by the lava/water scan
  // helpers and the rail-placement check below.
  private static final BlockFace[] faces_ = new BlockFace[] {
    BlockFace.NORTH,
    BlockFace.SOUTH,
    BlockFace.EAST,
    BlockFace.WEST,
    BlockFace.UP,
    BlockFace.DOWN
  };
private BlockFace WaterAdjacentLava(Block lava_block) {
for (BlockFace face : faces_) {
Block block = lava_block.getRelative(face);
Material material = block.getType();
if (material.equals(Material.WATER) ||
material.equals(Material.STATIONARY_WATER)) {
return face;
}
}
return BlockFace.SELF;
}
  // Removes a flowing lava block that is about to touch water, unless a lava
  // source is nearby to legitimately feed it. Called from the physics scan
  // in onBlockPhysicsEvent (via LavaAreaCheck).
  public void ConvertLava(final Block block) {
    // data == 0 marks a source block; only flowing lava is ever removed.
    int data = (int)block.getData();
    if (data == 0) {
      return;
    }
    Material material = block.getType();
    if (!material.equals(Material.LAVA) &&
        !material.equals(Material.STATIONARY_LAVA)) {
      return;
    }
    // A real lava source within 3 blocks makes this flow legitimate.
    if (isLavaSourceNear(block, 3)) {
      return;
    }
    BlockFace face = WaterAdjacentLava(block);
    if (face == BlockFace.SELF) {
      return;
    }
    // Defer the world mutation to the scheduler rather than changing the
    // block inside the physics event itself.
    Bukkit.getScheduler().runTask(this, new Runnable() {
      @Override
      public void run() {
        block.setType(Material.AIR);
      }
    });
  }
  // Depth-limited flood search for a lava source block (data == 0).
  // ttl is the remaining search radius in blocks; the search fans out over
  // all six faces, so cost grows as 6^ttl — keep ttl small.
  public boolean isLavaSourceNear(Block block, int ttl) {
    int data = (int)block.getData();
    if (data == 0) {
      Material material = block.getType();
      if (material.equals(Material.LAVA) ||
          material.equals(Material.STATIONARY_LAVA)) {
        return true;
      }
    }
    if (ttl <= 0) {
      return false;
    }
    // Recurse into each neighbor with one less step of budget.
    for (BlockFace face : faces_) {
      Block child = block.getRelative(face);
      if (isLavaSourceNear(child, ttl - 1)) {
        return true;
      }
    }
    return false;
  }
public void LavaAreaCheck(Block block, int ttl) {
ConvertLava(block);
if (ttl <= 0) {
return;
}
for (BlockFace face : faces_) {
Block child = block.getRelative(face);
LavaAreaCheck(child, ttl - 1);
}
}
  @BahHumbugs ({
    @BahHumbug(opt="cobble_from_lava"),
    @BahHumbug(opt="cobble_from_lava_scan_radius", type=OptType.Int, def="0")
  })
  @EventHandler(priority = EventPriority.LOWEST)
  public void onBlockPhysicsEvent(BlockPhysicsEvent event) {
    // With cobble_from_lava enabled, vanilla cobble generation is allowed.
    if (config_.get("cobble_from_lava").getBool()) {
      return;
    }
    Block block = event.getBlock();
    // Radius 0 still checks the event block itself (see LavaAreaCheck).
    LavaAreaCheck(block, config_.get("cobble_from_lava_scan_radius").getInt());
  }
// Counteract 1.4.6 protection enchant nerf
@BahHumbug(opt="scale_protection_enchant", def="true")
@EventHandler(priority = EventPriority.LOWEST) // ignoreCancelled=false
public void onEntityDamageByEntityEvent(EntityDamageByEntityEvent event) {
if (!config_.get("scale_protection_enchant").getBool()) {
return;
}
double damage = event.getDamage();
if (damage <= 0.0000001D) {
return;
}
DamageCause cause = event.getCause();
if (!cause.equals(DamageCause.ENTITY_ATTACK) &&
!cause.equals(DamageCause.PROJECTILE)) {
return;
}
Entity entity = event.getEntity();
if (!(entity instanceof Player)) {
return;
}
Player defender = (Player)entity;
PlayerInventory inventory = defender.getInventory();
int enchant_level = 0;
for (ItemStack armor : inventory.getArmorContents()) {
enchant_level += armor.getEnchantmentLevel(Enchantment.PROTECTION_ENVIRONMENTAL);
}
int damage_adjustment = 0;
if (enchant_level >= 3 && enchant_level <= 6) {
// 0 to 2
damage_adjustment = prng_.nextInt(3);
} else if (enchant_level >= 7 && enchant_level <= 10) {
// 0 to 3
damage_adjustment = prng_.nextInt(4);
} else if (enchant_level >= 11 && enchant_level <= 14) {
// 1 to 4
damage_adjustment = prng_.nextInt(4) + 1;
} else if (enchant_level >= 15) {
// 2 to 4
damage_adjustment = prng_.nextInt(3) + 2;
}
damage = Math.max(damage - (double)damage_adjustment, 0.0D);
event.setDamage(damage);
}
@BahHumbug(opt="player_max_health", type=OptType.Int, def="20")
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled=true)
public void onPlayerJoinEvent(PlayerJoinEvent event) {
Player player = event.getPlayer();
player.setMaxHealth((double)config_.get("player_max_health").getInt());
}
// Fix dupe bug with chests and other containers
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled=true)
public void blockExplodeEvent(EntityExplodeEvent event) {
List<HumanEntity> humans = new ArrayList<HumanEntity>();
for (Block block: event.blockList()) {
if (block.getState() instanceof InventoryHolder) {
InventoryHolder holder = (InventoryHolder) block.getState();
for (HumanEntity ent: holder.getInventory().getViewers()) {
humans.add(ent);
}
}
}
for (HumanEntity human: humans) {
human.closeInventory();
}
}
// Prevent entity dup bug
@BahHumbug(opt="fix_rail_dup_bug", def="true")
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled=true)
public void onPistonPushRail(BlockPistonExtendEvent e) {
if (!config_.get("fix_rail_dup_bug").getBool()) {
return;
}
for (Block b : e.getBlocks()) {
Material t = b.getType();
if (t == Material.RAILS ||
t == Material.POWERED_RAIL ||
t == Material.DETECTOR_RAIL) {
e.setCancelled(true);
return;
}
}
}
  @EventHandler(priority = EventPriority.LOWEST, ignoreCancelled=true)
  public void onRailPlace(BlockPlaceEvent e) {
    // Companion to onPistonPushRail: blocks placing a rail adjacent to any
    // piston part, closing the other half of the rail duplication exploit.
    if (!config_.get("fix_rail_dup_bug").getBool()) {
      return;
    }
    Block b = e.getBlock();
    Material t = b.getType();
    if (t == Material.RAILS ||
        t == Material.POWERED_RAIL ||
        t == Material.DETECTOR_RAIL) {
      // Check all six neighbors for any piston component.
      for (BlockFace face : faces_) {
        t = b.getRelative(face).getType();
        if (t == Material.PISTON_STICKY_BASE ||
            t == Material.PISTON_EXTENSION ||
            t == Material.PISTON_MOVING_PIECE ||
            t == Material.PISTON_BASE) {
          e.setCancelled(true);
          return;
        }
      }
    }
  }
  // Combat Tag players on server join
  // Tags every joining player in CombatTag so they cannot safe-log around a
  // login fight.
  @BahHumbug(opt="tag_on_join", def="true")
  @EventHandler
  public void tagOnJoin(PlayerJoinEvent event){
    if(!config_.get("tag_on_join").getBool()) {
      return;
    }
    // Delay two ticks to tag after secure login has been denied.
    // This opens a 1 tick window for a cheater to login and grab
    // server info, which should be detectable and bannable.
    final Player loginPlayer = event.getPlayer();
    Bukkit.getScheduler().runTaskLater(this, new Runnable() {
      @Override
      public void run() {
        combatTag_.tagPlayer(loginPlayer.getName());
        loginPlayer.sendMessage("You have been Combat Tagged on Login");
      }
    }, 2L);
  }
  // Give introduction book to n00bs
  // Names of players whose newbie book was withheld from their death drops;
  // they get a replacement on respawn/join (see the onGiveBook* handlers).
  private Set<String> playersWithN00bBooks_ = new TreeSet<String>();
  @BahHumbug(opt="drop_newbie_book", def="true")
  @EventHandler(priority=EventPriority.HIGHEST)
  public void onPlayerDeathBookDrop(PlayerDeathEvent e) {
    // On death, silently removes the newbie book from the drop list and
    // remembers the player so a replacement is handed back on respawn.
    if (!config_.get("drop_newbie_book").getBool()) {
      return;
    }
    final String playerName = e.getEntity().getName();
    List<ItemStack> dropList = e.getDrops();
    for (int i = 0; i < dropList.size(); ++i) {
      final ItemStack item = dropList.get(i);
      if (item.getType().equals(Material.WRITTEN_BOOK)) {
        final BookMeta bookMeta = (BookMeta)item.getItemMeta();
        // Only a book carrying the configured title counts as the newbie book.
        if (bookMeta.getTitle().equals(config_.getTitle())) {
          playersWithN00bBooks_.add(playerName);
          // Safe to remove by index: we return immediately afterwards.
          dropList.remove(i);
          return;
        }
      }
    }
    // No newbie book among the drops: the player no longer owns one.
    playersWithN00bBooks_.remove(playerName);
  }
@EventHandler
public void onGiveBookOnRespawn(PlayerRespawnEvent event) {
if (!config_.get("drop_newbie_book").getBool()) {
return;
}
final Player player = event.getPlayer();
final String playerName = player.getName();
if (!playersWithN00bBooks_.contains(playerName)) {
return;
}
playersWithN00bBooks_.remove(playerName);
giveN00bBook(player);
}
@EventHandler
public void onGiveBookOnJoin(PlayerJoinEvent event) {
if (!config_.get("drop_newbie_book").getBool()) {
return;
}
final Player player = event.getPlayer();
final String playerName = player.getName();
if (player.hasPlayedBefore() && !playersWithN00bBooks_.contains(playerName)) {
return;
}
playersWithN00bBooks_.remove(playerName);
giveN00bBook(player);
}
public void giveN00bBook(Player player) {
Inventory inv = player.getInventory();
inv.addItem(createN00bBook());
}
public ItemStack createN00bBook() {
ItemStack book = new ItemStack(Material.WRITTEN_BOOK);
BookMeta sbook = (BookMeta)book.getItemMeta();
sbook.setTitle(config_.getTitle());
sbook.setAuthor(config_.getAuthor());
sbook.setPages(config_.getPages());
book.setItemMeta(sbook);
return book;
}
public boolean checkForInventorySpace(Inventory inv, int emptySlots) {
int foundEmpty = 0;
final int end = inv.getSize();
for (int slot = 0; slot < end; ++slot) {
ItemStack item = inv.getItem(slot);
if (item == null) {
++foundEmpty;
} else if (item.getType().equals(Material.AIR)) {
++foundEmpty;
}
}
return foundEmpty >= emptySlots;
}
  // Fills the player's inventory with holiday packages (book, fruitcake,
  // turkey, coal) while at least 4 free slots remain, then logs the count.
  public void giveHolidayPackage(Player player) {
    int count = 0;
    Inventory inv = player.getInventory();
    // Each iteration consumes up to 4 slots, so the space check keeps the
    // loop from overflowing the inventory.
    while (checkForInventorySpace(inv, 4)) {
      inv.addItem(createHolidayBook());
      inv.addItem(createFruitcake());
      inv.addItem(createTurkey());
      inv.addItem(createCoal());
      ++count;
    }
    info(String.format("%s generated %d packs of holiday cheer.",
        player.getName(), count));
  }
public ItemStack createHolidayBook() {
ItemStack book = new ItemStack(Material.WRITTEN_BOOK);
BookMeta sbook = (BookMeta)book.getItemMeta();
sbook.setTitle(config_.getHolidayTitle());
sbook.setAuthor(config_.getHolidayAuthor());
sbook.setPages(config_.getHolidayPages());
List<String> lore = new ArrayList<String>(1);
lore.add("December 25th, 2013");
sbook.setLore(lore);
book.setItemMeta(sbook);
return book;
}
public ItemStack createFruitcake() {
ItemStack cake = new ItemStack(Material.CAKE);
ItemMeta meta = cake.getItemMeta();
meta.setDisplayName("Fruitcake");
List<String> lore = new ArrayList<String>(1);
lore.add("Deliciously stale");
meta.setLore(lore);
cake.setItemMeta(meta);
return cake;
}
  // Display-name pool for createTurkey(); "Turkey" is listed three times so
  // the plain name is drawn more often than the novelty variants.
  private String[] turkey_names_ = new String[] {
    "Turkey",
    "Turkey",
    "Turkey",
    "Turducken",
    "Tofurkey",
    "Cearc Frangach",
    "Dinde",
    "Kalkoen",
    "Indeyka",
    "Pollo d'India",
    "Pelehu",
    "Chilmyeonjo"
  };
public ItemStack createTurkey() {
String turkey_name = turkey_names_[prng_.nextInt(turkey_names_.length)];
ItemStack turkey = new ItemStack(Material.COOKED_CHICKEN);
ItemMeta meta = turkey.getItemMeta();
meta.setDisplayName(turkey_name);
List<String> lore = new ArrayList<String>(1);
lore.add("Tastes like chicken");
meta.setLore(lore);
turkey.setItemMeta(meta);
return turkey;
}
public ItemStack createCoal() {
ItemStack coal = new ItemStack(Material.COAL);
ItemMeta meta = coal.getItemMeta();
List<String> lore = new ArrayList<String>(1);
lore.add("You've been naughty");
meta.setLore(lore);
coal.setItemMeta(meta);
return coal;
}
// Playing records in jukeboxen? Gone
// EventHandler registered in onPlayerInteract
@BahHumbug(opt="disallow_record_playing", def="true")
public void onRecordInJukebox(PlayerInteractEvent event) {
if (!config_.get("disallow_record_playing").getBool()) {
return;
}
Block cb = event.getClickedBlock();
if (cb == null || cb.getType() != Material.JUKEBOX) {
return;
}
ItemStack his = event.getItem();
if(his != null && his.getType().isRecord()) {
event.setCancelled(true);
}
}
  // Water in the nether? Nope.
  @BahHumbugs ({
    @BahHumbug(opt="allow_water_in_nether"),
    @BahHumbug(opt="indestructible_end_portals", def="true")
  })
  @EventHandler(priority = EventPriority.HIGHEST, ignoreCancelled = true)
  public void onPlayerBucketEmptyEvent(PlayerBucketEmptyEvent e) {
    // Two independent guards: block water buckets in Hell biomes, and protect
    // End portal blocks from being replaced by any bucket's liquid.
    if(!config_.get("allow_water_in_nether").getBool()) {
      if( ( e.getBlockClicked().getBiome() == Biome.HELL )
          && ( e.getBucket() == Material.WATER_BUCKET ) ) {
        e.setCancelled(true);
        // Consume the water anyway and grant brief water breathing as
        // feedback for the blocked placement.
        e.getItemStack().setType(Material.BUCKET);
        e.getPlayer().addPotionEffect(new PotionEffect(PotionEffectType.WATER_BREATHING, 5, 1));
      }
    }
    if (config_.get("indestructible_end_portals").getBool()) {
      // The liquid lands on the face-adjacent block, not the clicked block.
      Block baseBlock = e.getBlockClicked();
      BlockFace face = e.getBlockFace();
      Block block = baseBlock.getRelative(face);
      if (block.getType() == Material.ENDER_PORTAL) {
        e.setCancelled(true);
      }
    }
  }
  @EventHandler(priority = EventPriority.HIGHEST, ignoreCancelled = true)
  public void onBlockFromToEvent(BlockFromToEvent e) {
    // Stops water spreading inside Hell biomes and into End portal blocks,
    // then optionally applies the legacy obsidian-generator behavior.
    if(!config_.get("allow_water_in_nether").getBool()) {
      if( e.getToBlock().getBiome() == Biome.HELL ) {
        if( ( e.getBlock().getType() == Material.WATER )
            || ( e.getBlock().getType() == Material.STATIONARY_WATER ) ) {
          e.setCancelled(true);
        }
      }
    }
    if (config_.get("indestructible_end_portals").getBool()) {
      if (e.getToBlock().getType() == Material.ENDER_PORTAL) {
        e.setCancelled(true);
      }
    }
    // Only attempt obsidian generation for flows that survived the guards.
    if(!e.isCancelled() && config_.get("obsidian_generator").getBool()) {
      generateObsidian(e);
    }
  }
  // Generates obsidian like it did in 1.7.
  // Note that this does not change anything in versions where obsidian generation exists.
  // Pattern: stationary lava flowing onto tripwire (string) that touches
  // stationary water on at least one horizontal side -> obsidian.
  @BahHumbug(opt="obsidian_generator", def="false")
  public void generateObsidian(BlockFromToEvent event) {
    if(!event.getBlock().getType().equals(Material.STATIONARY_LAVA)) {
      return;
    }
    if(!event.getToBlock().getType().equals(Material.TRIPWIRE)) {
      return;
    }
    Block string = event.getToBlock();
    if(!(string.getRelative(BlockFace.NORTH).getType().equals(Material.STATIONARY_WATER)
        || string.getRelative(BlockFace.EAST).getType().equals(Material.STATIONARY_WATER)
        || string.getRelative(BlockFace.WEST).getType().equals(Material.STATIONARY_WATER)
        || string.getRelative(BlockFace.SOUTH).getType().equals(Material.STATIONARY_WATER))) {
      return;
    }
    string.setType(Material.OBSIDIAN);
  }
  // Stops perculators
  // Per-chunk count of recent high-altitude liquid-flow events; cleared by
  // the waterSchedule task below.
  private Map<Chunk, Integer> waterChunks = new HashMap<Chunk, Integer>();
  // Pending reset task; null whenever no reset is currently scheduled.
  BukkitTask waterSchedule = null;
  @BahHumbugs ({
    @BahHumbug(opt="max_water_lava_height", def="100", type=OptType.Int),
    @BahHumbug(opt="max_water_lava_amount", def = "400", type=OptType.Int),
    @BahHumbug(opt="max_water_lava_timer", def = "1200", type=OptType.Int)
  })
  @EventHandler(priority = EventPriority.LOWEST)
  public void stopLiquidMoving(BlockFromToEvent event){
    // Rate-limits liquid spread above max_water_lava_height: counts flow
    // events per chunk and cancels once the surrounding 3x3-chunk total
    // exceeds max_water_lava_amount. Counters reset on a timer.
    try {
      Block to = event.getToBlock();
      Block from = event.getBlock();
      if (to.getLocation().getBlockY() < config_.get("max_water_lava_height").getInt()) {
        return;
      }
      Material mat = from.getType();
      if (!(mat.equals(Material.WATER) || mat.equals(Material.STATIONARY_WATER) ||
          mat.equals(Material.LAVA) || mat.equals(Material.STATIONARY_LAVA))) {
        return;
      }
      // Bump this chunk's flow counter.
      Chunk c = to.getChunk();
      if (!waterChunks.containsKey(c)){
        waterChunks.put(c, 0);
      }
      Integer i = waterChunks.get(c);
      i = i + 1;
      waterChunks.put(c, i);
      int amount = getWaterInNearbyChunks(c);
      if (amount > config_.get("max_water_lava_amount").getInt()) {
        event.setCancelled(true);
      }
      // Schedule the counter reset once; the task re-arms by nulling
      // waterSchedule when it fires.
      if (waterSchedule != null) {
        return;
      }
      waterSchedule = Bukkit.getScheduler().runTaskLater(this, new Runnable(){
        @Override
        public void run() {
          waterChunks.clear();
          waterSchedule = null;
        }
      }, config_.get("max_water_lava_timer").getInt());
    } catch (Exception e){
      // Deliberate best-effort: chunk queries during world generation can
      // throw; skip this event rather than break the physics tick.
      getLogger().log(Level.INFO, "Tried getting info from a chunk before it generated, skipping.");
      return;
    }
  }
public int getWaterInNearbyChunks(Chunk chunk){
World world = chunk.getWorld();
Chunk[] chunks = {
world.getChunkAt(chunk.getX(), chunk.getZ()), world.getChunkAt(chunk.getX()-1, chunk.getZ()),
world.getChunkAt(chunk.getX(), chunk.getZ()-1), world.getChunkAt(chunk.getX()-1, chunk.getZ()-1),
world.getChunkAt(chunk.getX()+1, chunk.getZ()), world.getChunkAt(chunk.getX(), chunk.getZ()+1),
world.getChunkAt(chunk.getX()+1, chunk.getZ()+1), world.getChunkAt(chunk.getX()-1, chunk.getZ()+1),
world.getChunkAt(chunk.getX()+1, chunk.getZ()-1)
};
int count = 0;
for (Chunk c: chunks){
Integer amount = waterChunks.get(c);
if (amount == null)
continue;
count += amount;
}
return count;
}
  // Changes Strength Potions, strength_multiplier 3 is roughly Pre-1.6 Level
  @BahHumbugs ({
    @BahHumbug(opt="nerf_strength", def="true"),
    @BahHumbug(opt="strength_multiplier", type=OptType.Int, def="3")
  })
  @EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
  public void onPlayerDamage(EntityDamageByEntityEvent event) {
    // Replaces the multiplicative 1.6 Strength bonus with a flat
    // +strength_multiplier per potion level.
    if (!config_.get("nerf_strength").getBool()) {
      return;
    }
    if (!(event.getDamager() instanceof Player)) {
      return;
    }
    Player player = (Player)event.getDamager();
    final int strengthMultiplier = config_.get("strength_multiplier").getInt();
    if (player.hasPotionEffect(PotionEffectType.INCREASE_DAMAGE)) {
      for (PotionEffect effect : player.getActivePotionEffects()) {
        if (effect.getType().equals(PotionEffectType.INCREASE_DAMAGE)) {
          // Amplifier is zero-based; level I has amplifier 0.
          final int potionLevel = effect.getAmplifier() + 1;
          // Undo the vanilla 1.3x-per-level buff, then add the flat bonus.
          final double unbuffedDamage = event.getDamage() / (1.3 * potionLevel + 1);
          final double newDamage = unbuffedDamage + (potionLevel * strengthMultiplier);
          event.setDamage(newDamage);
          break;
        }
      }
    }
  }
  // Buffs health splash to pre-1.6 levels
  @BahHumbug(opt="buff_health_pots", def="true")
  @EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
  public void onPotionSplash(PotionSplashEvent event) {
    if (!config_.get("buff_health_pots").getBool()) {
      return;
    }
    // Bail out unless every effect on the potion is instant-health; mixed
    // or harmful potions are left untouched.
    for (PotionEffect effect : event.getEntity().getEffects()) {
      if (!(effect.getType().getName().equalsIgnoreCase("heal"))) { // Splash potion of poison
        return;
      }
    }
    for (LivingEntity entity : event.getAffectedEntities()) {
      if (entity instanceof Player) {
        // Only heal living players; add a flat 4 hp on top of vanilla,
        // capped at the player's max health.
        if(((Damageable)entity).getHealth() > 0d) {
          final double newHealth = Math.min(
              ((Damageable)entity).getHealth() + 4.0D,
              ((Damageable)entity).getMaxHealth());
          entity.setHealth(newHealth);
        }
      }
    }
  }
  // Bow shots cause slow debuff
  // Players hit by another player's arrow get a chance of a Slow debuff; the
  // chance is projectile_slow_chance plus 5 points per bow enchant level
  // recorded by onEntityShootBow.
  @BahHumbugs ({
    @BahHumbug(opt="projectile_slow_chance", type=OptType.Int, def="30"),
    @BahHumbug(opt="projectile_slow_ticks", type=OptType.Int, def="100")
  })
  @EventHandler
  public void onEDBE(EntityDamageByEntityEvent event) {
    int rate = config_.get("projectile_slow_chance").getInt();
    if (rate <= 0 || rate > 100) {
      return;
    }
    if (!(event.getEntity() instanceof Player)) {
      return;
    }
    boolean damager_is_player_arrow = false;
    int chance_scaling = 0;
    Entity damager_entity = event.getDamager();
    if (damager_entity != null) {
      // public LivingEntity CraftArrow.getShooter()
      // Playing this game to not have to take a hard dependency on
      // craftbukkit internals.
      try {
        Class<?> damager_class = damager_entity.getClass();
        if (damager_class.getName().endsWith(".CraftArrow")) {
          Method getShooter = damager_class.getMethod("getShooter");
          Object result = getShooter.invoke(damager_entity);
          if (result instanceof Player) {
            damager_is_player_arrow = true;
            String player_name = ((Player)result).getName();
            if (bow_level_.containsKey(player_name)) {
              chance_scaling = bow_level_.get(player_name);
            }
          }
        }
      } catch(Exception ex) {
        // Deliberate best-effort reflection: any failure simply means the
        // damager is not treated as a player-fired arrow.
      }
    }
    if (!damager_is_player_arrow) {
      return;
    }
    rate += chance_scaling * 5;
    int percent = prng_.nextInt(100);
    if (percent < rate){
      int ticks = config_.get("projectile_slow_ticks").getInt();
      Player player = (Player)event.getEntity();
      player.addPotionEffect(new PotionEffect(PotionEffectType.SLOW, ticks, 1, false));
    }
  }
  // Used to track bow enchantment levels per player
  // Written by onEntityShootBow, read by onEDBE; keyed by player name.
  private Map<String, Integer> bow_level_ = new TreeMap<String, Integer>();
@EventHandler
public void onEntityShootBow(EntityShootBowEvent event) {
if (!(event.getEntity() instanceof Player)) {
return;
}
int ench_level = 0;
ItemStack bow = event.getBow();
Map<Enchantment, Integer> enchants = bow.getEnchantments();
for (Enchantment ench : enchants.keySet()) {
int tmp_ench_level = 0;
if (ench.equals(Enchantment.KNOCKBACK) || ench.equals(Enchantment.ARROW_KNOCKBACK)) {
tmp_ench_level = enchants.get(ench) * 2;
} else if (ench.equals(Enchantment.ARROW_DAMAGE)) {
tmp_ench_level = enchants.get(ench);
}
if (tmp_ench_level > ench_level) {
ench_level = tmp_ench_level;
}
}
bow_level_.put(
((Player)event.getEntity()).getName(),
ench_level);
}
// BottleO refugees
// Changes the yield from an XP bottle
@BahHumbugs ({
@BahHumbug(opt="disable_experience", def="true"),
@BahHumbug(opt="xp_per_bottle", type=OptType.Int, def="10")
})
@EventHandler(priority=EventPriority.HIGHEST)
public void onExpBottleEvent(ExpBottleEvent event) {
final int bottle_xp = config_.get("xp_per_bottle").getInt();
if (config_.get("disable_experience").getBool()) {
((Player) event.getEntity().getShooter()).giveExp(bottle_xp);
event.setExperience(0);
} else {
event.setExperience(bottle_xp);
}
}
// Diables all XP gain except when manually changed via code.
@EventHandler
public void onPlayerExpChangeEvent(PlayerExpChangeEvent event) {
if (config_.get("disable_experience").getBool()) {
event.setAmount(0);
}
}
  // Find the end portals
  // Legacy numeric block IDs matched by the portal scan below.
  public static final int ender_portal_id_ = Material.ENDER_PORTAL.getId();
  public static final int ender_portal_frame_id_ = Material.ENDER_PORTAL_FRAME.getId();
  // Chunks already scanned this session, keyed by a combined (x, z) id.
  private Set<Long> end_portal_scanned_chunks_ = new TreeSet<Long>();
@BahHumbug(opt="find_end_portals", type=OptType.String)
@EventHandler
public void onFindEndPortals(ChunkLoadEvent event) {
String scanWorld = config_.get("find_end_portals").getString();
if (scanWorld.isEmpty()) {
return;
}
World world = event.getWorld();
if (!world.getName().equalsIgnoreCase(scanWorld)) {
return;
}
Chunk chunk = event.getChunk();
long chunk_id = (long)chunk.getX() << 32L + (long)chunk.getZ();
if (end_portal_scanned_chunks_.contains(chunk_id)) {
return;
}
end_portal_scanned_chunks_.add(chunk_id);
int chunk_x = chunk.getX() * 16;
int chunk_end_x = chunk_x + 16;
int chunk_z = chunk.getZ() * 16;
int chunk_end_z = chunk_z + 16;
int max_height = 0;
for (int x = chunk_x; x < chunk_end_x; x += 3) {
for (int z = chunk_z; z < chunk_end_z; ++z) {
int height = world.getMaxHeight();
if (height > max_height) {
max_height = height;
}
}
}
for (int y = 1; y <= max_height; ++y) {
int z_adj = 0;
for (int x = chunk_x; x < chunk_end_x; ++x) {
for (int z = chunk_z + z_adj; z < chunk_end_z; z += 3) {
int block_type = world.getBlockTypeIdAt(x, y, z);
if (block_type == ender_portal_id_ || block_type == ender_portal_frame_id_) {
info(String.format("End portal found at %d,%d", x, z));
return;
}
}
// This funkiness results in only searching 48 of the 256 blocks on
// each y-level. 81.25% fewer blocks checked.
++z_adj;
if (z_adj >= 3) {
z_adj = 0;
x += 2;
}
}
}
}
// Prevent inventory access while in a vehicle, unless it's the Player's
@BahHumbugs ({
@BahHumbug(opt="prevent_opening_container_carts", def="true"),
@BahHumbug(opt="prevent_vehicle_inventory_open", def="true")
})
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
public void onPreventVehicleInvOpen(InventoryOpenEvent event) {
// Cheap break-able conditional statement
while (config_.get("prevent_vehicle_inventory_open").getBool()) {
HumanEntity human = event.getPlayer();
if (!(human instanceof Player)) {
break;
}
if (!human.isInsideVehicle()) {
break;
}
InventoryHolder holder = event.getInventory().getHolder();
if (holder == human) {
break;
}
event.setCancelled(true);
break;
}
if (config_.get("prevent_opening_container_carts").getBool() && !event.isCancelled()) {
InventoryHolder holder = event.getInventory().getHolder();
if (holder instanceof StorageMinecart || holder instanceof HopperMinecart) {
event.setCancelled(true);
}
}
}
// Disable outbound hopper transfers
@BahHumbug(opt="disable_hopper_out_transfers", def="false")
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
public void onInventoryMoveItem(InventoryMoveItemEvent event) {
if (!config_.get("disable_hopper_out_transfers").getBool()) {
return;
}
final Inventory src = event.getSource();
final InventoryHolder srcHolder = src.getHolder();
if (srcHolder instanceof Hopper) {
event.setCancelled(true);
return;
}
}
// Adjust horse speeds
@BahHumbug(opt="horse_speed", type=OptType.Double, def="0.170000")
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
public void onVehicleEnter(VehicleEnterEvent event) {
// 0.17 is just a tad slower than minecarts
Vehicle vehicle = event.getVehicle();
if (!(vehicle instanceof Horse)) {
return;
}
Versioned.setHorseSpeed((Entity)vehicle, config_.get("horse_speed").getDouble());
}
  // Admins can view player inventories
  // Opens a 36-slot snapshot of the target's main inventory for the admin.
  // NOTE(review): the copy holds the same ItemStack references as the live
  // inventory — confirm whether in-copy edits propagate before relying on it.
  @SuppressWarnings("deprecation")
  public void onInvseeCommand(Player admin, String playerName) {
    final Player player = Bukkit.getPlayerExact(playerName);
    if (player == null) {
      admin.sendMessage("Player not found");
      return;
    }
    final Inventory pl_inv = player.getInventory();
    // Copy the 36 storage slots into a fresh named inventory view.
    final Inventory inv = Bukkit.createInventory(
        admin, 36, playerName + "'s Inventory");
    for (int slot = 0; slot < 36; slot++) {
      final ItemStack it = pl_inv.getItem(slot);
      inv.setItem(slot, it);
    }
    admin.openInventory(inv);
    admin.updateInventory();
  }
// Fix boats
@BahHumbug(opt="prevent_self_boat_break", def="true")
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
public void onPreventLandBoats(VehicleDestroyEvent event) {
if (!config_.get("prevent_land_boats").getBool()) {
return;
}
final Vehicle vehicle = event.getVehicle();
if (vehicle == null || !(vehicle instanceof Boat)) {
return;
}
final Entity passenger = vehicle.getPassenger();
if (passenger == null || !(passenger instanceof Player)) {
return;
}
final Entity attacker = event.getAttacker();
if (attacker == null) {
return;
}
if (!attacker.getUniqueId().equals(passenger.getUniqueId())) {
return;
}
final Player player = (Player)passenger;
Humbug.info(String.format(
"Player '%s' kicked for self damaging boat at %s",
player.getName(), vehicle.getLocation().toString()));
vehicle.eject();
vehicle.getWorld().dropItem(vehicle.getLocation(), new ItemStack(Material.BOAT));
vehicle.remove();
((Player)passenger).kickPlayer("Nope");
}
  @BahHumbug(opt="prevent_land_boats", def="true")
  @EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
  public void onPreventLandBoats(VehicleMoveEvent event) {
    // Ejects players from boats that move over dry land (a glitchy travel
    // trick), dropping the boat as an item.
    if (!config_.get("prevent_land_boats").getBool()) {
      return;
    }
    final Vehicle vehicle = event.getVehicle();
    if (vehicle == null || !(vehicle instanceof Boat)) {
      return;
    }
    final Entity passenger = vehicle.getPassenger();
    if (passenger == null || !(passenger instanceof Player)) {
      return;
    }
    final Location to = event.getTo();
    // A boat counts as "on land" when the block beneath it is not water.
    final Material boatOn = to.getBlock().getRelative(BlockFace.DOWN).getType();
    if (boatOn.equals(Material.STATIONARY_WATER) || boatOn.equals(Material.WATER)) {
      return;
    }
    Humbug.info(String.format(
        "Player '%s' removed from land-boat at %s",
        ((Player)passenger).getName(), to.toString()));
    vehicle.eject();
    vehicle.getWorld().dropItem(vehicle.getLocation(), new ItemStack(Material.BOAT));
    vehicle.remove();
  }
// Fix minecarts
public boolean checkForTeleportSpace(Location loc) {
final Block block = loc.getBlock();
final Material mat = block.getType();
if (mat.isSolid()) {
return false;
}
final Block above = block.getRelative(BlockFace.UP);
if (above.getType().isSolid()) {
return false;
}
return true;
}
  /**
   * Teleports the player to a safe spot at or near location.
   *
   * Tries the target block, then one block up; failing both, scans the whole
   * column from the top of the world for the highest solid block with four
   * non-solid blocks above it.
   *
   * @param reason human-readable cause, used only in the log line
   * @return true when a teleport happened, false when no safe spot exists
   */
  public boolean tryToTeleport(Player player, Location location, String reason) {
    Location loc = location.clone();
    // Center on the block and lift slightly to avoid clipping into it.
    loc.setX(Math.floor(loc.getX()) + 0.500000D);
    loc.setY(Math.floor(loc.getY()) + 0.02D);
    loc.setZ(Math.floor(loc.getZ()) + 0.500000D);
    final Location baseLoc = loc.clone();
    final World world = baseLoc.getWorld();
    // Check if teleportation here is viable
    boolean performTeleport = checkForTeleportSpace(loc);
    if (!performTeleport) {
      loc.setY(loc.getY() + 1.000000D);
      performTeleport = checkForTeleportSpace(loc);
    }
    if (performTeleport) {
      player.setVelocity(new Vector());
      player.teleport(loc);
      Humbug.info(String.format(
          "Player '%s' %s: Teleported to %s",
          player.getName(), reason, loc.toString()));
      return true;
    }
    loc = baseLoc.clone();
    // Create a sliding window of block types and track how many of those
    // are solid. Keep fetching the block below the current block to move down.
    int air_count = 0;
    LinkedList<Material> air_window = new LinkedList<Material>();
    loc.setY((float)world.getMaxHeight() - 2);
    Block block = world.getBlockAt(loc);
    // Seed the 4-block window from the top of the column.
    for (int i = 0; i < 4; ++i) {
      Material block_mat = block.getType();
      if (!block_mat.isSolid()) {
        ++air_count;
      }
      air_window.addLast(block_mat);
      block = block.getRelative(BlockFace.DOWN);
    }
    // Now that the window is prepared, scan down the Y-axis.
    while (block.getY() >= 1) {
      Material block_mat = block.getType();
      if (block_mat.isSolid()) {
        // Solid floor with four clear blocks above it: safe landing spot.
        if (air_count == 4) {
          player.setVelocity(new Vector());
          loc = block.getLocation();
          loc.setX(Math.floor(loc.getX()) + 0.500000D);
          loc.setY(loc.getY() + 1.02D);
          loc.setZ(Math.floor(loc.getZ()) + 0.500000D);
          player.teleport(loc);
          Humbug.info(String.format(
              "Player '%s' %s: Teleported to %s",
              player.getName(), reason, loc.toString()));
          return true;
        }
      } else { // !block_mat.isSolid()
        ++air_count;
      }
      // Slide the window down one block, keeping air_count in sync.
      air_window.addLast(block_mat);
      if (!air_window.removeFirst().isSolid()) {
        --air_count;
      }
      block = block.getRelative(BlockFace.DOWN);
    }
    return false;
  }
@BahHumbug(opt="prevent_ender_pearl_save", def="true")
@EventHandler
public void enderPearlSave(EnderPearlUnloadEvent event) {
if(!config_.get("prevent_ender_pearl_save").getBool())
return;
event.setCancelled(true);
}
@BahHumbug(opt="fix_vehicle_logout_bug", def="true")
@EventHandler(priority = EventPriority.LOWEST, ignoreCancelled=true)
public void onDisallowVehicleLogout(PlayerQuitEvent event) {
if (!config_.get("fix_vehicle_logout_bug").getBool()) {
return;
}
kickPlayerFromVehicle(event.getPlayer());
}
  // Ejects the player from a minecart/horse/arrow vehicle and teleports them
  // to a safe spot; kills them when no safe spot can be found.
  public void kickPlayerFromVehicle(Player player) {
    Entity vehicle = player.getVehicle();
    if (vehicle == null
        || !(vehicle instanceof Minecart || vehicle instanceof Horse || vehicle instanceof Arrow)) {
      return;
    }
    Location vehicleLoc = vehicle.getLocation();
    // Vehicle data has been cached, now safe to kick the player out
    player.leaveVehicle();
    if (!tryToTeleport(player, vehicleLoc, "logged out")) {
      player.setHealth(0.000000D);
      Humbug.info(String.format(
          "Player '%s' logged out in vehicle: Killed", player.getName()));
    }
  }
  @BahHumbug(opt="fix_minecart_reenter_bug", def="true")
  @EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
  public void onFixMinecartReenterBug(VehicleExitEvent event) {
    // After a player exits a minecart, re-teleports them to a safe spot to
    // defeat the client-side exit-position desync exploit.
    if (!config_.get("fix_minecart_reenter_bug").getBool()) {
      return;
    }
    final Vehicle vehicle = event.getVehicle();
    if (vehicle == null || !(vehicle instanceof Minecart)) {
      return;
    }
    final Entity passengerEntity = event.getExited();
    if (passengerEntity == null || !(passengerEntity instanceof Player)) {
      return;
    }
    // Must delay the teleport 2 ticks or else the player's mis-managed
    // movement still occurs. With 1 tick it could still occur.
    final Player player = (Player)passengerEntity;
    final Location vehicleLoc = vehicle.getLocation();
    Bukkit.getScheduler().runTaskLater(this, new Runnable() {
      @Override
      public void run() {
        // No safe spot anywhere in the column: kill rather than leave the
        // player clipped into blocks.
        if (!tryToTeleport(player, vehicleLoc, "exiting vehicle")) {
          player.setHealth(0.000000D);
          Humbug.info(String.format(
              "Player '%s' exiting vehicle: Killed", player.getName()));
        }
      }
    }, 2L);
  }
  @EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
  public void onFixMinecartReenterBug(VehicleDestroyEvent event) {
    // Same safety teleport as the VehicleExitEvent variant, but for riders
    // whose minecart or horse is destroyed under them.
    if (!config_.get("fix_minecart_reenter_bug").getBool()) {
      return;
    }
    final Vehicle vehicle = event.getVehicle();
    if (vehicle == null || !(vehicle instanceof Minecart || vehicle instanceof Horse)) {
      return;
    }
    final Entity passengerEntity = vehicle.getPassenger();
    if (passengerEntity == null || !(passengerEntity instanceof Player)) {
      return;
    }
    // Must delay the teleport 2 ticks or else the player's mis-managed
    // movement still occurs. With 1 tick it could still occur.
    final Player player = (Player)passengerEntity;
    final Location vehicleLoc = vehicle.getLocation();
    Bukkit.getScheduler().runTaskLater(this, new Runnable() {
      @Override
      public void run() {
        if (!tryToTeleport(player, vehicleLoc, "in destroyed vehicle")) {
          player.setHealth(0.000000D);
          Humbug.info(String.format(
              "Player '%s' in destroyed vehicle: Killed", player.getName()));
        }
      }
    }, 2L);
  }
  // Adjust ender pearl gravity
  // Vanilla item id and registry key for ender pearls, used when
  // re-registering the custom pearl item in hookEnderPearls() below.
  public final static int pearlId = 368;
  public final static MinecraftKey pearlKey = new MinecraftKey("ender_pearl");
// Replaces the vanilla ender pearl item (and the thrown-pearl entity type)
// with custom subclasses so that pearl gravity can be configured. Works by
// rewriting Minecraft's internal item/entity registries via reflection.
// NOTE(review): the single-letter field names ("a", "b", "c", "d", "f", "g")
// are obfuscated NMS internals and therefore version-specific — confirm they
// match the targeted server build.
@SuppressWarnings({ "rawtypes", "unchecked" })
@BahHumbug(opt="ender_pearl_gravity", type=OptType.Double, def="0.060000")
public void hookEnderPearls() {
try {
// They thought they could stop us by preventing us from registering an
// item. We'll show them
// Open up the id registry's internal map and item list.
Field idRegistryField = Item.REGISTRY.getClass().getDeclaredField("a");
idRegistryField.setAccessible(true);
Object idRegistry = idRegistryField.get(Item.REGISTRY);
Field idRegistryMapField = idRegistry.getClass().getDeclaredField("a");
idRegistryMapField.setAccessible(true);
Object idRegistryMap = idRegistryMapField.get(idRegistry);
Field idRegistryItemsField = idRegistry.getClass().getDeclaredField("b");
idRegistryItemsField.setAccessible(true);
Object idRegistryItemList = idRegistryItemsField.get(idRegistry);
// Remove ItemEnderPearl from the ID Registry
Item idItem = null;
Iterator<Item> itemListIter = ((List<Item>)idRegistryItemList).iterator();
while (itemListIter.hasNext()) {
idItem = itemListIter.next();
if (idItem == null) {
continue;
}
if (!(idItem instanceof ItemEnderPearl)) {
continue;
}
itemListIter.remove();
break;
}
// Also drop the stock pearl from the registry's reverse map, if found.
if (idItem != null) {
((Map<Item, Integer>)idRegistryMap).remove(idItem);
}
// Register our custom pearl Item.
Item.REGISTRY.a(pearlId, pearlKey, new CustomNMSItemEnderPearl(config_));
// Setup the custom entity
// EntityTypes keeps several static lookup maps (name<->class, class->id,
// name->id); point all of them at the custom pearl entity under the
// vanilla name "ThrownEnderpearl" and id 14.
Field fieldStringToClass = EntityTypes.class.getDeclaredField("c");
Field fieldClassToString = EntityTypes.class.getDeclaredField("d");
fieldStringToClass.setAccessible(true);
fieldClassToString.setAccessible(true);
Field fieldClassToId = EntityTypes.class.getDeclaredField("f");
Field fieldStringToId = EntityTypes.class.getDeclaredField("g");
fieldClassToId.setAccessible(true);
fieldStringToId.setAccessible(true);
Map mapStringToClass = (Map)fieldStringToClass.get(null);
Map mapClassToString = (Map)fieldClassToString.get(null);
Map mapClassToId = (Map)fieldClassToId.get(null);
Map mapStringToId = (Map)fieldStringToId.get(null);
mapStringToClass.put("ThrownEnderpearl",CustomNMSEntityEnderPearl.class);
mapStringToId.put("ThrownEnderpearl", Integer.valueOf(14));
mapClassToString.put(CustomNMSEntityEnderPearl.class, "ThrownEnderpearl");
mapClassToId.put(CustomNMSEntityEnderPearl.class, Integer.valueOf(14));
} catch (Exception e) {
// Best-effort: log and continue with the vanilla pearl behavior.
Humbug.severe("Exception while overriding MC's ender pearl class");
e.printStackTrace();
}
}
// Hunger Changes
// Keep track if the player just ate.
// Maps a player to the saturation value computed for the food they just
// consumed; entries are removed by a delayed task scheduled in
// setSaturationOnFoodEat and consumed by onFoodLevelChange.
private Map<Player, Double> playerLastEat_ = new HashMap<Player, Double>();
// Records the saturation value for the food the player just consumed, scaled
// by the configured multiplier. The entry is read later by
// onFoodLevelChange and expires after 80 ticks (in case the consumed item
// was a potion or some other item sharing a food id).
// A multiplier of (effectively) zero disables the feature.
@BahHumbug(opt="saturation_multiplier", type=OptType.Double, def="0.0")
@EventHandler
public void setSaturationOnFoodEat(PlayerItemConsumeEvent event) {
    // Each food sets a different saturation.
    final Player player = event.getPlayer();
    ItemStack item = event.getItem();
    Material mat = item.getType();
    double multiplier = config_.get("saturation_multiplier").getDouble();
    if (multiplier <= 0.000001 && multiplier >= -0.000001) {
        return;
    }
    // BUG FIX: the original switch had no break statements, so every case
    // fell through to default and the per-food values were always
    // overwritten with the bare multiplier.
    final double saturation;
    switch (mat) {
    case APPLE:            saturation = multiplier * 2.4;  break;
    case BAKED_POTATO:     saturation = multiplier * 7.2;  break;
    case BREAD:            saturation = multiplier * 6;    break;
    case CAKE:             saturation = multiplier * 0.4;  break;
    case CARROT_ITEM:      saturation = multiplier * 4.8;  break;
    case COOKED_FISH:      saturation = multiplier * 6;    break;
    case GRILLED_PORK:     saturation = multiplier * 12.8; break;
    case COOKIE:           saturation = multiplier * 0.4;  break;
    case GOLDEN_APPLE:     saturation = multiplier * 9.6;  break;
    case GOLDEN_CARROT:    saturation = multiplier * 14.4; break;
    case MELON:            saturation = multiplier * 1.2;  break;
    case MUSHROOM_SOUP:    saturation = multiplier * 7.2;  break;
    case POISONOUS_POTATO: saturation = multiplier * 1.2;  break;
    case POTATO:           saturation = multiplier * 0.6;  break;
    case RAW_FISH:         saturation = multiplier * 1;    break;
    case PUMPKIN_PIE:      saturation = multiplier * 4.8;  break;
    case RAW_BEEF:         saturation = multiplier * 1.8;  break;
    case RAW_CHICKEN:      saturation = multiplier * 1.2;  break;
    case PORK:             saturation = multiplier * 1.8;  break;
    case ROTTEN_FLESH:     saturation = multiplier * 0.8;  break;
    case SPIDER_EYE:       saturation = multiplier * 3.2;  break;
    case COOKED_BEEF:      saturation = multiplier * 12.8; break;
    default:               saturation = multiplier;        break;
    }
    playerLastEat_.put(player, saturation);
    // In case the player ingested a potion, this removes the
    // saturation from the list. Unsure if I have every item
    // listed. There is always the other cases of like food
    // that shares same id
    Bukkit.getServer().getScheduler().runTaskLater(this, new Runnable() {
        @Override
        public void run() {
            playerLastEat_.remove(player);
        }
    }, 80);
}
// Slows hunger decay: whenever the food level changes, top the player's
// saturation up by the configured "hunger_slowdown" amount (capped), or —
// if the player just ate — apply the per-food saturation recorded by
// setSaturationOnFoodEat.
@BahHumbug(opt="hunger_slowdown", type=OptType.Double, def="0.0")
@EventHandler
public void onFoodLevelChange(FoodLevelChangeEvent event) {
    // BUG FIX: the event entity is not guaranteed to be a Player; the
    // original unconditional cast could throw ClassCastException.
    if (!(event.getEntity() instanceof Player)) {
        return;
    }
    final Player player = (Player) event.getEntity();
    final double mod = config_.get("hunger_slowdown").getDouble();
    Double saturation;
    if (playerLastEat_.containsKey(player)) { // if the player just ate
        saturation = playerLastEat_.get(player);
        if (saturation == null) {
            // Entry vanished (or was stored as null) between containsKey and
            // get; fall back to the player's current saturation.
            saturation = ((Float) player.getSaturation()).doubleValue();
        }
    } else {
        // Top saturation up by `mod`, capped at 20 + 2*mod.
        saturation = Math.min(
            player.getSaturation() + mod,
            20.0D + (mod * 2.0D));
    }
    player.setSaturation(saturation.floatValue());
}
// Remove Book Copying
// Unless copying is explicitly enabled, strip every recipe whose result is a
// written book from the server's recipe list.
@BahHumbug(opt="copy_book_enable", def= "false")
public void removeBooks() {
    if (config_.get("copy_book_enable").getBool()) {
        return;
    }
    for (Iterator<Recipe> recipes = getServer().recipeIterator(); recipes.hasNext();) {
        final Recipe recipe = recipes.next();
        if (isWrittenBook(recipe.getResult())) {
            recipes.remove();
            info("Copying Books disabled");
        }
    }
}
/**
 * Tells whether the given stack is a written book.
 *
 * @param item the item stack to inspect; may be null
 * @return true iff {@code item} is non-null and of type WRITTEN_BOOK
 */
public boolean isWrittenBook(ItemStack item) {
    // Enum constants are singletons, so identity comparison is the
    // idiomatic (and inherently null-safe) check.
    return item != null && item.getType() == Material.WRITTEN_BOOK;
}
// Prevent tree growth wrap-around
// Cancels any structure growth whose blocks span (nearly) the full world
// height, which indicates the structure wrapped around vertically.
@BahHumbug(opt="prevent_tree_wraparound", def="true")
@EventHandler(priority=EventPriority.LOWEST, ignoreCancelled = true)
public void onStructureGrowEvent(StructureGrowEvent event) {
    if (!config_.get("prevent_tree_wraparound").getBool()) {
        return;
    }
    // Track the vertical extent of the grown structure.
    int highest = 0;
    int lowest = 257;
    for (final BlockState state : event.getBlocks()) {
        final int y = state.getLocation().getBlockY();
        if (y > highest) {
            highest = y;
        }
        if (y < lowest) {
            lowest = y;
        }
    }
    if (highest - lowest > 240) {
        event.setCancelled(true);
        final Location loc = event.getLocation();
        info(String.format("Prevented structure wrap-around at %d, %d, %d",
            loc.getBlockX(), loc.getBlockY(), loc.getBlockZ()));
    }
}
// General
// Plugin load hook: read the configuration and install the custom ender
// pearl item before the server starts ticking.
public void onLoad()
{
loadConfiguration();
hookEnderPearls();
info("Loaded");
}
// Plugin enable hook: wire up listeners and commands, strip disabled
// recipes/books, then publish the global instance.
// NOTE(review): global_instance_ is assigned last, so isInitiaized() stays
// false while the earlier steps run.
public void onEnable() {
registerEvents();
registerCommands();
removeRecipies();
removeBooks();
global_instance_ = this;
info("Enabled");
}
// Plugin disable hook: when the vehicle-logout fix is active, eject every
// online player from their vehicle so no one logs out mid-ride.
public void onDisable() {
    if (!config_.get("fix_vehicle_logout_bug").getBool()) {
        return;
    }
    for (final World world : getServer().getWorlds()) {
        for (final Player player : world.getPlayers()) {
            kickPlayerFromVehicle(player);
        }
    }
}
// Returns true once onEnable() has run (i.e. global_instance_ is set).
// NOTE(review): the method name is misspelled ("isInitiaized") but is part
// of the public interface, so it is left unchanged.
public boolean isInitiaized() {
return global_instance_ != null;
}
/**
 * Parses a config-style boolean: "1" or any capitalization of "true" is
 * true; everything else is false.
 */
public boolean toBool(String value) {
    return value.equals("1") || value.equalsIgnoreCase("true");
}
/**
 * Parses {@code value} as an int, falling back to {@code default_value} on
 * any failure (including a null value).
 */
public int toInt(String value, int default_value) {
    int result = default_value;
    try {
        result = Integer.parseInt(value);
    } catch (Exception ignored) {
        // unparsable — keep the default
    }
    return result;
}
/**
 * Parses {@code value} as a double, falling back to {@code default_value}
 * on any failure (including a null value).
 */
public double toDouble(String value, double default_value) {
    double result = default_value;
    try {
        result = Double.parseDouble(value);
    } catch (Exception ignored) {
        // unparsable — keep the default
    }
    return result;
}
/**
 * Resolves a material id either from a numeric string or from a material
 * name; returns {@code default_value} when neither form matches.
 */
public int toMaterialId(String value, int default_value) {
    try {
        return Integer.parseInt(value);
    } catch (Exception notNumeric) {
        final Material mat = Material.matchMaterial(value);
        return mat == null ? default_value : mat.getId();
    }
}
// Central command dispatcher:
//  - invsee <name>        (player only): open another player's inventory.
//  - introbook [player]   (player only): give the newbie book, if enabled.
//  - bahhumbug            (player only): give the holiday package.
//  - humbug <option> [value [subvalue]] (console only): read or modify a
//    configuration option; also handles the pseudo-options debug,
//    loot_multiplier, remove_mob_drops, save and reload.
public boolean onCommand(
CommandSender sender,
Command command,
String label,
String[] args) {
if (sender instanceof Player && command.getName().equals("invsee")) {
if (args.length < 1) {
sender.sendMessage("Provide a name");
return false;
}
onInvseeCommand((Player)sender, args[0]);
return true;
}
if (sender instanceof Player
&& command.getName().equals("introbook")) {
if (!config_.get("drop_newbie_book").getBool()) {
return true;
}
// Default to the sender, but allow targeting another online player.
Player sendBookTo = (Player)sender;
if (args.length >= 1) {
Player possible = Bukkit.getPlayerExact(args[0]);
if (possible != null) {
sendBookTo = possible;
}
}
giveN00bBook(sendBookTo);
return true;
}
if (sender instanceof Player
&& command.getName().equals("bahhumbug")) {
giveHolidayPackage((Player)sender);
return true;
}
// Everything below is the console-only "humbug" configuration command.
if (!(sender instanceof ConsoleCommandSender) ||
!command.getName().equals("humbug") ||
args.length < 1) {
return false;
}
String option = args[0];
String value = null;
String subvalue = null;
boolean set = false;
boolean subvalue_set = false;
String msg = "";
if (args.length > 1) {
value = args[1];
set = true;
}
if (args.length > 2) {
subvalue = args[2];
subvalue_set = true;
}
ConfigOption opt = config_.get(option);
if (opt != null) {
// Plain registered option: optionally set it, then echo its value.
if (set) {
opt.set(value);
}
msg = String.format("%s = %s", option, opt.getString());
} else if (option.equals("debug")) {
if (set) {
config_.setDebug(toBool(value));
}
msg = String.format("debug = %s", config_.getDebug());
} else if (option.equals("loot_multiplier")) {
// With two values the first selects the entity type; with one value
// the "generic" multiplier is addressed.
String entity_type = "generic";
if (set && subvalue_set) {
entity_type = value;
value = subvalue;
}
if (set) {
config_.setLootMultiplier(
entity_type, toInt(value, config_.getLootMultiplier(entity_type)));
}
msg = String.format(
"loot_multiplier(%s) = %d", entity_type, config_.getLootMultiplier(entity_type));
} else if (option.equals("remove_mob_drops")) {
// "add"/"del" followed by a material name or numeric id.
if (set && subvalue_set) {
if (value.equals("add")) {
config_.addRemoveItemDrop(toMaterialId(subvalue, -1));
} else if (value.equals("del")) {
config_.removeRemoveItemDrop(toMaterialId(subvalue, -1));
}
}
msg = String.format("remove_mob_drops = %s", config_.toDisplayRemoveItemDrops());
} else if (option.equals("save")) {
config_.save();
msg = "Configuration saved";
} else if (option.equals("reload")) {
config_.reload();
msg = "Configuration loaded";
} else {
msg = String.format("Unknown option %s", option);
}
sender.sendMessage(msg);
return true;
}
// Grants the console sender the "humbug.console" permission so it can run
// the console-only "humbug" command handled in onCommand.
public void registerCommands() {
ConsoleCommandSender console = getServer().getConsoleSender();
console.addAttachment(this, "humbug.console", true);
}
// Registers this plugin instance as the listener for all of its
// @EventHandler methods.
private void registerEvents() {
getServer().getPluginManager().registerEvents(this, this);
}
// Initializes the plugin configuration wrapper.
private void loadConfiguration() {
config_ = Config.initialize(this);
}
}
|
package com.github.bednar.security.api;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.github.bednar.base.api.ApiResource;
import com.github.bednar.base.event.Dispatcher;
import com.github.bednar.security.event.AuthenticateViaFormEvent;
import com.github.bednar.security.event.IsAuthenticatedEvent;
import com.github.bednar.security.event.UnAuthenticateEvent;
import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiParam;
import com.wordnik.swagger.annotations.ApiResponse;
import com.wordnik.swagger.annotations.ApiResponses;
import org.jboss.resteasy.annotations.Suspend;
import org.jboss.resteasy.spi.AsynchronousResponse;
@Path("security")
@Consumes("application/json")
@Produces("application/json")
@Api(
value = "Authentication",
description = "API for authentication subject. Supported authentication method: HTML Form.",
position = 10)
public class Security implements ApiResource
{
@Inject
private Dispatcher dispatcher;
@POST
@Path("authenticateViaForm")
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@ApiOperation(
position = 1,
value = "Simple Web-Authentication via HTML Form. After success authentication " +
"return HTTP 200 OK response if authentication not finish success, " +
"than return HTTP 401 UNAUTHORIZED.")
@ApiResponses({
@ApiResponse(code = 200, message = "{}"),
@ApiResponse(code = 401, message = "{}")})
public void authenticateViaForm(@Nonnull @Suspend final AsynchronousResponse response,
@Nullable @FormParam("username")
@ApiParam(name = "username", value = "Username of Subject (Form param)", required = true)
final String username,
@Nullable @FormParam("password")
@ApiParam(name = "password", value = "Password of Subject (Form param)", required = true)
final String password)
{
dispatcher.publish(new AuthenticateViaFormEvent(username, password)
{
@Override
public void success(@Nonnull final Boolean authentiacted)
{
Response.ResponseBuilder builder;
if (authentiacted)
{
builder = Response.ok();
}
else
{
builder = Response.status(Response.Status.UNAUTHORIZED);
}
response.setResponse(builder.entity("{}").build());
}
});
}
@GET
@Path("unAuthenticate")
@ApiOperation(
position = 2,
value = "If actual session is authenticated, than invalidate session and unauthenticated it." +
" Always return HTTP 200 OK.")
@ApiResponse(code = 200, message = "{}")
public void unAuthenticate(@Nonnull @Suspend final AsynchronousResponse response)
{
dispatcher.publish(new UnAuthenticateEvent()
{
@Override
public void success(@Nonnull final Void value)
{
response.setResponse(Response.ok("{}").build());
}
});
}
@GET
@Path("isAuthenticated")
@ApiOperation(
position = 3,
value = "Return HTTP 200 OK response if actual session is authenticated or HTTP 401 UNAUTHORIZED if not.")
@ApiResponses({
@ApiResponse(code = 200, message = "{}"),
@ApiResponse(code = 401, message = "{}")})
public void isAuthenticated(@Nonnull @Suspend final AsynchronousResponse response)
{
dispatcher.publish(new IsAuthenticatedEvent()
{
@Override
public void success(@Nonnull final Boolean isAuthenticated)
{
Response.ResponseBuilder builder;
if (isAuthenticated)
{
builder = Response.ok();
}
else
{
builder = Response.status(Response.Status.UNAUTHORIZED);
}
response.setResponse(builder.entity("{}").build());
}
});
}
}
|
package com.github.ferstl.depgraph;
import java.util.Collection;
import java.util.List;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilderException;
import org.apache.maven.shared.dependency.graph.DependencyNode;
import com.github.ferstl.depgraph.dot.DotBuilder;
@Mojo(
    name = "graph",
    aggregator = true,
    defaultPhase = LifecyclePhase.NONE,
    inheritByDefault = false,
    requiresDependencyCollection = ResolutionScope.TEST,
    requiresDirectInvocation = true,
    threadSafe = true)
public class DepGraphMojo extends AbstractMojo {

  /**
   * Group-id clusters used when rendering the graph.
   * NOTE(review): the alias "gropIdClusters" is misspelled; it is kept
   * unchanged because existing POM configurations may depend on it.
   */
  @Parameter(
      alias = "gropIdClusters",
      property = "groupIdClusters",
      readonly = true,
      required = false)
  private List<String> groupIdClusters;

  /** The aggregator project this mojo is executed on. */
  @Component
  private MavenProject project;

  /** Builds per-module dependency graphs. */
  @Component( hint = "default" )
  private DependencyGraphBuilder dependencyGraphBuilder;

  /**
   * Renders the module parent/child tree plus every collected module's
   * dependency graph into a single DOT document and prints it to stderr.
   *
   * @throws MojoExecutionException if a module's dependency graph cannot be built
   */
  @Override
  public void execute() throws MojoExecutionException {
    try {
      DotBuilder dotBuilder = new DotBuilder(ArtifactIdRenderer.VERSIONLESS_ID, ArtifactIdRenderer.ARTIFACT_ID);
      @SuppressWarnings("unchecked")
      List<MavenProject> collectedProjects = this.project.getCollectedProjects();
      buildModuleTree(collectedProjects, dotBuilder);
      for (MavenProject collectedProject : collectedProjects) {
        // null artifact filter: include dependencies from every scope.
        DependencyNode root = this.dependencyGraphBuilder.buildDependencyGraph(collectedProject, null);
        DotBuildingVisitor visitor = new DotBuildingVisitor(dotBuilder);
        root.accept(visitor);
      }
      System.err.println(dotBuilder);
    } catch (DependencyGraphBuilderException e) {
      // BUG FIX: the original threw new MojoExecutionException("boom"),
      // discarding the cause; preserve it and use a meaningful message.
      throw new MojoExecutionException("Unable to build the dependency graph", e);
    }
  }

  /**
   * Adds a parent->child edge for every link in each collected module's
   * parent chain to the given DOT builder.
   */
  public void buildModuleTree(Collection<MavenProject> collectedProjects, DotBuilder dotBuilder) {
    System.err.println("Project: " + this.project + ", Parent: " + this.project.getParent());
    for (MavenProject collectedProject : collectedProjects) {
      MavenProject child = collectedProject;
      MavenProject parent = collectedProject.getParent();
      while (parent != null) {
        ArtifactNode parentNode = new ArtifactNode(parent.getArtifact());
        ArtifactNode childNode = new ArtifactNode(child.getArtifact());
        dotBuilder.addEdge(parentNode, childNode);
        child = parent;
        parent = parent.getParent();
      }
    }
  }
}
|
package edu.chl.proton.model;
import javafx.scene.text.Text;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.*;
/**
 * A text document: raw text stored as a list of lines, together with the
 * cursor position, the backing {@link File}, any attached {@link Parts},
 * and a {@link DocTypeInterface} that renders the text.
 */
public class Document {
    // Lika = document
    private Cursor cursor;
    private File file;
    private List<String> lines = new ArrayList<String>();
    private List<Parts> parts = new ArrayList<Parts>();
    DocTypeInterface docType;

    /** Creates a document rendered through the given document type. */
    public Document(DocTypeInterface type){
        this.docType = type;
    }

    /**
     * Creates a document from a file name.
     * NOTE(review): the original computed the file extension here and then
     * discarded it — presumably it was meant to select a DocTypeInterface
     * from the extension. The dead computation was removed; docType stays
     * unset. TODO confirm intended behavior.
     */
    public Document(String name){
    }

    protected Cursor getCursor(){
        return this.cursor;
    }

    protected void setCursor(Cursor cursor){
        this.cursor = cursor;
    }

    protected File getFile(){
        return this.file;
    }

    protected void setFile(File file){
        this.file = file;
    }

    /**
     * Points the current file at the given path.
     * NOTE(review): assumes {@code file} is already set — only its path is
     * updated. The original carried a commented-out alternative
     * (getFileFromPath); TODO confirm which behavior is intended.
     */
    protected void addFile(String path){
        file.setPath(path);
    }

    protected void addParts(Parts parts){
        this.parts.add(parts);
    }

    protected void removeParts(int index){
        parts.remove(index);
    }

    protected void removeAllParts(){
        parts.clear();
    }

    protected void addLines(String lines){
        this.lines.add(lines);
    }

    protected void removeLines(int index){
        lines.remove(index);
    }

    protected void removeAllLines(){
        lines.clear();
    }

    /**
     * Inserts a character at the cursor position. Enter ('\r') moves the
     * cursor to the next row; any other character is inserted into the
     * current line and the cursor advances one column.
     */
    protected void insertChar(char ch){
        int row = cursor.getPosition().getY();
        int col = cursor.getPosition().getX();
        // check if Enter was the key pressed
        if (ch == '\r') {
            cursor.setPosition(row + 1, col);
        } else {
            StringBuilder str = new StringBuilder(lines.get(row));
            str.insert(col, ch);
            // BUG FIX: the original wrote the *unmodified* line back
            // (lines.set(row, tmp)), silently discarding the inserted
            // character.
            lines.set(row, str.toString());
            cursor.setPosition(row, col + 1);
        }
    }

    public List<Text> getText(){
        return docType.getText();
    }

    /**
     * NOTE(review): the {@code text} parameter is ignored and a no-arg
     * overload is invoked — this looks like it should forward {@code text};
     * confirm against DocTypeInterface before changing.
     */
    protected void setText(List<String> text){
        docType.setText();
    }

    /** Writes the given lines to the backing file. */
    protected void save(List<String> lines) throws IOException{
        file.save(lines);
    }

    protected void remove(){
        file.remove();
    }

    protected boolean isSaved(){
        return file.isSaved();
    }

    /**
     * Reads the backing file into {@code lines}, one entry per line.
     * (The method name keeps the original "aqcuire" spelling — it is part
     * of the protected interface.)
     */
    protected void aqcuireText(){
        // try-with-resources: the original leaked the reader when
        // readLine() threw, since close() was not in a finally block.
        // FileReader reads text files in the default encoding.
        try (BufferedReader bufferedReader =
                new BufferedReader(new FileReader(file.getName()))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                lines.add(line);
            }
        }
        catch(FileNotFoundException ex) {
            System.out.println(
                "Unable to open file '" +
                file.getName() + "'");
        }
        catch(IOException ex) {
            System.out.println(
                "Error reading file '"
                + file.getName() + "'");
        }
    }
}
|
package com.github.rschmitt.collider;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collector;
import java.util.stream.Stream;
import javax.annotation.concurrent.Immutable;
import clojure.lang.Associative;
import clojure.lang.IEditableCollection;
import clojure.lang.IPersistentMap;
import clojure.lang.ITransientCollection;
import clojure.lang.ITransientMap;
import clojure.lang.PersistentHashMap;
import clojure.lang.RT;
import static java.util.stream.Collector.Characteristics.UNORDERED;
/**
 * A generic persistent immutable Map implementation, with three types of methods:
 * <ol>
 * <li>Read methods from {@link Map}, such as {@link #get}</li>
 * <li>Write methods from Map, such as {@link #put}; these will throw {@link
 * UnsupportedOperationException} and have been marked as {@code @Deprecated}</li>
 * <li>Persistent "modification" methods, such as {@link #assoc}; these will efficiently create
 * modified copies of the current map</li>
 * </ol>
 */
@Immutable
public class ClojureMap<K, V> implements Map<K, V> {
// Backing map (created from Clojure's PersistentHashMap); all reads delegate
// to it and the persistent "modification" methods wrap a new delegate.
private final Map<K, V> delegate;
// Builds a map from alternating key/value arguments.
@SuppressWarnings("unchecked")
static <K, V> ClojureMap<K, V> create(Object... init) {
return (ClojureMap<K, V>) create(PersistentHashMap.create(init));
}
@SuppressWarnings("unchecked")
private static <K, V> ClojureMap<K, V> create(Map<K, V> ts) {
return new ClojureMap<>(ts);
}
// Wraps an existing Clojure IPersistentMap without copying.
@SuppressWarnings("unchecked")
static <K, V> ClojureMap<K, V> wrap(IPersistentMap map) {
return create((Map<K, V>) map);
}
// The parameter is typed Object so subclasses can pass Clojure collection
// types directly; it must be a Map at runtime.
@SuppressWarnings("unchecked")
protected ClojureMap(Object delegate) {
this.delegate = (Map<K, V>) delegate;
}
/**
 * Returns a copy of this map which also contains a mapping from {@code key} to {@code value}.
 * If a mapping for {@code key} already exists in the current map, it will be overwritten.
 */
@SuppressWarnings("unchecked")
public ClojureMap<K, V> assoc(K key, V value) {
Associative assoc = RT.assoc(delegate, key, value);
return ClojureMap.create((Map<K, V>) assoc);
}
/**
 * Returns a copy of this map without a mapping for {@code key}.
 */
@SuppressWarnings("unchecked")
public ClojureMap<K, V> dissoc(K key) {
Object dissoc = RT.dissoc(delegate, key);
return ClojureMap.create((Map<K, V>) dissoc);
}
/**
 * Returns a map that consists of all bindings from the current map, as well as {@code maps}.
 * If a mapping occurs in more than one map, the mapping in the rightmost map will take
 * precedence.
 */
@SafeVarargs
public final ClojureMap<K, V> merge(ClojureMap<K, V>... maps) {
// Fast paths: nothing to merge, or this map contributes nothing.
if (maps.length == 0) return this;
if (Stream.of(maps).allMatch(Map::isEmpty)) return this;
if (isEmpty() && maps.length == 1) return maps[0];
TransientMap<K, V> ret = asTransient();
for (ClojureMap<K, V> map : maps) {
for (Entry<K, V> entry : map.entrySet()) {
ret.put(entry.getKey(), entry.getValue());
}
}
return ret.toPersistent();
}
/**
 * Returns a mutable copy of this map.
 */
public Map<K, V> toMutableMap() {
return new HashMap<>(this);
}
/**
 * Returns a transient version of this map in constant time.
 */
public TransientMap<K, V> asTransient() {
IEditableCollection asEditable = (IEditableCollection) delegate;
ITransientCollection asTransient = asEditable.asTransient();
return new TransientMap<>((ITransientMap) asTransient);
}
/**
 * Maps {@code f} over the keys in this map, returning a new map containing the result. If
 * {@code f} produces collisions, the result is undefined.
 */
public <R> ClojureMap<R, V> mapKeys(Function<? super K, ? extends R> f) {
return entrySet().stream().collect(toClojureMap(e -> f.apply(e.getKey()), Entry::getValue));
}
/**
 * Maps {@code f} over the values in this map, returning a new map containing the result.
 */
public <R> ClojureMap<K, R> mapValues(Function<? super V, ? extends R> f) {
return entrySet().stream().collect(toClojureMap(Entry::getKey, e -> f.apply(e.getValue())));
}
/**
 * Returns a new map containing only the mappings whose keys match {@code p}.
 */
public ClojureMap<K, V> filterKeys(Predicate<? super K> p) {
return entrySet().stream().filter(e -> p.test(e.getKey())).collect(toClojureMap(Entry::getKey, Entry::getValue));
}
/**
 * Returns a new map containing only the mappings whose values match {@code p}.
 */
public ClojureMap<K, V> filterValues(Predicate<? super V> p) {
return entrySet().stream().filter(e -> p.test(e.getValue())).collect(toClojureMap(Entry::getKey, Entry::getValue));
}
/**
 * Returns a new map containing none of the mappings whose keys match {@code p}.
 */
public ClojureMap<K, V> excludeKeys(Predicate<? super K> p) {
return filterKeys(p.negate());
}
/**
 * Returns a new map containing none of the mappings whose values match {@code p}.
 */
public ClojureMap<K, V> excludeValues(Predicate<? super V> p) {
return filterValues(p.negate());
}
/**
 * Returns a {@link Collector} that accumulates values into a TransientMap, returning a
 * ClojureMap upon completion. If multiple mappings are produced for the same key, the last
 * mapping produced will be the one in the returned map.
 *
 * @param keyMapper a function from the input type to keys
 * @param valueMapper a function from the input type to values
 * @param <T> the type of the input element in the stream
 * @param <K> the key type for the map that will be returned
 * @param <V> the value type for the map that will be returned
 */
public static <T, K, V> Collector<T, TransientMap<K, V>, ClojureMap<K, V>> toClojureMap(
Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends V> valueMapper
) {
return new Collector<T, TransientMap<K, V>, ClojureMap<K, V>>() {
@Override
public Supplier<TransientMap<K, V>> supplier() {
return TransientMap::new;
}
@Override
public BiConsumer<TransientMap<K, V>, T> accumulator() {
return (map, t) -> map.put(keyMapper.apply(t), valueMapper.apply(t));
}
@Override
public BinaryOperator<TransientMap<K, V>> combiner() {
return (x, y) -> {
x.putAll(y.toPersistent());
return x;
};
}
@Override
public Function<TransientMap<K, V>, ClojureMap<K, V>> finisher() {
return TransientMap::toPersistent;
}
@Override
public Set<Characteristics> characteristics() {
return EnumSet.of(UNORDERED);
}
};
}
/**
 * Returns a {@link Collector} that accumulates values into a TransientMap, returning a
 * ClojureMap upon completion. If multiple mappings are produced for the same key, the {@code
 * mergeFunction} will be invoked to determine a value.
 *
 * @param keyMapper a function from the input type to keys
 * @param valueMapper a function from the input type to values
 * @param mergeFunction a function used to resolve collisions between values associated with
 * the
 * same key
 * @param <T> the type of the input element in the stream
 * @param <K> the key type for the map that will be returned
 * @param <V> the value type for the map that will be returned
 */
public static <T, K, V> Collector<T, TransientMap<K, V>, ClojureMap<K, V>> toStrictClojureMap(
Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends V> valueMapper,
BinaryOperator<V> mergeFunction
) {
return new Collector<T, TransientMap<K, V>, ClojureMap<K, V>>() {
@Override
public Supplier<TransientMap<K, V>> supplier() {
return TransientMap::new;
}
@Override
public BiConsumer<TransientMap<K, V>, T> accumulator() {
return (map, t) -> putUnique(map, keyMapper.apply(t), valueMapper.apply(t));
}
@Override
public BinaryOperator<TransientMap<K, V>> combiner() {
return (x, y) -> {
ClojureMap<K, V> source = y.toPersistent();
for (Entry<K, V> entry : source.entrySet()) {
putUnique(x, entry.getKey(), entry.getValue());
}
return x;
};
}
// Resolves a key collision via mergeFunction before inserting.
// NOTE(review): mergeFunction receives (newValue, existingValue) — the
// opposite of Collectors.toMap's (existing, new) argument order; confirm
// this order is intended.
private void putUnique(TransientMap<K, V> map, K key, V value) {
if (map.contains(key)) {
value = mergeFunction.apply(value, map.get(key));
}
map.put(key, value);
}
@Override
public Function<TransientMap<K, V>, ClojureMap<K, V>> finisher() {
return TransientMap::toPersistent;
}
@Override
public Set<Characteristics> characteristics() {
return Collections.emptySet();
}
};
}
// Mindless delegation goes here
// (Reads simply forward to the delegate. The @Deprecated write methods below
// also just forward — per the class javadoc they are expected to throw
// UnsupportedOperationException at runtime, since the delegate is a
// persistent/immutable Clojure map.)
@Override
public int size() {
return delegate.size();
}
@Override
public boolean isEmpty() {
return delegate.isEmpty();
}
@Override
public boolean containsKey(Object key) {
return delegate.containsKey(key);
}
@Override
public boolean containsValue(Object value) {
return delegate.containsValue(value);
}
@Override
public V get(Object key) {
return delegate.get(key);
}
@Override
public Set<K> keySet() {
return delegate.keySet();
}
@Override
public Collection<V> values() {
return delegate.values();
}
@Override
public Set<Entry<K, V>> entrySet() {
return delegate.entrySet();
}
@Override
public boolean equals(Object o) {
return delegate.equals(o);
}
@Override
public int hashCode() {
return delegate.hashCode();
}
@Override
public V getOrDefault(Object key, V defaultValue) {
return delegate.getOrDefault(key, defaultValue);
}
@Override
public void forEach(BiConsumer<? super K, ? super V> action) {
delegate.forEach(action);
}
/**
 * @deprecated This operation will fail; use {@link #assoc} instead
 */
@Override
@Deprecated
public V put(K key, V value) {
return delegate.put(key, value);
}
/**
 * @deprecated This operation will fail; use {@link #dissoc} instead
 */
@Override
@Deprecated
public V remove(Object key) {
return delegate.remove(key);
}
/**
 * @deprecated This operation will fail; use {@link #merge(ClojureMap[])} instead
 */
@Override
@Deprecated
public void putAll(Map<? extends K, ? extends V> m) {
delegate.putAll(m);
}
/**
 * @deprecated This operation will fail; use {@link Collider#clojureMap()} instead
 */
@Override
@Deprecated
public void clear() {
delegate.clear();
}
/**
 * @deprecated This operation will fail; use {@link #mapValues} instead
 */
@Override
@Deprecated
public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
delegate.replaceAll(function);
}
/**
 * @deprecated This operation will fail.
 */
@Override
@Deprecated
public V putIfAbsent(K key, V value) {
return delegate.putIfAbsent(key, value);
}
/**
 * @deprecated This operation will fail; use {@link #dissoc} instead
 */
@Override
@Deprecated
public boolean remove(Object key, Object value) {
return delegate.remove(key, value);
}
/**
 * @deprecated This operation will fail; use {@link #assoc} instead
 */
@Override
@Deprecated
public boolean replace(K key, V oldValue, V newValue) {
return delegate.replace(key, oldValue, newValue);
}
/**
 * @deprecated This operation will fail; use {@link #assoc} instead
 */
@Override
@Deprecated
public V replace(K key, V value) {
return delegate.replace(key, value);
}
/**
 * @deprecated This operation will fail.
 */
@Override
@Deprecated
public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) {
return delegate.computeIfAbsent(key, mappingFunction);
}
/**
 * @deprecated This operation will fail.
 */
@Override
@Deprecated
public V computeIfPresent(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
return delegate.computeIfPresent(key, remappingFunction);
}
/**
 * @deprecated This operation will fail.
 */
@Override
@Deprecated
public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
return delegate.compute(key, remappingFunction);
}
/**
 * @deprecated This operation will fail.
 */
@Override
@Deprecated
public V merge(K key, V value, BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
return delegate.merge(key, value, remappingFunction);
}
}
|
package edu.uw.zookeeper;
import static com.google.common.base.Preconditions.checkArgument;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.Service;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.common.util.concurrent.UncaughtExceptionHandlers;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import edu.uw.zookeeper.util.Application;
import edu.uw.zookeeper.util.Arguments;
import edu.uw.zookeeper.util.ConfigurableMain;
import edu.uw.zookeeper.util.Configuration;
import edu.uw.zookeeper.util.DefaultsFactory;
import edu.uw.zookeeper.util.EventBusPublisher;
import edu.uw.zookeeper.util.ExecutorServiceMonitor;
import edu.uw.zookeeper.util.Factories;
import edu.uw.zookeeper.util.Factory;
import edu.uw.zookeeper.util.ParameterizedFactory;
import edu.uw.zookeeper.util.Publisher;
import edu.uw.zookeeper.util.ServiceApplication;
import edu.uw.zookeeper.util.ServiceMonitor;
import edu.uw.zookeeper.util.Singleton;
import edu.uw.zookeeper.util.TimeValue;
/**
 * Skeleton for a ZooKeeper {@link Application}: bundles configuration, a
 * publisher factory, a {@link ServiceMonitor}, and lazily-created listening
 * executor services, then runs the concrete application with a JVM shutdown
 * hook that stops all monitored services.
 */
public abstract class AbstractMain implements Application {

    /**
     * Enum-singleton factory; note that each {@code get()} call returns a
     * NEW {@link EventBusPublisher}, the enum itself is the singleton.
     */
    public static enum EventBusPublisherFactory implements Factory<Publisher> {
        INSTANCE;

        public static EventBusPublisherFactory getInstance() {
            return INSTANCE;
        }

        @Override
        public Publisher get() {
            return new EventBusPublisher();
        }
    }

    /** Enum-singleton factory that hands out the platform-default thread factory. */
    public static enum PlatformThreadFactory implements Factory<ThreadFactory> {
        INSTANCE;

        public static PlatformThreadFactory getInstance() {
            return INSTANCE;
        }

        @Override
        public ThreadFactory get() {
            return MoreExecutors.platformThreadFactory();
        }
    }

    /**
     * Factory for a single-threaded {@link ScheduledExecutorService} whose
     * worker is a daemon thread (so a pending task cannot keep the JVM alive).
     */
    public static enum SingleDaemonThreadScheduledExectorFactory implements DefaultsFactory<ThreadFactory, ScheduledExecutorService> {
        INSTANCE;

        public static SingleDaemonThreadScheduledExectorFactory getInstance() {
            return INSTANCE;
        }

        // Thread name pattern for the scheduled worker thread.
        private final String nameFormat = "scheduled-%d";

        @Override
        public ScheduledExecutorService get() {
            // Default: build on top of the platform thread factory.
            return get(PlatformThreadFactory.getInstance().get());
        }

        @Override
        public ScheduledExecutorService get(ThreadFactory threadFactory) {
            ScheduledExecutorService instance = Executors.newSingleThreadScheduledExecutor(
                    new ThreadFactoryBuilder()
                        .setThreadFactory(threadFactory)
                        .setDaemon(true)
                        .setNameFormat(nameFormat)
                        .build());
            //MoreExecutors.addDelayedShutdownHook(instance, 100, TimeUnit.MILLISECONDS);
            return instance;
        }
    }

    /**
     * Factory for the main fixed-size application thread pool
     * (2 x available processors, minimum 1). Threads are NOT daemons here.
     */
    public static enum ApplicationExecutorFactory implements DefaultsFactory<ThreadFactory, ExecutorService> {
        INSTANCE;

        public static ApplicationExecutorFactory getInstance() {
            return INSTANCE;
        }

        // TODO: configurable
        private final int CORE_SIZE = Math.max(1,
                Runtime.getRuntime().availableProcessors() * 2);
        private final String nameFormat = "main-pool-%d";

        @Override
        public ExecutorService get() {
            return get(PlatformThreadFactory.getInstance().get());
        }

        // NOTE(review): missing @Override for DefaultsFactory.get(ThreadFactory)
        // (the sibling SingleDaemonThreadScheduledExectorFactory has it) — confirm and add.
        public ExecutorService get(ThreadFactory threadFactory) {
            return Executors.newFixedThreadPool(CORE_SIZE,
                    new ThreadFactoryBuilder()
                        .setThreadFactory(threadFactory)
                        .setNameFormat(nameFormat)
                        .build());
        }
    }

    /**
     * Builds the standard executor mapping used by this application:
     * one scheduled executor and one general-purpose pool, both registered
     * with the supplied monitor on first use.
     */
    public static ListeningExecutorServiceFactory listeningExecutors(ServiceMonitor serviceMonitor) {
        return ListeningExecutorServiceFactory.newInstance(
                serviceMonitor,
                ImmutableMap.<Class<? extends ExecutorService>, Factory<? extends ExecutorService>>of(
                        ScheduledExecutorService.class, SingleDaemonThreadScheduledExectorFactory.getInstance(),
                        ExecutorService.class, ApplicationExecutorFactory.getInstance()));
    }

    /**
     * Lazily creates and caches one ExecutorService per requested type.
     * Each executor is wrapped in an {@link ExecutorServiceMonitor} which is
     * added to the {@link ServiceMonitor} so it gets shut down with the app.
     */
    public static class ExecutorServiceFactory implements ParameterizedFactory<Class<? extends ExecutorService>, ExecutorService> {

        /** A {@link Singleton} view of this factory pinned to one executor type. */
        protected class TypeView<T extends ExecutorService> implements Singleton<T> {
            protected final Class<T> type;

            public TypeView(Class<T> type) {
                this.type = type;
            }

            @SuppressWarnings("unchecked")
            @Override
            public T get() {
                // Unchecked cast is safe only if the factory actually produces
                // an instance assignable to 'type' for this key.
                return (T) ExecutorServiceFactory.this.get(type);
            }
        }

        public static ExecutorServiceFactory newInstance(
                ServiceMonitor serviceMonitor,
                Map<Class<? extends ExecutorService>, Factory<? extends ExecutorService>> factories) {
            return new ExecutorServiceFactory(serviceMonitor, factories);
        }

        protected final ServiceMonitor serviceMonitor;
        protected final Factories.ByTypeFactory<ExecutorService> factory;
        // Cache of created executors, keyed by the requested type; guarded by
        // synchronization on get(Class).
        protected final Map<Class<? extends ExecutorService>, ExecutorServiceMonitor<?>> instances;

        protected ExecutorServiceFactory(
                ServiceMonitor serviceMonitor,
                Map<Class<? extends ExecutorService>, Factory<? extends ExecutorService>> factories) {
            checkArgument(! factories.isEmpty());
            this.serviceMonitor = serviceMonitor;
            this.factory = Factories.ByTypeFactory.newInstance(factories);
            this.instances = Maps.newHashMap();
        }

        public Singleton<ExecutorService> asExecutorServiceFactory() {
            return newView(ExecutorService.class);
        }

        public Singleton<ScheduledExecutorService> asScheduledExecutorServiceFactory() {
            return newView(ScheduledExecutorService.class);
        }

        protected <T extends ExecutorService> TypeView<T> newView(Class<T> type) {
            return new TypeView<T>(type);
        }

        @Override
        public synchronized ExecutorService get(Class<? extends ExecutorService> type) {
            // Create-on-first-use; every caller asking for the same type gets
            // the same underlying executor.
            ExecutorServiceMonitor<?> instance = instances.get(type);
            if (instance == null) {
                instance = newInstance(type);
                instances.put(type, instance);
                serviceMonitor.add(instance);
            }
            return instance.get();
        }

        protected ExecutorServiceMonitor<?> newInstance(Class<? extends ExecutorService> type) {
            ExecutorService executor = factory.get(type);
            checkArgument(executor != null);
            ExecutorServiceMonitor<?> instance = ExecutorServiceMonitor.newInstance(executor);
            return instance;
        }
    }

    /**
     * Adds Listening wrapper around executor factory, and maps both a listening
     * and non-listening interface to the same backing Executor.
     */
    public static class ListeningExecutorServiceFactory extends ExecutorServiceFactory {

        public static ListeningExecutorServiceFactory newInstance(
                ServiceMonitor serviceMonitor,
                Map<Class<? extends ExecutorService>, Factory<? extends ExecutorService>> factories) {
            return new ListeningExecutorServiceFactory(serviceMonitor, factories);
        }

        protected ListeningExecutorServiceFactory(
                ServiceMonitor serviceMonitor,
                Map<Class<? extends ExecutorService>, Factory<? extends ExecutorService>> factories) {
            super(serviceMonitor, factories);
        }

        public Singleton<ListeningExecutorService> asListeningExecutorServiceFactory() {
            return newView(ListeningExecutorService.class);
        }

        public Singleton<ListeningScheduledExecutorService> asListeningScheduledExecutorServiceFactory() {
            return newView(ListeningScheduledExecutorService.class);
        }

        @Override
        protected ExecutorServiceMonitor<?> newInstance(Class<? extends ExecutorService> type) {
            // create executor
            ExecutorService executor = factory.get(type);
            if (executor == null) {
                // No factory registered for the exact type: fall back to the
                // "partner" type (listening <-> plain) of the same flavor
                // (scheduled vs. non-scheduled).
                if (ListeningScheduledExecutorService.class.isAssignableFrom(type)) {
                    executor = factory.get(ScheduledExecutorService.class);
                } else if (ListeningExecutorService.class.isAssignableFrom(type)) {
                    executor = factory.get(ExecutorService.class);
                } else {
                    if (ScheduledExecutorService.class.isAssignableFrom(type)) {
                        executor = factory.get(ListeningScheduledExecutorService.class);
                    } else {
                        executor = factory.get(ListeningExecutorService.class);
                    }
                }
            }
            checkArgument(executor != null);
            // wrap with listening interface
            if (! (executor instanceof ListeningExecutorService)) {
                if (ScheduledExecutorService.class.isAssignableFrom(type)) {
                    executor = MoreExecutors.listeningDecorator((ScheduledExecutorService)executor);
                } else {
                    executor = MoreExecutors.listeningDecorator((ExecutorService)executor);
                }
            }
            // wrap with service
            ExecutorServiceMonitor<?> instance = ExecutorServiceMonitor.newInstance(executor);
            // add extra lookup
            // Register the same monitor under the partner type so that e.g.
            // ExecutorService and ListeningExecutorService resolve to the same
            // backing executor.
            Class<? extends ExecutorService> extraType;
            if (ScheduledExecutorService.class.isAssignableFrom(type)) {
                if (ListeningScheduledExecutorService.class.isAssignableFrom(type)) {
                    extraType = ScheduledExecutorService.class;
                } else {
                    extraType = ListeningScheduledExecutorService.class;
                }
            } else {
                if (ListeningExecutorService.class.isAssignableFrom(type)) {
                    extraType = ExecutorService.class;
                } else {
                    extraType = ListeningExecutorService.class;
                }
            }
            assert(! type.equals(extraType));
            checkArgument(! instances.containsKey(extraType));
            instances.put(extraType, instance);
            return instance;
        }
    }

    /** Convenience factory for a {@link MonitorServiceFactory}. */
    public static MonitorServiceFactory monitors(ServiceMonitor serviceMonitor) {
        return MonitorServiceFactory.newInstance(serviceMonitor);
    }

    /**
     * Pass-through factory that registers every Service it sees with the
     * {@link ServiceMonitor} before returning it.
     */
    public static class MonitorServiceFactory implements ParameterizedFactory<Service, Service> {

        // NOTE(review): the type parameter <T> is unused here — confirm and drop.
        public static <T extends Service> MonitorServiceFactory newInstance(
                ServiceMonitor serviceMonitor) {
            return new MonitorServiceFactory(serviceMonitor);
        }

        protected final ServiceMonitor serviceMonitor;

        protected MonitorServiceFactory(
                ServiceMonitor serviceMonitor) {
            this.serviceMonitor = serviceMonitor;
        }

        @Override
        public Service get(Service value) {
            serviceMonitor.add(value);
            return value;
        }

        /** Typed variant of {@link #get(Service)} that preserves the argument's type. */
        public <T extends Service> T apply(T value) {
            get(value);
            return value;
        }
    }

    /**
     * Resolves a single server address from, in order of precedence:
     * the "server" command-line argument, the "Server" config key under
     * {@code configPath}, then the built-in default.
     */
    public static class ConfigurableServerAddressViewFactory implements DefaultsFactory<Configuration, ServerView.Address<?>> {

        public static ConfigurableServerAddressViewFactory newInstance() {
            return newInstance("");
        }

        public static ConfigurableServerAddressViewFactory newInstance(String configPath) {
            return new ConfigurableServerAddressViewFactory(configPath);
        }

        public static final String ARG = "server";
        public static final String CONFIG_KEY = "Server";
        // Empty address string; presumably interpreted as the wildcard/any
        // address by ServerInetAddressView — confirm.
        public static final String DEFAULT_ADDRESS = "";
        public static final int DEFAULT_PORT = 2181;

        private final String configPath;

        protected ConfigurableServerAddressViewFactory(String configPath) {
            this.configPath = configPath;
        }

        @Override
        public ServerInetAddressView get() {
            return ServerInetAddressView.newInstance(
                    DEFAULT_ADDRESS, DEFAULT_PORT);
        }

        @Override
        public ServerView.Address<?> get(Configuration value) {
            // Lazily declare the --server option if no one has yet, then parse.
            Arguments arguments = value.asArguments();
            if (! arguments.has(ARG)) {
                arguments.add(arguments.newOption(ARG, "Address"));
            }
            arguments.parse();
            Map<String, Object> args = Maps.newHashMap();
            if (arguments.hasValue(ARG)) {
                args.put(CONFIG_KEY, arguments.getValue(ARG));
            }
            Config config = value.asConfig();
            if (configPath.length() > 0 && config.hasPath(configPath)) {
                config = config.getConfig(configPath);
            } else {
                config = ConfigFactory.empty();
            }
            if (! args.isEmpty()) {
                // Command-line value takes precedence over the config file.
                config = ConfigValueFactory.fromMap(args).toConfig().withFallback(config);
            }
            if (config.hasPath(CONFIG_KEY)) {
                String input = config.getString(CONFIG_KEY);
                try {
                    return ServerAddressView.fromString(input);
                } catch (ClassNotFoundException e) {
                    // Address-view class named in the input is unavailable;
                    // treat as fatal.
                    throw Throwables.propagate(e);
                }
            } else {
                return get();
            }
        }
    }

    /**
     * Resolves an ensemble view from, in order of precedence: the "ensemble"
     * command-line argument, the "Ensemble" config key under {@code configPath},
     * then a single-server localhost default.
     */
    public static class ConfigurableEnsembleViewFactory implements DefaultsFactory<Configuration, EnsembleView> {

        public static ConfigurableEnsembleViewFactory newInstance() {
            return new ConfigurableEnsembleViewFactory("");
        }

        public static final String ARG = "ensemble";
        public static final String CONFIG_KEY = "Ensemble";
        public static final String DEFAULT_ADDRESS = "localhost";
        public static final int DEFAULT_PORT = 2181;

        private final String configPath;

        protected ConfigurableEnsembleViewFactory(String configPath) {
            this.configPath = configPath;
        }

        @Override
        public EnsembleView get() {
            // Default ensemble: a single quorum member on localhost:2181.
            return EnsembleView.of(
                    ServerQuorumView.newInstance(ServerInetAddressView.newInstance(
                            DEFAULT_ADDRESS, DEFAULT_PORT)));
        }

        @Override
        public EnsembleView get(Configuration value) {
            // Same precedence scheme as ConfigurableServerAddressViewFactory:
            // argument > scoped config > default.
            Arguments arguments = value.asArguments();
            if (! arguments.has(ARG)) {
                arguments.add(arguments.newOption(ARG, "Ensemble"));
            }
            arguments.parse();
            Map<String, Object> args = Maps.newHashMap();
            if (arguments.hasValue(ARG)) {
                args.put(CONFIG_KEY, arguments.getValue(ARG));
            }
            Config config = value.asConfig();
            if (configPath.length() > 0 && config.hasPath(configPath)) {
                config = config.getConfig(configPath);
            } else {
                config = ConfigFactory.empty();
            }
            if (! args.isEmpty()) {
                config = ConfigValueFactory.fromMap(args).toConfig().withFallback(config);
            }
            if (config.hasPath(CONFIG_KEY)) {
                String input = config.getString(CONFIG_KEY);
                try {
                    return EnsembleView.fromString(input);
                } catch (ClassNotFoundException e) {
                    throw Throwables.propagate(e);
                }
            } else {
                return get();
            }
        }
    }

    // How long shutdown() waits for monitored services to stop.
    protected static final TimeValue DEFAULT_SHUTDOWN_TIMEOUT = TimeValue.create(30L, TimeUnit.SECONDS);

    protected final Factory<Publisher> publisherFactory;
    protected final Singleton<ServiceMonitor> serviceMonitor;
    protected final Singleton<Configuration> configuration;
    protected final ListeningExecutorServiceFactory executors;
    protected final TimeValue shutdownTimeout;

    protected AbstractMain(Configuration configuration) {
        this(configuration, DEFAULT_SHUTDOWN_TIMEOUT);
    }

    protected AbstractMain(Configuration configuration, TimeValue shutdownTimeout) {
        this.configuration = Factories.holderOf(configuration);
        this.publisherFactory = EventBusPublisherFactory.getInstance();
        this.serviceMonitor = Factories.holderOf(ServiceMonitor.newInstance());
        this.executors = listeningExecutors(serviceMonitor.get());
        this.shutdownTimeout = shutdownTimeout;
    }

    public Configuration configuration() {
        return configuration.get();
    }

    public Factory<ThreadFactory> threadFactory() {
        return PlatformThreadFactory.getInstance();
    }

    public ServiceMonitor serviceMonitor() {
        return serviceMonitor.get();
    }

    public Factory<Publisher> publisherFactory() {
        return publisherFactory;
    }

    public ListeningExecutorServiceFactory executors() {
        return executors;
    }

    @Override
    public void run() {
        // Any uncaught exception on the main thread terminates the JVM.
        Thread.currentThread().setUncaughtExceptionHandler(UncaughtExceptionHandlers.systemExit());
        Application application = application();
        // Exits immediately if --help was requested.
        ConfigurableMain.exitIfHelpSet(configuration().asArguments());
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                shutdown();
            }
        });
        application.run();
    }

    /** Hook for subclasses; by default runs whatever the monitor supervises. */
    protected Application application() {
        return ServiceApplication.newInstance(serviceMonitor());
    }

    public void shutdown() {
        ListenableFuture<Service.State> future = serviceMonitor().stop();
        try {
            future.get(shutdownTimeout.value(), shutdownTimeout.unit());
        // NOTE(review): swallows InterruptedException without re-interrupting
        // and hides timeout/execution failures — consider at least logging.
        } catch (Exception e) {}
        // TODO: hacky
        // Reflectively shut down log4j if it is on the classpath, so buffered
        // log output is flushed; intentionally best-effort, failures ignored.
        try {
            Class<?> cls = Class.forName("org.apache.log4j.LogManager");
            cls.getMethod("shutdown").invoke(null);
        } catch (Exception e) {
        }
    }
}
|
package com.gocardless.services;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.gocardless.http.*;
import com.gocardless.resources.Creditor;
import com.google.common.collect.ImmutableMap;
import com.google.gson.reflect.TypeToken;
/**
* Service class for working with creditor resources.
*
* Each [payment](#core-endpoints-payments) taken through the API is linked to a "creditor", to whom
* the payment is then paid out. In most cases your organisation will have a single "creditor", but
* the API also supports collecting payments on behalf of others.
*
* Please get in touch if you wish to use this endpoint. Currently, for Anti Money Laundering
* reasons, any creditors you add must be directly related to your organisation.
*/
public class CreditorService {
    // NOTE: this class follows the shape of machine-generated API bindings;
    // private field names double as the JSON wire format via reflection, so
    // they must not be renamed.
    private final HttpClient httpClient;

    /**
     * Constructor. Users of this library should have no need to call this - an instance
     * of this class can be obtained by calling
     {@link com.gocardless.GoCardlessClient#creditors() }.
     */
    public CreditorService(HttpClient httpClient) {
        this.httpClient = httpClient;
    }

    /**
     * Creates a new creditor.
     */
    public CreditorCreateRequest create() {
        return new CreditorCreateRequest(httpClient);
    }

    /**
     * Returns a [cursor-paginated](#api-usage-cursor-pagination) list of your creditors.
     */
    public CreditorListRequest<ListResponse<Creditor>> list() {
        return new CreditorListRequest<>(httpClient, ListRequest.<Creditor>pagingExecutor());
    }

    /**
     * Like {@link #list()}, but iterates transparently across pages.
     */
    public CreditorListRequest<Iterable<Creditor>> all() {
        return new CreditorListRequest<>(httpClient, ListRequest.<Creditor>iteratingExecutor());
    }

    /**
     * Retrieves the details of an existing creditor.
     */
    public CreditorGetRequest get(String identity) {
        return new CreditorGetRequest(httpClient, identity);
    }

    /**
     * Updates a creditor object. Supports all of the fields supported when creating a creditor.
     */
    public CreditorUpdateRequest update(String identity) {
        return new CreditorUpdateRequest(httpClient, identity);
    }

    /**
     * Request class for {@link CreditorService#create }.
     *
     * Creates a new creditor.
     */
    public static final class CreditorCreateRequest extends IdempotentPostRequest<Creditor> {
        private String addressLine1;
        private String addressLine2;
        private String addressLine3;
        private String city;
        private String countryCode;
        private Map<String, String> links;
        private String name;
        private String postalCode;
        private String region;

        /**
         * The first line of the creditor's address.
         */
        public CreditorCreateRequest withAddressLine1(String addressLine1) {
            this.addressLine1 = addressLine1;
            return this;
        }

        /**
         * The second line of the creditor's address.
         */
        public CreditorCreateRequest withAddressLine2(String addressLine2) {
            this.addressLine2 = addressLine2;
            return this;
        }

        /**
         * The third line of the creditor's address.
         */
        public CreditorCreateRequest withAddressLine3(String addressLine3) {
            this.addressLine3 = addressLine3;
            return this;
        }

        /**
         * The city of the creditor's address.
         */
        public CreditorCreateRequest withCity(String city) {
            this.city = city;
            return this;
        }

        /**
         * The country of the creditor's address (presumably an ISO 3166-1
         * country code — confirm against the API reference).
         */
        public CreditorCreateRequest withCountryCode(String countryCode) {
            this.countryCode = countryCode;
            return this;
        }

        /**
         * Related resource links to submit with the creditor; replaces any
         * links set previously.
         */
        public CreditorCreateRequest withLinks(Map<String, String> links) {
            this.links = links;
            return this;
        }

        /**
         * Adds a single related resource link, creating the map on first use.
         */
        public CreditorCreateRequest withLinks(String key, String value) {
            if (links == null) {
                links = new HashMap<>();
            }
            links.put(key, value);
            return this;
        }

        /**
         * The creditor's name.
         */
        public CreditorCreateRequest withName(String name) {
            this.name = name;
            return this;
        }

        /**
         * The creditor's postal code.
         */
        public CreditorCreateRequest withPostalCode(String postalCode) {
            this.postalCode = postalCode;
            return this;
        }

        /**
         * The creditor's address region, county or department.
         */
        public CreditorCreateRequest withRegion(String region) {
            this.region = region;
            return this;
        }

        /**
         * Sets the idempotency key used to de-duplicate this create request.
         */
        public CreditorCreateRequest withIdempotencyKey(String idempotencyKey) {
            super.setIdempotencyKey(idempotencyKey);
            return this;
        }

        /**
         * On an idempotency conflict, fetches the already-created creditor,
         * propagating this request's custom headers to the GET.
         */
        @Override
        protected GetRequest<Creditor> handleConflict(HttpClient httpClient, String id) {
            CreditorGetRequest request = new CreditorGetRequest(httpClient, id);
            for (Map.Entry<String, String> header : this.getCustomHeaders().entrySet()) {
                request = request.withHeader(header.getKey(), header.getValue());
            }
            return request;
        }

        private CreditorCreateRequest(HttpClient httpClient) {
            super(httpClient);
        }

        /** Adds a custom HTTP header to this request. */
        public CreditorCreateRequest withHeader(String headerName, String headerValue) {
            this.addHeader(headerName, headerValue);
            return this;
        }

        @Override
        protected String getPathTemplate() {
            return "creditors";
        }

        @Override
        protected String getEnvelope() {
            // JSON envelope key wrapping both request body and response.
            return "creditors";
        }

        @Override
        protected Class<Creditor> getResponseClass() {
            return Creditor.class;
        }

        @Override
        protected boolean hasBody() {
            return true;
        }
    }

    /**
     * Request class for {@link CreditorService#list }.
     *
     * Returns a [cursor-paginated](#api-usage-cursor-pagination) list of your creditors.
     */
    public static final class CreditorListRequest<S> extends ListRequest<S, Creditor> {
        private CreatedAt createdAt;

        /**
         * Cursor pointing to the start of the desired set.
         */
        public CreditorListRequest<S> withAfter(String after) {
            setAfter(after);
            return this;
        }

        /**
         * Cursor pointing to the end of the desired set.
         */
        public CreditorListRequest<S> withBefore(String before) {
            setBefore(before);
            return this;
        }

        /**
         * Sets the full created_at filter object; replaces any bounds set
         * previously via the withCreatedAt* helpers.
         */
        public CreditorListRequest<S> withCreatedAt(CreatedAt createdAt) {
            this.createdAt = createdAt;
            return this;
        }

        /**
         * Limit to records created after the specified date-time.
         */
        public CreditorListRequest<S> withCreatedAtGt(String gt) {
            if (createdAt == null) {
                createdAt = new CreatedAt();
            }
            createdAt.withGt(gt);
            return this;
        }

        /**
         * Limit to records created on or after the specified date-time.
         */
        public CreditorListRequest<S> withCreatedAtGte(String gte) {
            if (createdAt == null) {
                createdAt = new CreatedAt();
            }
            createdAt.withGte(gte);
            return this;
        }

        /**
         * Limit to records created before the specified date-time.
         */
        public CreditorListRequest<S> withCreatedAtLt(String lt) {
            if (createdAt == null) {
                createdAt = new CreatedAt();
            }
            createdAt.withLt(lt);
            return this;
        }

        /**
         * Limit to records created on or before the specified date-time.
         */
        public CreditorListRequest<S> withCreatedAtLte(String lte) {
            if (createdAt == null) {
                createdAt = new CreatedAt();
            }
            createdAt.withLte(lte);
            return this;
        }

        /**
         * Number of records to return.
         */
        public CreditorListRequest<S> withLimit(Integer limit) {
            setLimit(limit);
            return this;
        }

        private CreditorListRequest(HttpClient httpClient, ListRequestExecutor<S, Creditor> executor) {
            super(httpClient, executor);
        }

        /** Adds a custom HTTP header to this request. */
        public CreditorListRequest<S> withHeader(String headerName, String headerValue) {
            this.addHeader(headerName, headerValue);
            return this;
        }

        @Override
        protected Map<String, Object> getQueryParams() {
            // Merge pagination params from the superclass with created_at bounds.
            ImmutableMap.Builder<String, Object> params = ImmutableMap.builder();
            params.putAll(super.getQueryParams());
            if (createdAt != null) {
                params.putAll(createdAt.getQueryParams());
            }
            return params.build();
        }

        @Override
        protected String getPathTemplate() {
            return "creditors";
        }

        @Override
        protected String getEnvelope() {
            return "creditors";
        }

        @Override
        protected TypeToken<List<Creditor>> getTypeToken() {
            return new TypeToken<List<Creditor>>() {};
        }

        /** Builder for the created_at[...] query-parameter filter. */
        public static class CreatedAt {
            private String gt;
            private String gte;
            private String lt;
            private String lte;

            /**
             * Limit to records created after the specified date-time.
             */
            public CreatedAt withGt(String gt) {
                this.gt = gt;
                return this;
            }

            /**
             * Limit to records created on or after the specified date-time.
             */
            public CreatedAt withGte(String gte) {
                this.gte = gte;
                return this;
            }

            /**
             * Limit to records created before the specified date-time.
             */
            public CreatedAt withLt(String lt) {
                this.lt = lt;
                return this;
            }

            /**
             * Limit to records created on or before the specified date-time.
             */
            public CreatedAt withLte(String lte) {
                this.lte = lte;
                return this;
            }

            /** Emits only the bounds that were actually set. */
            public Map<String, Object> getQueryParams() {
                ImmutableMap.Builder<String, Object> params = ImmutableMap.builder();
                if (gt != null) {
                    params.put("created_at[gt]", gt);
                }
                if (gte != null) {
                    params.put("created_at[gte]", gte);
                }
                if (lt != null) {
                    params.put("created_at[lt]", lt);
                }
                if (lte != null) {
                    params.put("created_at[lte]", lte);
                }
                return params.build();
            }
        }
    }

    /**
     * Request class for {@link CreditorService#get }.
     *
     * Retrieves the details of an existing creditor.
     */
    public static final class CreditorGetRequest extends GetRequest<Creditor> {
        @PathParam
        private final String identity;

        private CreditorGetRequest(HttpClient httpClient, String identity) {
            super(httpClient);
            this.identity = identity;
        }

        /** Adds a custom HTTP header to this request. */
        public CreditorGetRequest withHeader(String headerName, String headerValue) {
            this.addHeader(headerName, headerValue);
            return this;
        }

        @Override
        protected Map<String, String> getPathParams() {
            // Substituted into the ":identity" segment of the path template.
            ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
            params.put("identity", identity);
            return params.build();
        }

        @Override
        protected Map<String, Object> getQueryParams() {
            ImmutableMap.Builder<String, Object> params = ImmutableMap.builder();
            params.putAll(super.getQueryParams());
            return params.build();
        }

        @Override
        protected String getPathTemplate() {
            return "creditors/:identity";
        }

        @Override
        protected String getEnvelope() {
            return "creditors";
        }

        @Override
        protected Class<Creditor> getResponseClass() {
            return Creditor.class;
        }
    }

    /**
     * Request class for {@link CreditorService#update }.
     *
     * Updates a creditor object. Supports all of the fields supported when creating a creditor.
     */
    public static final class CreditorUpdateRequest extends PutRequest<Creditor> {
        @PathParam
        private final String identity;
        private String addressLine1;
        private String addressLine2;
        private String addressLine3;
        private String city;
        private String countryCode;
        private Links links;
        private String name;
        private String postalCode;
        private String region;

        /**
         * The first line of the creditor's address.
         */
        public CreditorUpdateRequest withAddressLine1(String addressLine1) {
            this.addressLine1 = addressLine1;
            return this;
        }

        /**
         * The second line of the creditor's address.
         */
        public CreditorUpdateRequest withAddressLine2(String addressLine2) {
            this.addressLine2 = addressLine2;
            return this;
        }

        /**
         * The third line of the creditor's address.
         */
        public CreditorUpdateRequest withAddressLine3(String addressLine3) {
            this.addressLine3 = addressLine3;
            return this;
        }

        /**
         * The city of the creditor's address.
         */
        public CreditorUpdateRequest withCity(String city) {
            this.city = city;
            return this;
        }

        /**
         * The country of the creditor's address (presumably an ISO 3166-1
         * country code — confirm against the API reference).
         */
        public CreditorUpdateRequest withCountryCode(String countryCode) {
            this.countryCode = countryCode;
            return this;
        }

        /**
         * Payout-account links; replaces any links set previously via the
         * withLinksDefault* helpers.
         */
        public CreditorUpdateRequest withLinks(Links links) {
            this.links = links;
            return this;
        }

        /**
         * ID of the [bank account](#core-endpoints-creditor-bank-accounts) which is set up to receive
         * payouts in DKK.
         */
        public CreditorUpdateRequest withLinksDefaultDkkPayoutAccount(String defaultDkkPayoutAccount) {
            if (links == null) {
                links = new Links();
            }
            links.withDefaultDkkPayoutAccount(defaultDkkPayoutAccount);
            return this;
        }

        /**
         * ID of the [bank account](#core-endpoints-creditor-bank-accounts) which is set up to receive
         * payouts in EUR.
         */
        public CreditorUpdateRequest withLinksDefaultEurPayoutAccount(String defaultEurPayoutAccount) {
            if (links == null) {
                links = new Links();
            }
            links.withDefaultEurPayoutAccount(defaultEurPayoutAccount);
            return this;
        }

        /**
         * ID of the [bank account](#core-endpoints-creditor-bank-accounts) which is set up to receive
         * payouts in GBP.
         */
        public CreditorUpdateRequest withLinksDefaultGbpPayoutAccount(String defaultGbpPayoutAccount) {
            if (links == null) {
                links = new Links();
            }
            links.withDefaultGbpPayoutAccount(defaultGbpPayoutAccount);
            return this;
        }

        /**
         * ID of the [bank account](#core-endpoints-creditor-bank-accounts) which is set up to receive
         * payouts in SEK.
         */
        public CreditorUpdateRequest withLinksDefaultSekPayoutAccount(String defaultSekPayoutAccount) {
            if (links == null) {
                links = new Links();
            }
            links.withDefaultSekPayoutAccount(defaultSekPayoutAccount);
            return this;
        }

        /**
         * The creditor's name.
         */
        public CreditorUpdateRequest withName(String name) {
            this.name = name;
            return this;
        }

        /**
         * The creditor's postal code.
         */
        public CreditorUpdateRequest withPostalCode(String postalCode) {
            this.postalCode = postalCode;
            return this;
        }

        /**
         * The creditor's address region, county or department.
         */
        public CreditorUpdateRequest withRegion(String region) {
            this.region = region;
            return this;
        }

        private CreditorUpdateRequest(HttpClient httpClient, String identity) {
            super(httpClient);
            this.identity = identity;
        }

        /** Adds a custom HTTP header to this request. */
        public CreditorUpdateRequest withHeader(String headerName, String headerValue) {
            this.addHeader(headerName, headerValue);
            return this;
        }

        @Override
        protected Map<String, String> getPathParams() {
            ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
            params.put("identity", identity);
            return params.build();
        }

        @Override
        protected String getPathTemplate() {
            return "creditors/:identity";
        }

        @Override
        protected String getEnvelope() {
            return "creditors";
        }

        @Override
        protected Class<Creditor> getResponseClass() {
            return Creditor.class;
        }

        @Override
        protected boolean hasBody() {
            return true;
        }

        /** Serialized as the "links" object of the update payload. */
        public static class Links {
            private String defaultDkkPayoutAccount;
            private String defaultEurPayoutAccount;
            private String defaultGbpPayoutAccount;
            private String defaultSekPayoutAccount;

            /**
             * ID of the [bank account](#core-endpoints-creditor-bank-accounts) which is set up to receive
             * payouts in DKK.
             */
            public Links withDefaultDkkPayoutAccount(String defaultDkkPayoutAccount) {
                this.defaultDkkPayoutAccount = defaultDkkPayoutAccount;
                return this;
            }

            /**
             * ID of the [bank account](#core-endpoints-creditor-bank-accounts) which is set up to receive
             * payouts in EUR.
             */
            public Links withDefaultEurPayoutAccount(String defaultEurPayoutAccount) {
                this.defaultEurPayoutAccount = defaultEurPayoutAccount;
                return this;
            }

            /**
             * ID of the [bank account](#core-endpoints-creditor-bank-accounts) which is set up to receive
             * payouts in GBP.
             */
            public Links withDefaultGbpPayoutAccount(String defaultGbpPayoutAccount) {
                this.defaultGbpPayoutAccount = defaultGbpPayoutAccount;
                return this;
            }

            /**
             * ID of the [bank account](#core-endpoints-creditor-bank-accounts) which is set up to receive
             * payouts in SEK.
             */
            public Links withDefaultSekPayoutAccount(String defaultSekPayoutAccount) {
                this.defaultSekPayoutAccount = defaultSekPayoutAccount;
                return this;
            }
        }
    }
}
|
package com.sonymobile.jenkins.plugins.gitlabauth;
import com.sonymobile.gitlab.exceptions.ApiConnectionFailureException;
import com.sonymobile.gitlab.exceptions.GitLabApiException;
import com.sonymobile.gitlab.model.GitLabSessionInfo;
import com.sonymobile.jenkins.plugins.gitlabapi.GitLabConfig;
import hudson.Extension;
import hudson.model.Descriptor;
import hudson.security.AbstractPasswordBasedSecurityRealm;
import hudson.security.GroupDetails;
import hudson.security.SecurityRealm;
import net.sf.json.JSONObject;
import org.acegisecurity.AuthenticationException;
import org.acegisecurity.BadCredentialsException;
import org.acegisecurity.userdetails.UserDetails;
import org.acegisecurity.userdetails.UsernameNotFoundException;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.StaplerRequest;
import org.springframework.dao.DataAccessException;
/**
* A security realm to support the use of login in with GitLab credentials to a Jenkins server.
*
* @author Andreas Alanko
*/
public class GitLabSecurityRealm extends AbstractPasswordBasedSecurityRealm {

    /** No configuration state; all settings come from {@link GitLabConfig}. */
    @DataBoundConstructor
    public GitLabSecurityRealm() {
        super();
    }

    /**
     * Specifies if the configured Security Realm allows signup.
     *
     * GitLabSecurityRealm will not allow signups through Jenkins.
     *
     * @return true if signup is allowed
     */
    @Override
    public boolean allowsSignup() {
        // Account creation is handled by GitLab, never through Jenkins.
        return false;
    }

    /**
     * Tries to authenticate a user with the given username and password.
     *
     * @param username the username of the user
     * @param password the password of the user
     * @return a UserDetails object with user information
     * @throws AuthenticationException if the authentication failed
     */
    @Override
    protected UserDetails authenticate(String username, String password) throws AuthenticationException {
        try {
            // authenticate credentials and create user details
            return loadUserWithCredentials(username, password);
        } catch (GitLabApiException e) {
            // authentication or connection to the API failed
            // NOTE(review): API-connection failures are also reported as "bad
            // credentials" here; the cause is preserved for diagnosis.
            throw new BadCredentialsException("Authentication against GitLab failed", e);
        }
    }

    /**
     * Gets user details for a user matching a username and password.
     *
     * @param username the username of the user
     * @param password the password of the user
     * @return user details for a user matching the credentials
     * @throws GitLabApiException if authentication fails or the API
     *         connection fails (e.g. {@link ApiConnectionFailureException})
     */
    private UserDetails loadUserWithCredentials(String username, String password)
            throws GitLabApiException {
        // Opening a session both verifies the credentials and returns the
        // user's profile data.
        GitLabSessionInfo session = GitLabConfig.getApiClient().getSession(username, password);
        // create user details from the session
        return new GitLabUserDetails(session);
    }

    /**
     * Gets user information about the user with the given username.
     *
     * @param username the user of the user
     * @return a UserDetails object with information about the user
     * @throws UsernameNotFoundException if user with username does not exist
     * @throws DataAccessException will never be thrown
     */
    @Override
    public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException, DataAccessException {
        //TODO
        // NOTE(review): returning null violates the UserDetailsService contract
        // (callers expect UsernameNotFoundException instead of null) — confirm
        // intended behavior before implementing.
        return null;
    }

    /**
     * This feature is not supported.
     *
     * Will throw UsernameNotFoundException at all times.
     *
     * @param groupname the name of the group
     * @throws UsernameNotFoundException will be thrown at all times
     * @throws DataAccessException will never be thrown
     */
    @Override
    public GroupDetails loadGroupByGroupname(String groupname) throws UsernameNotFoundException, DataAccessException {
        throw new UsernameNotFoundException("Feature not supported");
    }

    /** Registers this realm in Jenkins' security configuration UI. */
    @Extension
    public static final class DescriptorImpl extends Descriptor<SecurityRealm> {
        /**
         * Returns a new GitLabSecurityRealm object.
         *
         * @param req the http request
         * @param formData form data
         * @return a GitLabSecurityRealm object
         */
        @Override
        public SecurityRealm newInstance(StaplerRequest req, JSONObject formData) throws FormException {
            // The realm is stateless, so a fresh instance is always equivalent.
            return new GitLabSecurityRealm();
        }

        /**
         * Gives the name to be displayed by the Jenkins view in the security configuration page.
         *
         * @return the display name
         */
        public String getDisplayName() {
            return "GitLab Authentication";
        }
    }
}
|
package estivate.core;
import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import org.jsoup.helper.StringUtil;
import org.jsoup.select.Elements;
import estivate.annotations.Attr;
import estivate.annotations.Select;
import estivate.annotations.TagName;
import estivate.annotations.Text;
import estivate.annotations.Val;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
/**
 * Evaluates the {@code select} expression carried by the estivate annotations
 * ({@link Select}, {@link Text}, {@link Attr}, {@link TagName}, {@link Val})
 * against a set of jsoup elements, optionally reducing the result to its first
 * or last element.
 *
 * @author Benoit Theunissen
 */
@Slf4j
@Deprecated
public abstract class SelectEvaluater {

    /**
     * Applies the select expression held by {@code bean} to {@code elementsIn}.
     *
     * @param bean normalized view of the source annotation
     * @param elementsIn elements the expression is evaluated against
     * @param member annotated member, used for logging only
     * @return the selected elements, or {@code elementsIn} unchanged when the
     *         expression is blank
     * @throws IllegalArgumentException if both first() and last() are requested
     */
    public static Elements select(SelectBean bean, Elements elementsIn,
            AnnotatedElement member) {
        String select = bean.getSelect();
        // A blank expression means "no filtering": pass the input through untouched.
        if (StringUtil.isBlank(select)) {
            log.debug("no select expr on '{}' for '{}'", bean.getName(),
                    ClassUtils.getName(member));
            return elementsIn;
        }
        log.debug("selecting '{}' with '{}' for '{}'", select, bean.getName(),
                ClassUtils.getName(member));
        log.trace("select in '{}'", elementsIn.toString());
        Elements elementsOut = elementsIn.select(select);
        // first() and last() are mutually exclusive reductions.
        if (bean.isFirst() && bean.isLast()) {
            throw new IllegalArgumentException(
                    "Select cannot have first() and last() both true at the same time");
        }
        if (bean.isFirst()) {
            log.trace("select first element");
            elementsOut = new Elements(elementsOut.first());
        }
        if (bean.isLast()) {
            log.trace("select last element");
            elementsOut = new Elements(elementsOut.last());
        }
        log.trace("select out '{}'", elementsOut.toString());
        return elementsOut;
    }

    /** Selects using the expression of a {@link Text} annotation. */
    public static Elements select(Text annotation, Elements elements,
            AnnotatedElement member) {
        return select(new SelectBean(annotation), elements, member);
    }

    /** Selects using the expression of a {@link Select} annotation. */
    public static Elements select(Select annotation, Elements elements,
            AnnotatedElement member) {
        return select(new SelectBean(annotation), elements, member);
    }

    /** Selects using the expression of an {@link Attr} annotation. */
    public static Elements select(Attr annotation, Elements elements,
            AnnotatedElement member) {
        return select(new SelectBean(annotation), elements, member);
    }

    /** Selects using the expression of a {@link TagName} annotation. */
    public static Elements select(TagName annotation, Elements elements,
            AnnotatedElement member) {
        return select(new SelectBean(annotation), elements, member);
    }

    /** Selects using the expression of a {@link Val} annotation. */
    public static Elements select(Val annotation, Elements elements,
            AnnotatedElement member) {
        return select(new SelectBean(annotation), elements, member);
    }

    /**
     * Annotation-independent carrier of the selection attributes shared by all
     * estivate annotations.
     */
    @Data
    public static class SelectBean {

        /**
         * Hold the name of the original annotation
         */
        protected String name;

        protected String select;
        protected int index;
        protected boolean unique;
        protected boolean first;
        protected boolean last;
        protected boolean optional;

        /** Builds the bean from a {@link Select}; a non-blank value() wins over select(). */
        public SelectBean(Select annotation) {
            setAnnotation(annotation);
            this.select = or(annotation.value(), annotation.select());
            this.index = annotation.index();
            this.unique = annotation.unique();
            this.first = annotation.first();
            this.last = annotation.last();
            this.optional = annotation.optional();
        }

        /** Builds the bean from a {@link Text} annotation. */
        public SelectBean(Text annotation) {
            setAnnotation(annotation);
            this.select = annotation.select();
            this.index = annotation.index();
            this.unique = annotation.unique();
            this.first = annotation.first();
            this.last = annotation.last();
            this.optional = annotation.optional();
        }

        /** Builds the bean from an {@link Attr} annotation. */
        public SelectBean(Attr annotation) {
            setAnnotation(annotation);
            this.select = annotation.select();
            this.index = annotation.index();
            this.unique = annotation.unique();
            this.first = annotation.first();
            this.last = annotation.last();
            this.optional = annotation.optional();
        }

        /** Builds the bean from a {@link TagName} annotation. */
        public SelectBean(TagName annotation) {
            setAnnotation(annotation);
            this.select = annotation.select();
            this.index = annotation.index();
            this.unique = annotation.unique();
            this.first = annotation.first();
            this.last = annotation.last();
            this.optional = annotation.optional();
        }

        /** Builds the bean from a {@link Val} annotation. */
        public SelectBean(Val annotation) {
            setAnnotation(annotation);
            this.select = annotation.select();
            this.index = annotation.index();
            this.unique = annotation.unique();
            this.first = annotation.first();
            this.last = annotation.last();
            this.optional = annotation.optional();
        }

        /** Remembers the simple name of the source annotation, for logging. */
        protected void setAnnotation(Annotation annotation) {
            this.name = annotation.annotationType().getSimpleName();
        }

        /** Returns value1 unless it is blank, in which case value2. */
        private String or(String value1, String value2) {
            return StringUtil.isBlank(value1) ? value2 : value1;
        }
    }
}
|
package de.domisum.lib.animulus.npc.task.tasks.movement;
import de.domisum.lib.animulus.AnimulusLib;
import de.domisum.lib.animulus.npc.task.NPCTask;
import de.domisum.lib.animulus.npc.task.NPCTaskSlot;
import de.domisum.lib.auxilium.data.container.Duo;
import de.domisum.lib.auxilium.data.container.math.Vector2D;
import de.domisum.lib.auxilium.data.container.math.Vector3D;
import de.domisum.lib.auxilium.util.TextUtil;
import de.domisum.lib.auxilium.util.bukkit.LocationUtil;
import de.domisum.lib.auxilium.util.java.annotations.APIUsage;
import de.domisum.lib.auxilium.util.math.MathUtil;
import de.domisum.lib.compitum.transitionalpath.node.TransitionType;
import de.domisum.lib.compitum.transitionalpath.path.TransitionalPath;
import de.domisum.lib.compitum.universal.UniversalPathfinder;
import org.bukkit.Location;
@APIUsage
public class NPCTaskWalkTo extends NPCTask
{
// CONSTANTS
private static NPCTaskSlot[] USED_TASK_SLOTS = new NPCTaskSlot[] {NPCTaskSlot.MOVEMENT, NPCTaskSlot.HEAD_ROTATION};
private static final double NO_MOVEMENT_THRESHOLD = 0.001;
private static final int NO_MOVEMENT_STUCK_REPETITIONS = 20;
// PROPERTIES
private Location target;
private double speedMultiplier;
// STATUS
private TransitionalPath path;
private int currentWaypointIndex = 0;
private Vector3D lastPosition;
private int unchangedPositionsInRow = 0;
private int reuseLastDirectionTicks = 0;
private Vector2D lastDirection;
// CONSTRUCTOR
@APIUsage
public NPCTaskWalkTo(Location target)
{
this(target, 1);
}
@APIUsage
public NPCTaskWalkTo(Location target, double speedMultiplier)
{
super();
this.target = target;
this.speedMultiplier = speedMultiplier;
}
// GETTERS
@Override
public NPCTaskSlot[] USED_TASK_SLOTS()
{
return USED_TASK_SLOTS;
}
// EXECUTION
@Override
protected void onStart()
{
Location start = this.npc.getLocation();
UniversalPathfinder pathfinder = new UniversalPathfinder(start, this.target);
pathfinder.findPath();
this.path = pathfinder.getPath();
if(this.path == null)
{
this.npc.onWalkingFail();
AnimulusLib.getInstance().getLogger().warning(
this.npc.getId()+": No path was found from "+TextUtil.getLocationAsString(start)+" to "+TextUtil
.getLocationAsString(this.target));
AnimulusLib.getInstance().getLogger().warning("Pathfinder Data: "+pathfinder.getDiagnose());
if(pathfinder.getFailure() != null)
AnimulusLib.getInstance().getLogger().severe("Error: '"+pathfinder.getFailure()+"'");
this.cancel();
}
}
@Override
protected boolean onUpdate()
{
if(this.path == null)
return true;
if(this.currentWaypointIndex >= this.path.getNumberOfWaypoints())
return true;
if(this.lastPosition != null)
if(this.npc.getPosition().subtract(this.lastPosition).lengthSquared() < NO_MOVEMENT_THRESHOLD)
{
this.unchangedPositionsInRow++;
if(this.unchangedPositionsInRow >= NO_MOVEMENT_STUCK_REPETITIONS)
{
this.npc.onWalkingFail();
return true;
}
}
Location loc = this.npc.getLocation();
Duo<Vector3D, Integer> currentWaypoint = this.path.getWaypoint(this.currentWaypointIndex);
double dX = currentWaypoint.a.x-loc.getX();
double dY = currentWaypoint.a.y-loc.getY();
double dZ = currentWaypoint.a.z-loc.getZ();
if(dX*dX+dZ*dZ < 0.01)
{
this.currentWaypointIndex++;
this.reuseLastDirectionTicks = 2;
return false;
}
if(dY > 0 && currentWaypoint.b == TransitionType.JUMP)
this.npc.jump();
double speed = this.npc.getWalkSpeed()*this.speedMultiplier;
Vector2D mov = new Vector2D(dX, dZ);
double movLength = mov.length();
if(this.reuseLastDirectionTicks > 0)
{
mov = this.lastDirection.multiply(movLength);
this.reuseLastDirectionTicks
}
Vector2D direction = mov.divide(movLength);
if(movLength > speed)
mov = mov.multiply(speed/movLength);
// inair acceleration is not that good
if(!this.npc.isOnGround())
mov = mov.multiply(0.3);
this.npc.setVelocity(new Vector3D(mov.x, this.npc.getVelocity().y, mov.y));
this.lastDirection = direction;
// HEAD ROTATION
Location waypointLocation = new Location(loc.getWorld(), currentWaypoint.a.x, currentWaypoint.a.y, currentWaypoint.a.z);
Location directionLoc = LocationUtil.lookAt(loc, waypointLocation);
float targetYaw = directionLoc.getYaw();
float targetPitch = directionLoc.getPitch();
targetPitch = (float) MathUtil.clampAbs(targetPitch, 25);
Duo<Float, Float> stepYawAndPitch = NPCTaskLookTowards.getStepYawAndPitch(loc, targetYaw, targetPitch, 10);
this.npc.setYawPitch(loc.getYaw()+stepYawAndPitch.a, loc.getPitch()+stepYawAndPitch.b);
this.lastPosition = this.npc.getPosition();
return false;
}
@Override
protected void onCancel()
{
}
}
|
package com.hpcloud.mon.resource;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.validation.Valid;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;
import org.joda.time.DateTime;
import com.codahale.metrics.annotation.Timed;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Iterables;
import com.hpcloud.mon.app.MetricService;
import com.hpcloud.mon.app.command.CreateMetricCommand;
import com.hpcloud.mon.app.validation.DimensionValidation;
import com.hpcloud.mon.app.validation.MetricNameValidation;
import com.hpcloud.mon.app.validation.Validation;
import com.hpcloud.mon.common.model.Services;
import com.hpcloud.mon.common.model.metric.Metric;
import com.hpcloud.mon.domain.model.metric.Measurement;
import com.hpcloud.mon.domain.model.metric.MeasurementRepository;
import com.hpcloud.mon.resource.exception.Exceptions;
/**
* Metric resource implementation.
*
* @author Todd Walk
* @author Jonathan Halterman
*/
@Path("/v2.0/metrics")
public class MetricResource {
  /** Role that permits posting metrics on behalf of another tenant. */
  private static final String MONITORING_DELEGATE_ROLE = "monitoring-delegate";
  private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults();
  private static final Splitter COLON_SPLITTER = Splitter.on(':').omitEmptyStrings().trimResults();

  private final MetricService service;
  private final MeasurementRepository measurementRepo;

  @Inject
  public MetricResource(MetricService service, MeasurementRepository measurementRepo) {
    this.service = service;
    this.measurementRepo = measurementRepo;
  }

  /**
   * Creates the posted metrics.
   *
   * <p>Unless the caller holds the monitoring-delegate role, it may neither post metrics with a
   * reserved (hpcs) name nor post metrics for another tenant.
   *
   * @param uriInfo request URI info
   * @param authToken keystone auth token
   * @param tenantId id of the calling tenant
   * @param roles comma-separated list of the caller's roles
   * @param crossTenantId tenant to create metrics for, if delegated
   * @param commands the metrics to create
   */
  @POST
  @Timed
  @Consumes(MediaType.APPLICATION_JSON)
  public void create(@Context UriInfo uriInfo, @HeaderParam("X-Auth-Token") String authToken,
      @HeaderParam("X-Tenant-Id") String tenantId, @HeaderParam("X-Roles") String roles,
      @QueryParam("tenant_id") String crossTenantId, @Valid CreateMetricCommand[] commands) {
    // BUGFIX: Arrays.asList(Iterable) wrapped the whole Iterable as a single list element,
    // so contains(...) was always false and the delegate role was never recognized.
    boolean isDelegate = !Strings.isNullOrEmpty(roles)
        && Iterables.contains(COMMA_SPLITTER.split(roles), MONITORING_DELEGATE_ROLE);
    List<Metric> metrics = new ArrayList<>(commands.length);
    for (CreateMetricCommand command : commands) {
      if (!isDelegate) {
        if (Services.isReserved(command.name))
          throw Exceptions.forbidden("Project %s cannot POST metrics for the hpcs name", tenantId);
        if (!Strings.isNullOrEmpty(crossTenantId))
          throw Exceptions.forbidden("Project %s cannot POST cross tenant metrics", tenantId);
      }

      command.validate();
      metrics.add(command.toMetric());
    }

    service.create(metrics, tenantId, crossTenantId, authToken);
  }

  /**
   * Returns measurements for the metric matching the given name and dimensions within the given
   * time window, aggregated with the requested statistics over the given period.
   *
   * @param authToken keystone auth token
   * @param tenantId id of the calling tenant
   * @param name metric name
   * @param startTimeStr inclusive window start (required)
   * @param endTimeStr window end (optional)
   * @param dimensionsStr dimensions as "name:value,name:value"
   * @param statisticsStr comma-separated statistics to compute
   * @param periodStr aggregation period in seconds, defaults to 300
   * @return the matching measurements
   */
  @GET
  @Timed
  @Produces(MediaType.APPLICATION_JSON)
  public List<Measurement> get(@HeaderParam("X-Auth-Token") String authToken,
      @HeaderParam("X-Tenant-Id") String tenantId, @QueryParam("name") String name,
      @QueryParam("start_time") String startTimeStr, @QueryParam("end_time") String endTimeStr,
      @QueryParam("dimensions") String dimensionsStr,
      @QueryParam("statistics") String statisticsStr,
      @DefaultValue("300") @QueryParam("period") String periodStr) {
    // Validate query parameters
    DateTime startTime = Validation.parseAndValidateDate(startTimeStr, "start_time", true);
    DateTime endTime = Validation.parseAndValidateDate(endTimeStr, "end_time", false);
    if (!startTime.isBefore(endTime))
      throw Exceptions.badRequest("start_time must be before end_time");
    Validation.validateNotNullOrEmpty(dimensionsStr, "dimensions");
    Validation.validateNotNullOrEmpty(statisticsStr, "statistics");
    int period = Validation.parseAndValidateNumber(periodStr, "period");
    List<String> statistics = Validation.parseValidateAndNormalizeStatistics(COMMA_SPLITTER.split(statisticsStr));

    // Parse dimensions; entries without exactly one colon are silently ignored
    Map<String, String> dimensions = new HashMap<>();
    for (String dimensionStr : COMMA_SPLITTER.split(dimensionsStr)) {
      String[] dimensionArr = Iterables.toArray(COLON_SPLITTER.split(dimensionStr), String.class);
      if (dimensionArr.length == 2)
        dimensions.put(dimensionArr[0], dimensionArr[1]);
    }

    // Validate metric definition ("serviceName" avoids shadowing the MetricService field)
    String serviceName = dimensions.get(Services.SERVICE_DIMENSION);
    MetricNameValidation.validate(name, serviceName);
    DimensionValidation.validate(dimensions, serviceName);

    // Verify ownership
    Validation.verifyOwnership(tenantId, name, dimensions, authToken);

    // Return measurements
    return measurementRepo.find(authToken, name, startTime, endTime, dimensions, statistics, period);
  }
}
|
package eu.digitisation.layout;
import eu.digitisation.image.Bimage;
import eu.digitisation.io.FileType;
import java.awt.Color;
import java.awt.Desktop;
import java.io.File;
import java.io.IOException;
/**
* Shows text regions (as stored in PAGE XML) on image
*
* @author R.C.C
*/
public class Viewer {

    /**
     * Split a file name into path, base-name and extension
     *
     * @param filename the file name to split
     * @return path (before the last separator, or "." if there is none),
     * base-name (before the last dot) and extension (after the last dot, or
     * empty if there is none)
     */
    private static String[] getFilenameTokens(String filename) {
        String[] tokens = new String[3];
        int first = filename.lastIndexOf(File.separator);
        int last = filename.lastIndexOf('.');
        // No separator (first == -1): use the current directory as the path
        // instead of throwing StringIndexOutOfBoundsException.
        tokens[0] = (first < 0) ? "." : filename.substring(0, first);
        if (last > first) {
            tokens[1] = filename.substring(first + 1, last);
            tokens[2] = filename.substring(last + 1);
        } else {
            // No dot after the last separator: the file has no extension
            tokens[1] = filename.substring(first + 1);
            tokens[2] = "";
        }
        return tokens;
    }

    /**
     * Demo main: draws the text regions stored in a layout XML file on top of
     * the page image and opens the result.
     *
     * @param args image_file page_file [options]; options may contain "b"
     * (blocks), "l" (lines), "w" (words) — all are drawn when absent
     * @throws IOException if either input file is missing or unreadable
     */
    public static void main(String[] args) throws IOException {
        if (args.length < 2) {
            System.err.println("Usage: Viewer image_file page_file [options]");
            System.exit(0);
        }
        File ifile = new File(args[0]);
        File xmlfile = new File(args[1]);
        String opts = (args.length > 2) ? args[2] : null;
        FileType ftype = FileType.valueOf(xmlfile);
        String[] tokens = getFilenameTokens(args[0]);
        String path = tokens[0];
        String id = tokens[1];
        String ext = tokens[2];
        // Output goes next to the input image, suffixed "_marked"
        File ofile = new File(path + File.separator + id + "_marked." + ext);
        Bimage page = null;
        Bimage scaled;
        float[] shortDash = {4f, 2f};
        float[] longDash = {8f, 4f};
        Page gt = null;
        if (ifile.exists()) {
            try {
                page = new Bimage(ifile).toRGB();
            } catch (NullPointerException ex) {
                throw new IOException("Unsupported format");
            }
        } else {
            throw new java.io.IOException(ifile.getCanonicalPath() + " not found");
        }
        if (xmlfile.exists()) {
            // Pick the parser matching the detected layout format
            switch (ftype) {
                case PAGE:
                    gt = new PAGEPage(xmlfile);
                    break;
                case HOCR:
                    gt = new HOCRPage(xmlfile);
                    break;
                case FR10:
                    gt = new FR10Page(xmlfile);
                    break;
                case ALTO:
                    gt = new ALTOPage(xmlfile);
                    break;
                default:
                    throw new java.lang.UnsupportedOperationException("Still not implemented");
            }
        } else {
            throw new java.io.IOException(xmlfile.getCanonicalPath() + " not found");
        }
        // Overlay the requested component frontiers in distinct colors
        if (opts == null || opts.contains("b")) {
            page.add(gt.getFrontiers(ComponentType.BLOCK), Color.RED, 2f, shortDash);
        }
        if (opts == null || opts.contains("l")) {
            page.add(gt.getFrontiers(ComponentType.LINE), Color.GREEN, 2f, longDash);
        }
        if (opts == null || opts.contains("w")) {
            page.add(gt.getFrontiers(ComponentType.WORD), Color.BLUE, 2f);
        }
        for (TextComponent component : gt.getComponents(ComponentType.WORD)) {
            System.out.println(component);
            // page.add(component.getFrontier(), Color.BLUE, 2f);
        }
        scaled = new Bimage(page, 1.0);
        scaled.write(ofile);
        System.out.println("output=" + ofile);
        if (Desktop.getDesktop().isSupported(Desktop.Action.OPEN)) {
            Desktop.getDesktop().open(ofile);
        }
    }
}
|
package de.fernunihagen.dna.jkn.scalephant.network;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import de.fernunihagen.dna.jkn.scalephant.storage.BoundingBox;
public class InsertTuplePackage implements NetworkPackage {

	/**
	 * The name of the table
	 */
	protected final String table;

	/**
	 * The key to insert
	 */
	protected final String key;

	/**
	 * The timestamp of the tuple
	 */
	protected final long timestamp;

	/**
	 * The bounding box of the tuple
	 */
	protected final BoundingBox bbox;

	/**
	 * The data
	 */
	protected final String data;

	/**
	 * The Logger
	 */
	private final static Logger logger = LoggerFactory.getLogger(InsertTuplePackage.class);

	/**
	 * Create package from parameter
	 *
	 * @param table the name of the table
	 * @param key the key of the tuple
	 * @param timestamp the timestamp of the tuple
	 * @param bbox the bounding box of the tuple
	 * @param data the payload
	 */
	public InsertTuplePackage(final String table, final String key, final long timestamp,
			final BoundingBox bbox, final String data) {
		this.table = table;
		this.key = key;
		this.timestamp = timestamp;
		this.bbox = bbox;
		this.data = data;
	}

	/**
	 * Decode the encoded tuple into an object
	 *
	 * @param encodedPackage the encoded network package
	 * @return the decoded package; currently always {@code null} (decoding of
	 *         the bounding box is not implemented yet)
	 */
	public static InsertTuplePackage decodeTuple(final byte encodedPackage[]) {
		final ByteBuffer bb = NetworkPackageDecoder.encapsulateBytes(encodedPackage);
		NetworkPackageDecoder.validatePackageHeader(bb);

		// Field lengths and the timestamp are written in exactly this order by
		// getByteArray(), so they must be read back in the same order.
		short tableLength = bb.getShort();
		short keyLength = bb.getShort();
		int bBoxLength = bb.getInt();
		int dataLength = bb.getInt();
		long timeStamp = bb.getLong();

		logger.debug("Position: {}", bb.position());
		logger.debug("Timestamp: {}", timeStamp);

		final byte[] tableBytes = new byte[tableLength];
		bb.get(tableBytes, 0, tableBytes.length);
		final String tableName = new String(tableBytes);
		logger.debug("{} - {}", tableLength, tableName);

		final byte[] keyBytes = new byte[keyLength];
		bb.get(keyBytes, 0, keyBytes.length);
		final String key = new String(keyBytes);
		logger.debug("{} - {}", keyLength, key);

		final byte[] bbBoxBytes = new byte[bBoxLength];
		bb.get(bbBoxBytes, 0, bbBoxBytes.length);

		final byte[] dataBytes = new byte[dataLength];
		bb.get(dataBytes, 0, dataBytes.length);

		logger.debug("Remaining bytes: {}", bb.remaining());

		// TODO: BoundingBox deserialization is not implemented, so the parsed
		// fields cannot yet be assembled into an InsertTuplePackage.
		return null;
	}

	/**
	 * Check validity of the entries
	 *
	 * @return true if the package fields are valid, false otherwise
	 */
	protected boolean isValied() {
		// NOTE(review): method name keeps the historical typo ("isValied") so
		// existing subclasses and callers are not broken.
		// Table names are limited to 16 bytes by the network protocol.
		if(table.getBytes().length > 16) {
			logger.warn("Tablename to long: " + table);
			return false;
		}

		return true;
	}

	@Override
	public byte[] getByteArray(final SequenceNumberGenerator sequenceNumberGenerator) {
		final NetworkPackageEncoder networkPackageEncoder
			= new NetworkPackageEncoder(sequenceNumberGenerator);

		final ByteArrayOutputStream bos = networkPackageEncoder.getByteOutputStream(getPackageType());

		try {
			// NOTE(review): String.getBytes() uses the platform charset here and
			// in decodeTuple(); both sides must run with the same default — TODO
			// consider pinning an explicit charset on both ends.
			final byte[] tableBytes = table.getBytes();
			final byte[] keyBytes = key.getBytes();
			final byte[] bboxBytes = bbox.toByteArray();
			final byte[] dataBytes = data.getBytes();

			// Fixed-size header: 2 + 2 + 4 + 4 + 8 = 20 bytes
			final ByteBuffer bb = ByteBuffer.allocate(20);
			bb.order(NetworkConst.NETWORK_BYTEORDER);
			bb.putShort((short) tableBytes.length);
			bb.putShort((short) keyBytes.length);
			bb.putInt(bboxBytes.length);
			bb.putInt(dataBytes.length);
			bb.putLong(timestamp);

			// Write body length
			final int bodyLength = bb.capacity() + tableBytes.length
					+ keyBytes.length + bboxBytes.length + dataBytes.length;

			final ByteBuffer bodyLengthBuffer = ByteBuffer.allocate(4);
			bodyLengthBuffer.order(NetworkConst.NETWORK_BYTEORDER);
			bodyLengthBuffer.putInt(bodyLength);
			bos.write(bodyLengthBuffer.array());

			// Write body
			bos.write(bb.array());
			bos.write(tableBytes);
			bos.write(keyBytes);
			bos.write(bboxBytes);
			bos.write(dataBytes);
			bos.close();
		} catch (IOException e) {
			logger.error("Got exception while converting package into bytes", e);
			return null;
		}

		return bos.toByteArray();
	}

	public String getTable() {
		return table;
	}

	public String getKey() {
		return key;
	}

	public long getTimestamp() {
		return timestamp;
	}

	public BoundingBox getBbox() {
		return bbox;
	}

	public String getData() {
		return data;
	}

	@Override
	public String toString() {
		return "InsertPackage [table=" + table + ", key=" + key
				+ ", timestamp=" + timestamp + ", bbox=" + bbox + ", data="
				+ data + "]";
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((bbox == null) ? 0 : bbox.hashCode());
		result = prime * result + ((data == null) ? 0 : data.hashCode());
		result = prime * result + ((key == null) ? 0 : key.hashCode());
		result = prime * result + ((table == null) ? 0 : table.hashCode());
		result = prime * result + (int) (timestamp ^ (timestamp >>> 32));
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		InsertTuplePackage other = (InsertTuplePackage) obj;
		if (bbox == null) {
			if (other.bbox != null)
				return false;
		} else if (!bbox.equals(other.bbox))
			return false;
		if (data == null) {
			if (other.data != null)
				return false;
		} else if (!data.equals(other.data))
			return false;
		if (key == null) {
			if (other.key != null)
				return false;
		} else if (!key.equals(other.key))
			return false;
		if (table == null) {
			if (other.table != null)
				return false;
		} else if (!table.equals(other.table))
			return false;
		if (timestamp != other.timestamp)
			return false;
		return true;
	}

	@Override
	public byte getPackageType() {
		return NetworkConst.REQUEST_TYPE_INSERT_TUPLE;
	}
}
|
package com.imcode.imcms.domain.dto;
import com.imcode.imcms.persistence.entity.Meta;
import lombok.Data;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.stream.Stream;
@Data
public class MenuItemDTO implements Serializable {

    private static final long serialVersionUID = 8297109006105427219L;

    private String title;
    private Meta.DocumentType type;
    private Integer documentId;
    private String target;
    private String link;
    private DocumentStatus documentStatus;
    private Date publishedDate;
    private Date modifiedDate;
    private boolean hasNewerVersion;
    private Integer dataLevel;
    private Integer dataIndex;
    // direct sub-items of this menu entry; never null
    private List<MenuItemDTO> children = new ArrayList<>();

    /**
     * Walks this item and every descendant, depth-first with this item first,
     * as a single flat stream.
     *
     * @return stream containing this item followed by all of its descendants
     */
    public Stream<MenuItemDTO> flattened() {
        Stream<MenuItemDTO> self = Stream.of(this);
        Stream<MenuItemDTO> descendants = children.stream()
                .flatMap(child -> child.flattened());
        return Stream.concat(self, descendants);
    }
}
|
package graphql.execution;
import graphql.PublicApi;
import graphql.language.Argument;
import graphql.language.Field;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
import static graphql.Assert.assertNotEmpty;
/**
* This represent all Fields in a query which overlap and are merged into one.
* This means they all represent the same field actually when the query is executed.
*
* Example query with more than one Field merged together:
*
* <pre>
* {@code
*
* query Foo {
* bar
* ...BarFragment
* }
*
* fragment BarFragment on Query {
* bar
* }
* }
* </pre>
*
* Another example:
* <pre>
* {@code
* {
 * me{firstName}
* me{lastName}
* }
* }
* </pre>
*
* Here the me field is merged together including the sub selections.
*
* A third example with different directives:
* <pre>
* {@code
* {
* foo @someDirective
* foo @anotherDirective
* }
* }
* </pre>
* These examples make clear that you need to consider all merged fields together to have the full picture.
*
 * The actual logic determining when fields can be successfully merged together is implemented in {@code graphql.validation.rules.OverlappingFieldsCanBeMerged}
*/
@PublicApi
public class MergedField {

    private final List<Field> fields;

    private MergedField(List<Field> fields) {
        assertNotEmpty(fields);
        // defensive copy: a MergedField is immutable once built
        this.fields = new ArrayList<>(fields);
    }

    /**
     * All merged fields have the same name.
     *
     * WARNING: This is not always the key in the execution result, because of possible aliases. See {@link #getResultKey()}
     *
     * @return the name of the merged fields.
     */
    public String getName() {
        return fields.get(0).getName();
    }

    /**
     * Returns the key of this MergedField for the overall result.
     * This is either an alias or the field name.
     *
     * @return the key for this MergedField.
     */
    public String getResultKey() {
        Field singleField = getSingleField();
        if (singleField.getAlias() != null) {
            return singleField.getAlias();
        }
        return singleField.getName();
    }

    /**
     * The first of the merged fields.
     *
     * Because all merged fields are almost identical,
     * often only one of them is used.
     *
     * @return the first of the merged Fields
     */
    public Field getSingleField() {
        return fields.get(0);
    }

    /**
     * All merged fields share the same arguments.
     *
     * @return the list of arguments
     */
    public List<Argument> getArguments() {
        return getSingleField().getArguments();
    }

    /**
     * All merged fields
     *
     * @return all merged fields, as a defensive copy
     */
    public List<Field> getFields() {
        return new ArrayList<>(fields);
    }

    public static Builder newMergedField() {
        return new Builder();
    }

    public static Builder newMergedField(Field field) {
        return new Builder().addField(field);
    }

    public static Builder newMergedField(List<Field> fields) {
        return new Builder().fields(fields);
    }

    /**
     * Creates a copy of this MergedField modified by the given consumer.
     *
     * @param builderConsumer receives a Builder pre-populated with this field's state
     * @return the transformed MergedField
     */
    public MergedField transform(Consumer<Builder> builderConsumer) {
        Builder builder = new Builder(this);
        builderConsumer.accept(builder);
        return builder.build();
    }

    public static class Builder {

        private List<Field> fields = new ArrayList<>();

        private Builder() {
        }

        private Builder(MergedField existing) {
            this.fields = existing.getFields();
        }

        public Builder fields(List<Field> fields) {
            // BUGFIX: copy the list so a later addField() cannot mutate the caller's collection
            this.fields = new ArrayList<>(fields);
            return this;
        }

        public Builder addField(Field field) {
            this.fields.add(field);
            return this;
        }

        public MergedField build() {
            return new MergedField(fields);
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        MergedField that = (MergedField) o;
        return fields.equals(that.fields);
    }

    @Override
    public int hashCode() {
        return Objects.hash(fields);
    }

    @Override
    public String toString() {
        return "MergedField{" +
                "fields=" + fields +
                '}';
    }
}
|
package io.github.lukehutch.fastclasspathscanner.scanner;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import io.github.lukehutch.fastclasspathscanner.FastClasspathScanner;
import io.github.lukehutch.fastclasspathscanner.scanner.ClasspathResourceQueueProcessor.ClasspathResourceProcessor;
import io.github.lukehutch.fastclasspathscanner.utils.LoggedThread;
/**
 * Orchestrates a multi-threaded classpath scan: one thread recursively scans the
 * classpath, several worker threads parse classfile binary headers, and a final
 * thread links the parsed classes into a ScanResult and calls MatchProcessors.
 * Work queues are terminated with poison-pill sentinel objects.
 */
public class ScanExecutor {
    /**
     * Scan the classpath, and call any MatchProcessors on files or classes that match.
     *
     * @param scanSpec what to scan and match
     * @param classpathElts classpath elements (dirs/jars) to scan
     * @param executorService executor the scan tasks are submitted to
     * @param numParallelTasks total parallel tasks; two are reserved for scanning
     *            and graph building, the rest parse classfiles
     * @return a Future producing the ScanResult
     */
    public static Future<ScanResult> scan(final ScanSpec scanSpec, final List<File> classpathElts,
            final ExecutorService executorService, final int numParallelTasks) {
        // Get classpath elements
        final long scanStart = System.nanoTime();

        // Two threads are used for recursive scanning and class graph building, the rest are used as
        // worker threads for parallel classfile binary format parsing
        int numWorkerThreads = Math.max(1, numParallelTasks - 2);
        final List<Future<Void>> futures = new ArrayList<>(numWorkerThreads);

        // Recursively scan classpath
        // The output of the recursive scan for files that matched requested criteria.
        final LinkedBlockingQueue<ClasspathResource> matchingFiles = new LinkedBlockingQueue<>();
        // The output of the recursive scan for classfiles that matched requested criteria.
        final LinkedBlockingQueue<ClasspathResource> matchingClassfiles = new LinkedBlockingQueue<>();
        // End of queue marker (poison pill consumed by the worker threads)
        final ClasspathResource END_OF_CLASSPATH_RESOURCE_QUEUE = new ClasspathResource();
        // A map from a file to its timestamp at time of scan.
        final Map<File, Long> fileToTimestamp = new HashMap<>();
        // If any thread is interrupted (in particular by calling ScanResult#cancel(true), interrupt all of them
        final AtomicBoolean killAllThreads = new AtomicBoolean(false);
        // Start recursively scanning classpath
        futures.add(executorService.submit(new LoggedThread<Void>() {
            @Override
            public Void doWork() throws Exception {
                try {
                    // Scan classpath recursively
                    new RecursiveScanner(classpathElts, scanSpec, matchingFiles, matchingClassfiles,
                            fileToTimestamp, killAllThreads, log).scan();
                } finally {
                    if (Thread.currentThread().isInterrupted()) {
                        // Signal to other threads that they should shut down
                        killAllThreads.set(true);
                    }
                    // Place numWorkerThreads poison pills at end of work queues, whether or not this thread
                    // succeeds (so that the workers in the next stage do not get stuck blocking)
                    for (int i = 0; i < numWorkerThreads; i++) {
                        matchingClassfiles.add(END_OF_CLASSPATH_RESOURCE_QUEUE);
                        matchingFiles.add(END_OF_CLASSPATH_RESOURCE_QUEUE);
                    }
                }
                return null;
            }
        }));

        // Parse classfile binary headers in parallel, producing ClassInfoUnlinked object for each class
        // The output of the classfile binary parser
        final LinkedBlockingQueue<ClassInfoUnlinked> classInfoUnlinked = new LinkedBlockingQueue<>();
        // End of queue marker (poison pill consumed by the linker thread below)
        final ClassInfoUnlinked END_OF_CLASSINFO_UNLINKED_QUEUE = new ClassInfoUnlinked();
        // A map holding interned strings, to save memory.
        final ConcurrentHashMap<String, String> stringInternMap = new ConcurrentHashMap<>();
        // Start classfile parser threads -- these consume ClasspathResource objects from the matchingClassfiles
        // queue, and map them to ClassInfoUnlinked objects in the classInfoUnlinked queue.
        for (int i = 0; i < numWorkerThreads; i++) {
            // Create and start a new ClassfileBinaryParserCaller thread that consumes entries from
            // the classpathResourcesToScan queue and creates objects in the classInfoUnlinked queue
            futures.add(executorService.submit(new LoggedThread<Void>() {
                @Override
                public Void doWork() throws Exception {
                    try {
                        final ClassfileBinaryParser classfileBinaryParser = new ClassfileBinaryParser(scanSpec,
                                log);
                        ClasspathResourceQueueProcessor.processClasspathResourceQueue(matchingClassfiles,
                                END_OF_CLASSPATH_RESOURCE_QUEUE, new ClasspathResourceProcessor() {
                                    @Override
                                    public void processClasspathResource(final ClasspathResource classpathResource,
                                            final InputStream inputStream, final long inputStreamLength)
                                            throws IOException, InterruptedException {
                                        // Parse classpath binary format, creating a ClassInfoUnlinked object
                                        final ClassInfoUnlinked thisClassInfoUnlinked = classfileBinaryParser
                                                .readClassInfoFromClassfileHeader(classpathResource.relativePath,
                                                        inputStream,
                                                        scanSpec.getClassNameToStaticFinalFieldsToMatch(),
                                                        stringInternMap);
                                        // If class was successfully read, output new ClassInfoUnlinked object
                                        if (thisClassInfoUnlinked != null) {
                                            classInfoUnlinked.add(thisClassInfoUnlinked);
                                            // Log info about class
                                            thisClassInfoUnlinked.logTo(log);
                                        }
                                        if (killAllThreads.get() || Thread.currentThread().isInterrupted()) {
                                            throw new InterruptedException();
                                        }
                                    }
                                }, log);
                    } catch (final InterruptedException e) {
                        // Signal to other threads that they should shut down
                        killAllThreads.set(true);
                    } finally {
                        if (Thread.currentThread().isInterrupted()) {
                            killAllThreads.set(true);
                        }
                        // Place poison pill at end of work queues, whether or not this thread succeeds
                        // (so that the thread in the next stage does not get stuck blocking)
                        classInfoUnlinked.add(END_OF_CLASSINFO_UNLINKED_QUEUE);
                    }
                    return null;
                }
            }));
        }

        // Create ClassInfo object for each class; cross-link the ClassInfo objects with each other;
        // wait for worker thread completion; create ScanResult; call MatchProcessors; return ScanResult
        // Start final thread that creates cross-linked ClassInfo objects from each ClassInfoUnlinked object
        final Future<ScanResult> scanResult = executorService.submit(new LoggedThread<ScanResult>() {
            @Override
            public ScanResult doWork() throws InterruptedException, ExecutionException {
                try {
                    // Convert ClassInfoUnlinked to linked ClassInfo objects
                    final Map<String, ClassInfo> classNameToClassInfo = new HashMap<>();
                    // Each worker contributes exactly one poison pill; count them down to completion
                    for (int threadsStillRunning = numWorkerThreads; threadsStillRunning > 0;) {
                        final ClassInfoUnlinked c = classInfoUnlinked.take();
                        if (c == END_OF_CLASSINFO_UNLINKED_QUEUE) {
                            --threadsStillRunning;
                        } else {
                            // Create ClassInfo object from ClassInfoUnlinked object, and link into class graph
                            c.link(classNameToClassInfo);
                        }
                        if (Thread.currentThread().isInterrupted()) {
                            throw new InterruptedException();
                        }
                    }
                    // Barrier -- wait for worker thread completion (they should have already all completed
                    // if this line is reached).
                    for (int i = 0; i < futures.size(); i++) {
                        // Will throw ExecutionException if one of the other threads threw an uncaught exception.
                        // This is then passed back to the caller of this Future<ScanResult>#get()
                        futures.get(i).get();
                        if (killAllThreads.get() || Thread.currentThread().isInterrupted()) {
                            throw new InterruptedException();
                        }
                    }
                    // Create the ScanResult, which builds the class graph.
                    // (ClassMatchProcessors need access to the class graph to find matching classes.)
                    final ScanResult scanResult = new ScanResult(scanSpec, classNameToClassInfo, fileToTimestamp,
                            log);
                    // Call MatchProcessors
                    final long startMatchProcessors = System.nanoTime();
                    scanSpec.callMatchProcessors(scanResult, matchingFiles, END_OF_CLASSPATH_RESOURCE_QUEUE,
                            classNameToClassInfo, log);
                    if (FastClasspathScanner.verbose) {
                        log.log(1, "Finished calling MatchProcessors", System.nanoTime() - startMatchProcessors);
                        log.log("Finished scan", System.nanoTime() - scanStart);
                    }
                    return scanResult;
                } finally {
                    // In case an exception was thrown in this thread, or an ExecutionException is thrown
                    // during the futures.get(i).get() call, kill the other threads.
                    killAllThreads.set(true);
                    for (int i = 0; i < futures.size(); i++) {
                        // cancel(true) will do nothing if the Future has already completed
                        futures.get(i).cancel(true);
                    }
                }
            }
        });
        return scanResult;
    }
}
|
package graphql.util;
import graphql.PublicApi;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Traversal context.
 *
 * It provides context while traversing, and is also used to return an accumulated value
 * ({@link #setAccumulate(Object)}).
 *
 * There is always a "fake" root context with null node, null parent, null position. See {@link #isRootContext()}
 *
 * @param <T> type of tree node
 */
@PublicApi
public interface TraverserContext<T> {
/**
 * Returns current node being visited
 *
 * @return current node traverser is visiting. Is null for the root context
 */
T thisNode();
/**
 * Change the current node to the provided node. Only applicable in enter.
 *
 * Useful when the tree should be changed while traversing.
 *
 * Also: changing a node makes only a difference when it has different children than the current one.
 *
 * @param newNode the new Node
 */
void changeNode(T newNode);
/**
 * Returns parent context.
 * Effectively organizes Context objects in a linked list so
 * by following {@link #getParentContext() } links one could obtain
 * the current path as well as the variables {@link #getVar(java.lang.Class) }
 * stored in every parent context.
 *
 * @return context associated with the node parent
 */
TraverserContext<T> getParentContext();
/**
 * The list of parent nodes starting from the current parent.
 *
 * @return list of parent nodes
 */
List<T> getParentNodes();
/**
 * The position of the current node regarding to the parent node.
 *
 * @return the position or null if this node is a root node
 */
NodePosition getPosition();
/**
 * Informs that the current node has been already "visited"
 *
 * @return {@code true} if a node had been already visited
 */
boolean isVisited();
/**
 * Obtains all nodes visited so far via the {@link TraverserVisitor#enter(graphql.util.TraverserContext) }
 * method
 *
 * @return a set containing all nodes visited for the first time
 */
Set<T> visitedNodes();
/**
 * Obtains a context local variable
 *
 * @param <S> type of the variable
 * @param key key to lookup the variable value
 *
 * @return a variable value or {@code null}
 */
<S> S getVar(Class<? super S> key);
/**
 * Searches for a context variable starting from the parent
 * up the hierarchy of contexts until the first variable is found.
 *
 * @param <S> type of the variable
 * @param key key to lookup the variable value
 *
 * @return a variable value or {@code null}
 */
<S> S getVarFromParents(Class<? super S> key);
/**
 * Stores a variable in the context
 *
 * @param <S> type of the variable
 * @param key key to create bindings for the variable
 * @param value value of variable
 *
 * @return this context to allow operations chaining
 */
<S> TraverserContext<T> setVar(Class<? super S> key, S value);
/**
 * Sets the new accumulate value.
 *
 * Can be retrieved afterwards via {@link #getNewAccumulate()}.
 *
 * @param accumulate to set
 */
void setAccumulate(Object accumulate);
/**
 * The new accumulate value, previously set by {@link #setAccumulate(Object)},
 * or the value of {@link #getCurrentAccumulate()} if {@link #setAccumulate(Object)} was not invoked.
 *
 * @param <U> type of the accumulate value
 *
 * @return the new accumulate value
 */
<U> U getNewAccumulate();
/**
 * The current accumulate value used as "input" for the current step.
 *
 * @param <U> type of the accumulate value
 *
 * @return the current accumulate value
 */
<U> U getCurrentAccumulate();
/**
 * Used to share something across all TraverserContext.
 *
 * @param <U> type of the shared context data
 *
 * @return contextData
 */
<U> U getSharedContextData();
/**
 * Returns true for the root context, which doesn't have a node or a position.
 *
 * @return true for the root context, otherwise false
 */
boolean isRootContext();
/**
 * In case of leave returns the children contexts, which have already been visited.
 *
 *
 * @return the children contexts. If the children are a simple list the key is null.
 */
Map<String, List<TraverserContext<T>>> getChildrenContexts();
}
|
package hex.deeplearning;
import static water.util.MRUtils.sampleFrame;
import static water.util.MRUtils.sampleFrameStratified;
import hex.FrameTask;
import hex.FrameTask.DataInfo;
import water.H2O;
import water.Job;
import water.Key;
import water.UKV;
import water.api.DeepLearningProgressPage;
import water.api.DocGen;
import water.api.RequestServer;
import water.fvec.Frame;
import water.fvec.RebalanceDataSet;
import water.fvec.Vec;
import water.util.Log;
import water.util.MRUtils;
import water.util.RString;
import water.util.Utils;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.Random;
/**
* Deep Learning Neural Net implementation based on MRTask2
*/
public class DeepLearning extends Job.ValidatedJob {
// Auto-generated REST API / documentation plumbing (see DocGen)
static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
public static DocGen.FieldDoc[] DOC_FIELDS;
public static final String DOC_GET = "Deep Learning";
@API(help = "Model checkpoint to resume training with.", filter= Default.class, json = true, gridable = false)
public Key checkpoint;
@API(help = "Enable expert mode (to access all options from GUI)", filter = Default.class, json = true, gridable = false)
public boolean expert_mode = false;
/*Neural Net Topology*/
@API(help = "Activation function", filter = Default.class, json = true)
public Activation activation = Activation.Tanh;
@API(help = "Hidden layer sizes (e.g. 100,100). Grid search: (10,10), (20,20,20)", filter = Default.class, json = true)
public int[] hidden = new int[] { 200, 200 };
@API(help = "How many times the dataset should be iterated (streamed), can be fractional", filter = Default.class, dmin = 1e-3, json = true)
public double epochs = 10;
@API(help = "Number of training samples (globally) per MapReduce iteration. Special values are 0: one epoch, -1: all available data (e.g., replicated training data)", filter = Default.class, lmin = -1, json = true)
public long train_samples_per_iteration = 10000l;
// Resolved from train_samples_per_iteration at training time (see computeTrainSamplesPerIteration); not user-settable
public long actual_train_samples_per_iteration;
@API(help = "Seed for random numbers (affects sampling) - Note: only reproducible when running single threaded", filter = Default.class, json = true)
public long seed = new Random().nextLong();
/*Adaptive Learning Rate*/
@API(help = "Adaptive learning rate (ADADELTA)", filter = Default.class, json = true)
public boolean adaptive_rate = true;
@API(help = "Adaptive learning rate time decay factor (similarity to prior updates)", filter = Default.class, dmin = 0.01, dmax = 1, json = true)
public double rho = 0.95;
@API(help = "Adaptive learning rate smoothing factor (to avoid divisions by zero and allow progress)", filter = Default.class, dmin = 1e-15, dmax = 1, json = true)
public double epsilon = 1e-6;
/*Learning Rate*/
@API(help = "Learning rate (higher => less stable, lower => slower convergence)", filter = Default.class, dmin = 1e-10, dmax = 1, json = true)
public double rate = .005;
@API(help = "Learning rate annealing: rate / (1 + rate_annealing * samples)", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double rate_annealing = 1 / 1e6;
@API(help = "Learning rate decay factor between layers (N-th layer: rate*alpha^(N-1))", filter = Default.class, dmin = 0, json = true)
public double rate_decay = 1.0;
/*Momentum*/
@API(help = "Initial momentum at the beginning of training (try 0.5)", filter = Default.class, dmin = 0, dmax = 0.9999999999, json = true)
public double momentum_start = 0;
@API(help = "Number of training samples for which momentum increases", filter = Default.class, lmin = 1, json = true)
public long momentum_ramp = 1000000;
@API(help = "Final momentum after the ramp is over (try 0.99)", filter = Default.class, dmin = 0, dmax = 0.9999999999, json = true)
public double momentum_stable = 0;
@API(help = "Use Nesterov accelerated gradient (recommended)", filter = Default.class, json = true)
public boolean nesterov_accelerated_gradient = true;
/*Regularization*/
@API(help = "Input layer dropout ratio (can improve generalization, try 0.1 or 0.2)", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double input_dropout_ratio = 0.0;
@API(help = "Hidden layer dropout ratios (can improve generalization), specify one value per hidden layer, defaults to 0.5", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double[] hidden_dropout_ratios;
@API(help = "L1 regularization (can add stability and improve generalization, causes many weights to become 0)", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double l1 = 0.0;
@API(help = "L2 regularization (can add stability and improve generalization, causes many weights to be small", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double l2 = 0.0;
@API(help = "Constraint for squared sum of incoming weights per unit (e.g. for Rectifier)", filter = Default.class, dmin = 1e-10, json = true)
public float max_w2 = Float.POSITIVE_INFINITY;
/*Initialization*/
@API(help = "Initial Weight Distribution", filter = Default.class, json = true)
public InitialWeightDistribution initial_weight_distribution = InitialWeightDistribution.UniformAdaptive;
@API(help = "Uniform: -value...value, Normal: stddev)", filter = Default.class, dmin = 0, json = true)
public double initial_weight_scale = 1.0;
@API(help = "Loss function", filter = Default.class, json = true)
public Loss loss = Loss.CrossEntropy;
/*Scoring*/
@API(help = "Shortest time interval (in secs) between model scoring", filter = Default.class, dmin = 0, json = true)
public double score_interval = 5;
@API(help = "Number of training set samples for scoring (0 for all)", filter = Default.class, lmin = 0, json = true)
public long score_training_samples = 10000l;
@API(help = "Number of validation set samples for scoring (0 for all)", filter = Default.class, lmin = 0, json = true)
public long score_validation_samples = 0l;
@API(help = "Maximum duty cycle fraction for scoring (lower: more training, higher: more scoring).", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double score_duty_cycle = 0.1;
@API(help = "Stopping criterion for classification error fraction on training data (-1 to disable)", filter = Default.class, dmin=-1, dmax=1, json = true, gridable = false)
public double classification_stop = 0;
@API(help = "Stopping criterion for regression error (MSE) on training data (-1 to disable)", filter = Default.class, dmin=-1, json = true, gridable = false)
public double regression_stop = 1e-6;
@API(help = "Enable quiet mode for less output to standard output", filter = Default.class, json = true, gridable = false)
public boolean quiet_mode = false;
@API(help = "Max. size (number of classes) for confusion matrices to be shown", filter = Default.class, json = true, gridable = false)
public int max_confusion_matrix_size = 20;
@API(help = "Max. number (top K) of predictions to use for hit ratio computation (for multi-class only, 0 to disable)", filter = Default.class, lmin=0, json = true, gridable = false)
public int max_hit_ratio_k = 10;
/*Imbalanced Classes*/
@API(help = "Balance training data class counts via over/under-sampling (for imbalanced data)", filter = Default.class, json = true, gridable = false)
public boolean balance_classes = false;
@API(help = "Maximum relative size of the training data after balancing class counts (can be less than 1.0)", filter = Default.class, json = true, dmin=1e-3, gridable = false)
public float max_after_balance_size = 5.0f;
@API(help = "Method used to sample validation dataset for scoring", filter = Default.class, json = true, gridable = false)
public ClassSamplingMethod score_validation_sampling = ClassSamplingMethod.Uniform;
/*Misc*/
@API(help = "Enable diagnostics for hidden layers", filter = Default.class, json = true, gridable = false)
public boolean diagnostics = true;
@API(help = "Compute variable importances for input features (Gedeon method) - can be slow for large networks", filter = Default.class, json = true)
public boolean variable_importances = true;
@API(help = "Enable fast mode (minor approximation in back-propagation)", filter = Default.class, json = true)
public boolean fast_mode = true;
@API(help = "Ignore constant training columns (no information can be gained anyway)", filter = Default.class, json = true)
public boolean ignore_const_cols = true;
@API(help = "Force extra load balancing to increase training speed for small datasets (to keep all cores busy)", filter = Default.class, json = true)
public boolean force_load_balance = true;
@API(help = "Replicate the entire training dataset onto every node for faster training on small datasets", filter = Default.class, json = true)
public boolean replicate_training_data = true;
@API(help = "Run on a single node for fine-tuning of model parameters", filter = Default.class, json = true)
public boolean single_node_mode = false;
@API(help = "Enable shuffling of training data (recommended if training data is replicated and train_samples_per_iteration is close to #nodes x #rows)", filter = Default.class, json = true)
public boolean shuffle_training_data = false;
/** Sampling strategies for building the validation scoring dataset (see score_validation_sampling). */
public enum ClassSamplingMethod {
Uniform, Stratified
}
/** Distributions available for initializing the network weights (see initial_weight_distribution). */
public enum InitialWeightDistribution {
UniformAdaptive, Uniform, Normal
}
/**
 * Activation functions
 * (the *WithDropout variants enable hidden-layer dropout; see hidden_dropout_ratios)
 */
public enum Activation {
Tanh, TanhWithDropout, Rectifier, RectifierWithDropout, Maxout, MaxoutWithDropout
}
/**
 * Loss functions
 * CrossEntropy is recommended for classification; MeanSquare is enforced for regression (see checkParams)
 */
public enum Loss {
MeanSquare, CrossEntropy
}
// the following parameters can only be specified in expert mode
// (note: "max_after_balance_size" was previously listed twice; the duplicate has been removed)
transient final String [] expert_options = new String[] {
"loss",
"max_w2",
"warmup_samples",
"score_training_samples",
"score_validation_samples",
"initial_weight_distribution",
"initial_weight_scale",
"diagnostics",
"rate_decay",
"score_duty_cycle",
"variable_importances",
"fast_mode",
"score_validation_sampling",
"balance_classes",
"max_after_balance_size",
"ignore_const_cols",
"force_load_balance",
"replicate_training_data",
"shuffle_training_data",
"nesterov_accelerated_gradient",
"classification_stop",
"regression_stop",
"quiet_mode",
"max_confusion_matrix_size",
"max_hit_ratio_k",
"hidden_dropout_ratios",
"single_node_mode",
};
// the following parameters can be modified when restarting from a checkpoint
// (all other parameters are taken verbatim from the checkpointed model; see execImpl())
transient final String [] cp_modifiable = new String[] {
"expert_mode",
"seed",
"epochs",
"score_interval",
"train_samples_per_iteration",
"score_duty_cycle",
"classification_stop",
"regression_stop",
"quiet_mode",
"max_confusion_matrix_size",
"max_hit_ratio_k",
"diagnostics",
"variable_importances",
"force_load_balance",
"replicate_training_data",
"shuffle_training_data",
"single_node_mode",
};
/**
 * Helper to specify which arguments trigger a refresh on change
 * @param ver API version being registered
 */
@Override
protected void registered(RequestServer.API_VERSION ver) {
    super.registered(ver);
    // Arguments whose value affects which other options are visible/enabled,
    // so changing them must re-render the query form.
    final java.util.List<String> refreshTriggers = Arrays.asList(
            "activation", "initial_weight_distribution", "expert_mode", "adaptive_rate",
            "replicate_training_data", "balance_classes", "checkpoint");
    for (Argument arg : _arguments) {
        if (refreshTriggers.contains(arg._name)) {
            arg.setRefreshOnChange();
        }
    }
}
/**
 * Helper to handle arguments based on existing input values.
 * Disables (and in some cases resets) query-form arguments that do not apply
 * given the values the user has already entered.
 * @param arg argument to (potentially) disable
 * @param inputArgs user-specified arguments so far
 */
@Override protected void queryArgumentValueSet(Argument arg, java.util.Properties inputArgs) {
super.queryArgumentValueSet(arg, inputArgs);
// When restarting from a checkpoint, every parameter not in cp_modifiable is taken
// from the checkpointed model and therefore disabled in the form.
if (!arg._name.equals("checkpoint") && !Utils.contains(cp_modifiable, arg._name)) {
if (checkpoint != null) {
arg.disable("Taken from model checkpoint.");
final DeepLearningModel cp_model = UKV.get(checkpoint);
if (cp_model == null) {
throw new IllegalArgumentException("Checkpointed model was not found.");
}
if (cp_model.model_info().unstable()) {
throw new IllegalArgumentException("Checkpointed model was unstable. Not restarting.");
}
return;
}
}
// UniformAdaptive chooses its own scale, so a user-given initial_weight_scale is ignored.
if(arg._name.equals("initial_weight_scale") &&
(initial_weight_distribution == InitialWeightDistribution.UniformAdaptive)
) {
arg.disable("Using sqrt(6 / (# units + # units of previous layer)) for Uniform distribution.", inputArgs);
}
// Regression always uses MeanSquare loss.
if(arg._name.equals("loss") && !classification) {
arg.disable("Using MeanSquare loss for regression.", inputArgs);
loss = Loss.MeanSquare;
}
if (classification) {
// Classification: regression-only options are disabled.
if(arg._name.equals("regression_stop")) {
arg.disable("Only for regression.", inputArgs);
}
if(arg._name.equals("max_after_balance_size") && !balance_classes) {
arg.disable("Requires balance_classes.", inputArgs);
}
}
else {
// Regression: classification-only options are disabled.
if(arg._name.equals("classification_stop")
|| arg._name.equals("max_confusion_matrix_size")
|| arg._name.equals("max_hit_ratio_k")
|| arg._name.equals("max_after_balance_size")
|| arg._name.equals("balance_classes")) {
arg.disable("Only for classification.", inputArgs);
}
if (validation != null && arg._name.equals("score_validation_sampling")) {
score_validation_sampling = ClassSamplingMethod.Uniform;
arg.disable("Using uniform sampling for validation scoring dataset.", inputArgs);
}
}
if ((arg._name.equals("score_validation_samples") || arg._name.equals("score_validation_sampling")) && validation == null) {
arg.disable("Requires a validation data set.", inputArgs);
}
if (Utils.contains(expert_options, arg._name) && !expert_mode) {
arg.disable("Only in expert mode.", inputArgs);
}
// Adaptive learning rate (ADADELTA) and manual rate/momentum settings are mutually exclusive:
// whichever mode is off has its parameters disabled and zeroed.
if (!adaptive_rate) {
if (arg._name.equals("rho") || arg._name.equals("epsilon")) {
arg.disable("Only for adaptive learning rate.", inputArgs);
rho = 0;
epsilon = 0;
}
} else {
if (arg._name.equals("rate") || arg._name.equals("rate_annealing") || arg._name.equals("rate_decay") || arg._name.equals("nesterov_accelerated_gradient")
|| arg._name.equals("momentum_start") || arg._name.equals("momentum_ramp") || arg._name.equals("momentum_stable") ) {
arg.disable("Only for non-adaptive learning rate.", inputArgs);
momentum_start = 0;
momentum_stable = 0;
}
}
if (arg._name.equals("hidden_dropout_ratios")) {
if (activation != Activation.TanhWithDropout && activation != Activation.MaxoutWithDropout && activation != Activation.RectifierWithDropout) {
arg.disable("Only for activation functions with dropout.", inputArgs);
}
}
// Multi-node-only options degrade gracefully on a single-node cloud.
if (arg._name.equals("replicate_training_data") && (H2O.CLOUD.size() == 1)) {
arg.disable("Only for multi-node operation.");
replicate_training_data = false;
}
if (arg._name.equals("single_node_mode") && (H2O.CLOUD.size() == 1 || !replicate_training_data)) {
arg.disable("Only for multi-node operation with replication.");
single_node_mode = false;
}
}
/** Render the model parameters as a JSON box in the HTML view. */
@Override public boolean toHTML(StringBuilder sb) {
    final boolean rendered = makeJsonBox(sb);
    return rendered;
}
/**
 * Return a query link to this page
 * @param k Model Key
 * @param content Link text
 * @return HTML Link
 */
public static String link(Key k, String content) {
    // No checkpoint, response column or validation set: delegate to the full overload.
    return link(k, content, /*cp*/ null, /*response*/ null, /*val*/ null);
}
/**
 * Return a query link to this page
 * @param k Model Key
 * @param content Link text
 * @param cp Key to checkpoint to continue training with (optional)
 * @param response Response
 * @param val Validation data set key
 * @return HTML Link
 */
public static String link(Key k, String content, Key cp, String response, Key val) {
    DeepLearning req = new DeepLearning();
    // Assemble the href template; optional query parameters are only appended when present.
    final StringBuilder template = new StringBuilder();
    template.append("<a href='").append(req.href()).append(".query?source=%$key");
    if (cp != null) template.append("&checkpoint=%$cp");
    if (response != null) template.append("&response=%$resp");
    if (val != null) template.append("&validation=%$valkey");
    template.append("'>%content</a>");
    // Substitute the placeholders via RString.
    RString rs = new RString(template.toString());
    rs.replace("key", k.toString());
    rs.replace("content", content);
    if (cp != null) rs.replace("cp", cp.toString());
    if (response != null) rs.replace("resp", response);
    if (val != null) rs.replace("valkey", val);
    return rs.toString();
}
/**
 * Report the relative progress of building a Deep Learning model (measured by how many epochs are done)
 * @return floating point number between 0 and 1
 */
@Override public float progress(){
    // Fetch the model once (the original code fetched it twice from the distributed K/V store,
    // which is redundant work and racy: the model could appear/disappear between the two gets).
    final DeepLearningModel m = UKV.get(dest());
    if (m != null && m.model_info() != null) {
        // Fraction of requested epochs completed, capped at 1.
        return (float) Math.min(1, (m.epoch_counter / m.model_info().get_params().epochs));
    }
    return 0;
}
/**
 * Train a Deep Learning model, assumes that all members are populated.
 * Either builds a fresh model or, if a checkpoint is given, validates that the
 * checkpoint is compatible, copies over the user-modifiable parameters, and resumes.
 */
@Override
public final void execImpl() {
DeepLearningModel cp;
if (checkpoint == null) cp = initModel();
else {
// Resume from a checkpointed model: clone it under this job's destination key.
final DeepLearningModel previous = UKV.get(checkpoint);
if (previous == null) throw new IllegalArgumentException("Checkpoint not found.");
epochs += previous.epoch_counter; //add new epochs to existing model
Log.info("Adding " + String.format("%.3f", previous.epoch_counter) + " epochs from the checkpointed model.");
cp = new DeepLearningModel(previous, destination_key, job_key);
cp.model_info().get_params().state = JobState.RUNNING;
try {
Log.info("Resuming from checkpoint.");
cp.write_lock(self());
assert(state==JobState.RUNNING);
// The data setup must be identical to the checkpointed run: same source, response,
// ignored columns and validation frame (keys compared byte-wise).
if (source == null || !Arrays.equals(source._key._kb, previous.model_info().get_params().source._key._kb)) {
throw new IllegalArgumentException("source must be the same as for the checkpointed model.");
}
if (response == null || !Arrays.equals(response._key._kb, previous.model_info().get_params().response._key._kb)) {
throw new IllegalArgumentException("response must be the same as for the checkpointed model.");
}
if (Utils.difference(ignored_cols, previous.model_info().get_params().ignored_cols).length != 0) {
throw new IllegalArgumentException("ignored_cols must be the same as for the checkpointed model.");
}
if ((validation!=null) != (previous.model_info().get_params().validation != null)
|| (validation != null && !Arrays.equals(validation._key._kb, previous.model_info().get_params().validation._key._kb))) {
throw new IllegalArgumentException("validation must be the same as for the checkpointed model.");
}
// The problem type follows the checkpointed model, not this job's setting.
if (classification != previous.model_info().get_params().classification) {
Log.warn("Automatically switching to " + ((classification=!classification) ? "classification" : "regression") + " (same as the checkpointed model).");
}
// Reflectively copy the user-modifiable (cp_modifiable) parameters from this job (B)
// onto the restored model's parameters (A); expert options only if expert_mode is on.
final DeepLearning mp = cp.model_info().get_params();
Object A = mp, B = this;
for (Field fA : A.getClass().getDeclaredFields()) {
if (Utils.contains(cp_modifiable, fA.getName())) {
if (!expert_mode && Utils.contains(expert_options, fA.getName())) continue;
for (Field fB : B.getClass().getDeclaredFields()) {
if (fA.equals(fB)) {
try {
// Compare via toString to detect a user-requested change in value.
if (!fA.get(A).toString().equals(fB.get(B).toString())) {
Log.info("Applying user-requested modification of '" + fA.getName() + "': " + fA.get(A) + " -> " + fB.get(B));
fA.set(A, fB.get(B));
}
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
}
}
cp.update(self());
} finally {
cp.unlock(self());
}
}
trainModel(cp);
delete();
}
/**
 * Redirect to the progress page of the model that is trained by this job.
 * @return Response redirecting to DeepLearningProgressPage
 */
@Override protected Response redirect() {
return DeepLearningProgressPage.redirect(this, self(), dest());
}
// true if checkParams() registered a placeholder job entry; removed again in delete()
private boolean _fakejob;
//Sanity check for Deep Learning job parameters
private void checkParams() {
if (source.numCols() <= 1)
throw new IllegalArgumentException("Training data must have at least 2 features (incl. response).");
if (hidden == null) throw new IllegalArgumentException("There must be at least one hidden layer.");
for (int i=0;i<hidden.length;++i) {
if (hidden[i]==0)
throw new IllegalArgumentException("Hidden layer size must be >0.");
}
//Auto-fill defaults
if (hidden_dropout_ratios == null) {
hidden_dropout_ratios = new double[hidden.length];
// Default of 0.5 only applies to dropout-enabled activation functions; otherwise stays 0.
if (activation == Activation.TanhWithDropout || activation == Activation.MaxoutWithDropout || activation == Activation.RectifierWithDropout) {
Arrays.fill(hidden_dropout_ratios, 0.5);
}
}
else if (hidden_dropout_ratios.length != hidden.length) throw new IllegalArgumentException("Must have " + hidden.length + " hidden layer dropout ratios.");
// Regression always uses MeanSquare loss.
if(!classification && loss != Loss.MeanSquare) {
Log.warn("Setting loss to MeanSquare for regression.");
loss = Loss.MeanSquare;
}
// make default job_key and destination_key in case they are missing
if (dest() == null) {
destination_key = Key.make();
}
if (self() == null) {
job_key = Key.make();
}
// If this job is not yet in the K/V store, register it as a (fake) running job.
if (UKV.get(self()) == null) {
start_time = System.currentTimeMillis();
state = JobState.RUNNING;
UKV.put(self(), this);
_fakejob = true;
}
}
/**
 * Create an initial Deep Learning model, typically to be trained by trainModel(model)
 * @return Randomly initialized model
 */
public final DeepLearningModel initModel() {
try {
// Lock the input frames for the duration of model setup.
lock_data();
checkParams();
// Classification with a non-enum response: the adapted response vector is an extra
// (converted) vector that must be deleted together with the model (see toDelete below).
final boolean del_enum_resp = (classification && !response.isEnum());
final Frame train = FrameTask.DataInfo.prepareFrame(source, response, ignored_cols, classification, ignore_const_cols, true /*drop >20% NA cols*/);
final DataInfo dinfo = new FrameTask.DataInfo(train, 1, true, !classification);
final Vec resp = dinfo._adaptedFrame.lastVec();
assert(!classification ^ resp.isEnum()); //either regression or enum response
// Relative class distribution of the response; only needed for classification.
float[] priorDist = classification ? new MRUtils.ClassDist(resp).doAll(resp).rel_dist() : null;
final DeepLearningModel model = new DeepLearningModel(dest(), self(), source._key, dinfo, this, priorDist);
model.model_info().initializeMembers();
if (del_enum_resp) model.toDelete(resp._key);
return model;
}
finally {
unlock_data();
}
}
/**
 * Incrementally train an existing model
 * @param model Initial model
 * @param epochs How many (additional) epochs to train for
 * @return Updated model
 */
public final DeepLearningModel trainModel(DeepLearningModel model, double epochs) {
    // Extend the model's own epoch budget, then run the regular training loop.
    final DeepLearning params = model.model_info().get_params();
    params.epochs += epochs;
    return trainModel(model);
}
/**
 * Helper to update a Frame and add it to the local trash at the same time
 * @param target Frame reference, to be overwritten
 * @param src Newly made frame, to be deleted via local trash
 * @return src
 */
Frame updateFrame(Frame target, Frame src) {
    final boolean isReplacement = (src != target);
    if (isReplacement) {
        // Schedule the replacement frame for cleanup when the job finishes.
        ltrash(src);
    }
    return src;
}
/**
 * Train a Deep Learning neural net model
 * @param model Input model (e.g., from initModel(), or from a previous training run)
 * @return Trained model
 */
public final DeepLearningModel trainModel(DeepLearningModel model) {
Frame validScoreFrame = null;
Frame train, trainScoreFrame;
try {
lock_data();
if (checkpoint == null) logStart(); //if checkpoint is given, some Job's params might be uninitialized (but the restarted model's parameters are correct)
if (model == null) {
model = UKV.get(dest());
}
model.write_lock(self());
final DeepLearning mp = model.model_info().get_params(); //use the model's parameters for everything below - NOT the job's parameters (can be different after checkpoint restart)
prepareValidationWithModel(model);
final long model_size = model.model_info().size();
Log.info("Number of model parameters (weights/biases): " + String.format("%,d", model_size));
// Log.info("Memory usage of the model: " + String.format("%.2f", (double)model_size*Float.SIZE / (1<<23)) + " MB.");
train = model.model_info().data_info()._adaptedFrame;
if (mp.force_load_balance) train = updateFrame(train, reBalance(train, mp.replicate_training_data /*rebalance into only 4*cores per node*/));
// train = updateFrame(train, reBalance(train, mp.seed, mp.replicate_training_data, mp.force_load_balance, mp.shuffle_training_data));
float[] trainSamplingFactors;
// Optionally over/under-sample the training data to balance class counts.
if (mp.classification && mp.balance_classes) {
trainSamplingFactors = new float[train.lastVec().domain().length]; //leave initialized to 0 -> will be filled up below
train = updateFrame(train, sampleFrameStratified(
train, train.lastVec(), trainSamplingFactors, (long)(mp.max_after_balance_size*train.numRows()), mp.seed, true, false));
model.setModelClassDistribution(new MRUtils.ClassDist(train.lastVec()).doAll(train.lastVec()).rel_dist());
}
model.training_rows = train.numRows();
trainScoreFrame = sampleFrame(train, mp.score_training_samples, mp.seed); //training scoring dataset is always sampled uniformly from the training dataset
if (train != trainScoreFrame) ltrash(trainScoreFrame);
Log.info("Number of chunks of the training data: " + train.anyVec().nChunks());
if (validation != null) {
Frame adaptedValid = getValidation();
// Adapt the validation response for confusion-matrix computation if needed.
if (getValidAdaptor().needsAdaptation2CM()) {
adaptedValid.add(getValidAdaptor().adaptedValidationResponse(_responseName), getValidAdaptor().getAdaptedValidationResponse2CM());
}
// validation scoring dataset can be sampled in multiple ways from the given validation dataset
if (mp.classification && mp.balance_classes && mp.score_validation_sampling == ClassSamplingMethod.Stratified) {
validScoreFrame = updateFrame(adaptedValid, sampleFrameStratified(adaptedValid, adaptedValid.lastVec(), null,
mp.score_validation_samples > 0 ? mp.score_validation_samples : adaptedValid.numRows(), mp.seed+1, false /* no oversampling */, false));
} else {
validScoreFrame = updateFrame(adaptedValid, sampleFrame(adaptedValid, mp.score_validation_samples, mp.seed+1));
}
if (mp.force_load_balance) validScoreFrame = updateFrame(validScoreFrame, reBalance(validScoreFrame, false /*always split up globally since scoring should be distributed*/));
Log.info("Number of chunks of the validation data: " + validScoreFrame.anyVec().nChunks());
}
// Set train_samples_per_iteration size (cannot be done earlier since this depends on whether stratified sampling is done)
mp.actual_train_samples_per_iteration = computeTrainSamplesPerIteration(mp.train_samples_per_iteration, train.numRows(), mp.replicate_training_data, mp.single_node_mode);
// Determine whether shuffling is enforced
if(mp.replicate_training_data && (mp.actual_train_samples_per_iteration == train.numRows()*H2O.CLOUD.size()) && !mp.shuffle_training_data && H2O.CLOUD.size() > 1) {
Log.warn("Enabling training data shuffling, because all nodes train on the full dataset (replicated training data)");
mp.shuffle_training_data = true;
}
final float rowUsageFraction = computeRowUsageFraction(train.numRows(), mp.actual_train_samples_per_iteration, mp.replicate_training_data);
if (!mp.quiet_mode) Log.info("Initial model:\n" + model.model_info());
Log.info("Starting to train the Deep Learning model.");
//main loop: train one MapReduce iteration, then score; repeat until doScoring() says stop.
// Task choice: replicated data + single node -> DeepLearningTask2 on one node;
// replicated data + multi node -> DeepLearningTask2 on all nodes;
// distributed data -> DeepLearningTask over the frame.
do model.set_model_info(H2O.CLOUD.size() > 1 && mp.replicate_training_data ? ( mp.single_node_mode ?
new DeepLearningTask2(train, model.model_info(), rowUsageFraction).invoke(Key.make()).model_info() : //replicated data + single node mode
new DeepLearningTask2(train, model.model_info(), rowUsageFraction).invokeOnAllNodes().model_info() ) : //replicated data + multi-node mode
new DeepLearningTask(model.model_info(), rowUsageFraction).doAll(train).model_info()); //distributed data (always in multi-node mode)
while (model.doScoring(train, trainScoreFrame, validScoreFrame, self(), getValidAdaptor()));
Log.info("Finished training the Deep Learning model.");
return model;
}
catch(JobCancelledException ex) {
// Cancellation is not an error: return whatever model state was last stored.
Log.info("Deep Learning model building was cancelled.");
model = UKV.get(dest());
return model;
}
catch(Exception ex) {
ex.printStackTrace();
throw new RuntimeException(ex);
}
finally {
if (model != null) model.unlock(self());
unlock_data();
emptyLTrash();
}
}
/**
 * Lock the input datasets against deletes
 */
private void lock_data() {
    source.read_lock(self());
    // Only lock the validation frame when it is a distinct keyed frame from the training source.
    final boolean distinctValidation = validation != null && source._key != null
            && validation._key != null && !source._key.equals(validation._key);
    if (distinctValidation) {
        validation.read_lock(self());
    }
}
/**
 * Releases the read locks taken by lock_data(). The validation frame is
 * unlocked only when it is a distinct frame from the training source.
 */
private void unlock_data() {
  source.unlock(self());
  final boolean distinctValidation = validation != null
      && source._key != null
      && validation._key != null
      && !source._key.equals(validation._key);
  if (distinctValidation) {
    validation.unlock(self());
  }
}
/**
 * Deletes all job-related keys from the KV store. When a placeholder job
 * entry was registered for this run, its key is removed as well before the
 * job itself is deregistered.
 */
public void delete() {
  cleanup();
  if (_fakejob) {
    UKV.remove(job_key);
  }
  remove();
}
/**
 * Rebalance a frame into more chunks for better load balancing.
 *
 * @param fr Input frame
 * @param local whether to only create enough chunks to max out all cores on one node only
 * @return Frame that has potentially more chunks (the input frame if it already has enough)
 */
private Frame reBalance(final Frame fr, boolean local) {
  final int nodes = local ? 1 : H2O.CLOUD.size();
  // Aim for ~4 chunks per core, but never more chunks than rows.
  final int targetChunks = (int) Math.min(4 * H2O.NUMCPUS * nodes, fr.numRows());
  final int currentChunks = fr.anyVec().nChunks();
  if (currentChunks > targetChunks) {
    Log.info("Dataset already contains " + currentChunks + " chunks. No need to rebalance.");
    return fr;
  }
  Log.info("Starting load balancing into (at least) " + targetChunks + " chunks.");
  final Key balancedKey = fr._key != null ? Key.make(fr._key.toString() + ".balanced") : Key.make();
  final RebalanceDataSet task = new RebalanceDataSet(fr, balancedKey, targetChunks);
  H2O.submitTask(task);
  task.join();
  final Frame balanced = UKV.get(balancedKey);
  Log.info("Load balancing done.");
  return balanced;
}
/**
 * Compute the actual train_samples_per_iteration size from the user-given parameter.
 *
 * @param train_samples_per_iteration user-given train_samples_per_iteration size
 *        (0: one epoch, -1: as many as possible, otherwise a count &gt;= 1)
 * @param numRows number of training rows
 * @param replicate_training_data whether or not the training data is replicated on each node
 * @param single_node_mode whether or not the single node mode is enabled
 * @return The total number of training rows to be processed per iteration (summed over on all nodes)
 */
private static long computeTrainSamplesPerIteration(final long train_samples_per_iteration, final long numRows, final boolean replicate_training_data, final boolean single_node_mode) {
  long tspi = train_samples_per_iteration;
  assert (tspi == 0 || tspi == -1 || tspi >= 1);
  final boolean useOneEpoch = tspi == 0
      || (!replicate_training_data && (tspi == -1 || tspi > numRows))
      || (replicate_training_data && single_node_mode);
  if (useOneEpoch) {
    // Process exactly one epoch per iteration.
    Log.info("Setting train_samples_per_iteration (" + tspi + ") to one epoch: #rows (" + (tspi = numRows) + ").");
  } else if (tspi == -1 || tspi > H2O.CLOUD.size() * numRows) {
    // Cap at the absolute maximum: every node processes the full dataset.
    Log.info("Setting train_samples_per_iteration (" + tspi + ") to the largest possible number: #nodes x #rows (" + (tspi = H2O.CLOUD.size() * numRows) + ").");
  }
  assert (tspi != 0 && tspi != -1 && tspi >= 1);
  return tspi;
}
/**
 * Compute the fraction of rows that need to be used for training during one iteration.
 *
 * @param numRows number of training rows
 * @param train_samples_per_iteration number of training rows to be processed per iteration
 * @param replicate_training_data whether or not the training data is replicated on each node
 * @return fraction of rows to be used for training during one iteration, in (0, 1]
 */
private static float computeRowUsageFraction(final long numRows, long train_samples_per_iteration, boolean replicate_training_data) {
  float fraction = (float) train_samples_per_iteration / numRows;
  if (replicate_training_data) {
    // Each node sees the full dataset, so divide the per-iteration share across nodes.
    fraction /= H2O.CLOUD.size();
  }
  assert (fraction > 0 && fraction <= 1.);
  return fraction;
}
}
|
package io.reactivesw.catalog.category.domain.service;
import com.google.common.collect.Lists;
import io.reactivesw.catalog.category.application.model.Category;
import io.reactivesw.catalog.category.application.model.CategoryDraft;
import io.reactivesw.catalog.category.application.model.mapper.CategoryMapper;
import io.reactivesw.catalog.category.domain.entity.CategoryEntity;
import io.reactivesw.catalog.category.domain.service.update.CategoryUpdateService;
import io.reactivesw.catalog.category.infrastructure.repository.CategoryRepository;
import io.reactivesw.catalog.category.infrastructure.validator.CategoryNameValidator;
import io.reactivesw.common.exception.AlreadyExistException;
import io.reactivesw.common.exception.ConflictException;
import io.reactivesw.common.exception.NotExistException;
import io.reactivesw.common.model.PagedQueryResult;
import io.reactivesw.common.model.QueryConditions;
import io.reactivesw.common.model.Reference;
import io.reactivesw.common.model.UpdateAction;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.exception.ConstraintViolationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import javax.transaction.Transactional;
@Service
public class CategoryService {
  /**
   * log.
   */
  private static final Logger LOG = LoggerFactory.getLogger(CategoryService.class);
  /**
   * CategoryEntity Repository.
   */
  @Autowired
  private transient CategoryRepository categoryRepository;
  /**
   * Category update service that applies a single UpdateAction to an entity.
   */
  @Autowired
  private transient CategoryUpdateService updateService;

  /**
   * Create category.
   *
   * @param categoryDraft the category draft
   * @return the created category
   * @throws NotExistException if the referenced parent category does not exist
   * @throws AlreadyExistException if the slug already exists
   */
  @Transactional
  public Category createCategory(CategoryDraft categoryDraft) {
    LOG.debug("enter createCategory, CategoryDraft is {}", categoryDraft.toString());
    String parentId = "";
    List<String> ancestors = new ArrayList<>();
    Reference parentReference = categoryDraft.getParent();
    if (parentReference != null && StringUtils.isNotBlank(parentReference.getId())) {
      parentId = parentReference.getId();
      CategoryEntity parent = getParentCategory(parentId);
      ancestors = setAncestors(parentId, parent);
    }
    // Sibling categories (same parent) must not share a name.
    List<CategoryEntity> sameRootCategories = categoryRepository.queryCategoryByParent(parentId);
    CategoryNameValidator.validateEqual(categoryDraft.getName(), sameRootCategories);
    CategoryEntity entity = CategoryMapper.modelToEntity(categoryDraft, parentId, ancestors);
    CategoryEntity savedEntity = saveCategoryEntity(entity);
    Category category = CategoryMapper.entityToModel(savedEntity);
    LOG.debug("end createCategory, new CategoryEntity is: {}", category.toString());
    //TODO send message
    return category;
  }

  /**
   * Delete category by id and version, together with all of its descendants.
   *
   * @param id the id
   * @param version the expected version of the entity
   * @throws NotExistException if no category exists for the id
   * @throws ConflictException if the version does not match
   */
  @Transactional
  public void deleteCategory(String id, Integer version) {
    LOG.debug("enter deleteCategory, id:{}, version:{}", id, version);
    CategoryEntity entity = this.getById(id);
    validateVersion(entity, version);
    // Delete the category itself plus every sub-category that lists it as an ancestor.
    List<CategoryEntity> entitiesToDelete = Lists.newArrayList(entity);
    List<CategoryEntity> subCategories = categoryRepository.querySubCategoriesByAncestorId(id);
    if (subCategories != null && !subCategories.isEmpty()) {
      entitiesToDelete.addAll(subCategories);
    }
    categoryRepository.delete(entitiesToDelete);
    //TODO send message for:
    // remove from all those product that had that category assigned in their ProductData
    LOG.debug("end deleteCategory, id:{}, version:{}", id, version);
  }

  /**
   * Update category.
   *
   * @param id the id
   * @param version the expected version of the entity
   * @param actions the update actions to apply
   * @return the updated category
   * @throws NotExistException if no category exists for the id
   * @throws ConflictException if the version does not match
   */
  @Transactional
  public Category updateCategory(String id, Integer version, List<UpdateAction> actions) {
    LOG.debug("enter updateCategory, id is {}, version is {}, update action is {}",
        id, version, actions);
    CategoryEntity entity = getById(id);
    validateVersion(entity, version);
    // Apply the actions sequentially: every handler mutates the same entity,
    // so the previous parallelStream() risked lost updates and applied the
    // actions in a non-deterministic order.
    actions.forEach(action -> updateService.handle(entity, action));
    CategoryEntity updatedEntity = categoryRepository.save(entity);
    //TODO send message, if slug be updated
    Category result = CategoryMapper.entityToModel(updatedEntity);
    LOG.debug("end updateCategory, updated Category is {}", result);
    return result;
  }

  /**
   * Gets category by id.
   *
   * @param id the id
   * @return the category with the given id
   * @throws NotExistException if no CategoryEntity exists for the id
   */
  public Category getCategoryById(String id) {
    LOG.debug("enter getCategoryById, id is {}", id);
    CategoryEntity entity = getById(id);
    Category result = CategoryMapper.entityToModel(entity);
    LOG.debug("end getCategoryById, get category is : {}", result.toString());
    return result;
  }

  /**
   * Query categories.
   *
   * @param queryConditions the QueryConditions
   * @return the paged query result
   */
  // TODO: 16/12/13 queryconditions
  public PagedQueryResult<Category> queryCategories(QueryConditions queryConditions) {
    LOG.debug("enter queryCategories, QueryConditions is : {}", queryConditions.toString());
    // NOTE(review): the query conditions are not applied yet -- all categories
    // are returned (see TODO above).
    List<CategoryEntity> entities = categoryRepository.findAll();
    List<Category> result = CategoryMapper.entityToModel(entities);
    LOG.debug("end queryCategories, get Categories : {}", result);
    PagedQueryResult<Category> pagedQueryResult = new PagedQueryResult<>();
    pagedQueryResult.setResults(result);
    return pagedQueryResult;
  }

  /**
   * Save category entity.
   *
   * @param entity the entity
   * @return the saved category entity
   * @throws AlreadyExistException if the slug already exists (surfaces as a
   *         ConstraintViolationException from the unique constraint)
   */
  private CategoryEntity saveCategoryEntity(CategoryEntity entity) {
    try {
      return categoryRepository.save(entity);
    } catch (ConstraintViolationException e) {
      LOG.debug("slug is already exist", e);
      throw new AlreadyExistException("Slug is already exist");
    }
  }

  /**
   * Gets category entity by id.
   *
   * @param id the id
   * @return the category entity with the given id
   * @throws NotExistException if no CategoryEntity exists for the id
   */
  private CategoryEntity getById(String id) {
    LOG.debug("enter getById, id is {}", id);
    CategoryEntity categoryEntity = categoryRepository.findOne(id);
    if (categoryEntity == null) {
      LOG.debug("fail getById, can not find category by id:{}", id);
      throw new NotExistException("can not find category by id:" + id);
    }
    LOG.debug("end getById, id is {}, get CategoryEntity:{}",
        id, categoryEntity.toString());
    return categoryEntity;
  }

  /**
   * Validate that the given version matches the entity's version.
   *
   * @param entity the CategoryEntity
   * @param version the expected version
   * @throws ConflictException when the version does not match
   */
  private void validateVersion(CategoryEntity entity, Integer version) {
    if (!Objects.equals(version, entity.getVersion())) {
      LOG.debug("Version not match, input version:{}, entity version:{}",
          version, entity.getVersion());
      throw new ConflictException("Version not match");
    }
  }

  /**
   * Gets parent category.
   *
   * @param parentId the parent id
   * @return the parent category
   * @throws NotExistException if the parent category does not exist
   */
  private CategoryEntity getParentCategory(String parentId) {
    CategoryEntity parent = categoryRepository.findOne(parentId);
    validateParentCategory(parentId, parent);
    return parent;
  }

  /**
   * Build the ancestor list for a new child of the given parent:
   * the parent's own ancestors followed by the parent itself.
   *
   * @param parentId the parent id
   * @param parent the parent category
   * @return list of ancestor ids, ordered root-first
   */
  private List<String> setAncestors(String parentId, CategoryEntity parent) {
    List<String> ancestors = Lists.newArrayList();
    if (parent.getAncestors() != null) {
      ancestors = Lists.newArrayList(parent.getAncestors());
    }
    ancestors.add(parentId);
    return ancestors;
  }

  /**
   * Validate that the parent category exists.
   *
   * @param parentId parent id
   * @param parent parent category
   * @throws NotExistException when the parent category is null
   */
  private void validateParentCategory(String parentId, CategoryEntity parent) {
    if (parent == null) {
      LOG.debug("can not find parent category by id:{}", parentId);
      throw new NotExistException("Can not find parent category by id : " + parentId);
    }
  }
}
|
package hex.deeplearning;
import hex.FrameTask;
import hex.FrameTask.DataInfo;
import water.Job;
import water.Key;
import water.UKV;
import water.api.DeepLearningProgressPage;
import water.api.DocGen;
import water.api.RequestServer;
import water.fvec.Frame;
import water.util.Log;
import water.util.MRUtils;
import water.util.RString;
import water.util.Utils;
import java.util.Arrays;
import java.util.Random;
import static water.util.MRUtils.sampleFrame;
import static water.util.MRUtils.sampleFrameStratified;
/**
* Deep Learning Neural Net implementation based on MRTask2
*/
public class DeepLearning extends Job.ValidatedJob {
static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
public static DocGen.FieldDoc[] DOC_FIELDS;
public static final String DOC_GET = "Deep Learning";
@API(help = "Model checkpoint to resume training with.", filter= Default.class, json = true, gridable = false)
public Key checkpoint;
@API(help = "Enable expert mode (to access all options from GUI)", filter = Default.class, json = true, gridable = false)
public boolean expert_mode = false;
/*Neural Net Topology*/
@API(help = "Activation function", filter = Default.class, json = true)
public Activation activation = Activation.Tanh;
@API(help = "Hidden layer sizes (e.g. 100,100). Grid search: (10,10), (20,20,20)", filter = Default.class, json = true)
public int[] hidden = new int[] { 200, 200 };
@API(help = "How many times the dataset should be iterated (streamed), can be fractional", filter = Default.class, dmin = 1e-3, json = true)
public double epochs = 10;
@API(help = "Number of training samples after which multi-node synchronization and scoring can happen (0 for all, i.e., one epoch)", filter = Default.class, lmin = 0, json = true)
public long mini_batch = 100000L; // uppercase L suffix: lowercase 'l' is easily misread as '1'
@API(help = "Seed for random numbers (reproducible results for small (single-chunk) datasets only, cf. Hogwild!)", filter = Default.class, json = true)
public long seed = new Random().nextLong();
/*Adaptive Learning Rate*/
@API(help = "Adaptive learning rate (AdaDelta)", filter = Default.class, json = true)
public boolean adaptive_rate = true;
@API(help = "Adaptive learning rate time decay factor (similarity to prior updates)", filter = Default.class, dmin = 0.01, dmax = 1, json = true)
public double rho = 0.95;
@API(help = "Adaptive learning rate smoothing factor (to avoid divisions by zero and allow progress)", filter = Default.class, dmin = 1e-10, dmax = 1, json = true)
public double epsilon = 1e-6;
/*Learning Rate*/
@API(help = "Learning rate (higher => less stable, lower => slower convergence)", filter = Default.class, dmin = 1e-10, dmax = 1, json = true)
public double rate = .005;
@API(help = "Learning rate annealing: rate / (1 + rate_annealing * samples)", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double rate_annealing = 1 / 1e6;
@API(help = "Learning rate decay factor between layers (N-th layer: rate*alpha^(N-1))", filter = Default.class, dmin = 0, json = true)
public double rate_decay = 1.0;
/*Momentum*/
@API(help = "Initial momentum at the beginning of training", filter = Default.class, dmin = 0, dmax = 0.9999999999, json = true)
public double momentum_start = 0;
@API(help = "Number of training samples for which momentum increases", filter = Default.class, lmin = 1, json = true)
public long momentum_ramp = 1000000;
@API(help = "Final momentum after the ramp is over", filter = Default.class, dmin = 0, dmax = 0.9999999999, json = true)
public double momentum_stable = 0;
@API(help = "Use Nesterov accelerated gradient (recommended)", filter = Default.class, json = true)
public boolean nesterov_accelerated_gradient = true;
/*Regularization*/
@API(help = "Input layer dropout ratio (can improve generalization, try 0.1 or 0.2)", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double input_dropout_ratio = 0.0;
@API(help = "L1 regularization (can add stability and improve generalization, causes many weights to become 0)", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double l1 = 0.0;
@API(help = "L2 regularization (can add stability and improve generalization, causes many weights to be small", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double l2 = 0.0;
@API(help = "Constraint for squared sum of incoming weights per unit (e.g. for Rectifier)", filter = Default.class, json = true)
public double max_w2 = Double.POSITIVE_INFINITY;
/*Initialization*/
@API(help = "Initial Weight Distribution", filter = Default.class, json = true)
public InitialWeightDistribution initial_weight_distribution = InitialWeightDistribution.UniformAdaptive;
@API(help = "Uniform: -value...value, Normal: stddev)", filter = Default.class, dmin = 0, json = true)
public double initial_weight_scale = 1.0;
@API(help = "Loss function", filter = Default.class, json = true)
public Loss loss = Loss.CrossEntropy;
/*Scoring*/
@API(help = "Shortest time interval (in secs) between model scoring", filter = Default.class, dmin = 0, json = true)
public double score_interval = 5;
@API(help = "Number of training set samples for scoring (0 for all)", filter = Default.class, lmin = 0, json = true)
public long score_training_samples = 10000L; // uppercase L suffix for readability
@API(help = "Number of validation set samples for scoring (0 for all)", filter = Default.class, lmin = 0, json = true)
public long score_validation_samples = 0L; // uppercase L suffix for readability
@API(help = "Maximum duty cycle fraction for scoring (lower: more training, higher: more scoring).", filter = Default.class, dmin = 0, dmax = 1, json = true)
public double score_duty_cycle = 0.1;
@API(help = "Stopping criterion for classification error fraction on training data (-1 to disable)", filter = Default.class, dmin=-1, dmax=1, json = true, gridable = false)
public double classification_stop = 0;
@API(help = "Stopping criterion for regression error (MSE) on training data (-1 to disable)", filter = Default.class, dmin=-1, json = true, gridable = false)
public double regression_stop = 1e-6;
@API(help = "Enable quiet mode for less output to standard output", filter = Default.class, json = true, gridable = false)
public boolean quiet_mode = false;
@API(help = "Max. size (number of classes) for confusion matrices to be shown", filter = Default.class, json = true, gridable = false)
public int max_confusion_matrix_size = 20;
@API(help = "Max. number (K) of predictions to use for hit ratio computation (for multi-class only, 0 to disable)", filter = Default.class, lmin=0, json = true, gridable = false)
public int max_hit_ratio_k = 10;
/*Imbalanced Classes*/
@API(help = "Balance training data class counts via over/under-sampling (for imbalanced data)", filter = Default.class, json = true, gridable = false)
public boolean balance_classes = false;
@API(help = "Maximum relative size of the training data after balancing class counts (can be less than 1.0)", filter = Default.class, json = true, dmin=1e-3, gridable = false)
public float max_after_balance_size = 5.0f;
@API(help = "Method used to sample validation dataset for scoring", filter = Default.class, json = true, gridable = false)
public ClassSamplingMethod score_validation_sampling = ClassSamplingMethod.Uniform;
/*Misc*/
@API(help = "Enable diagnostics for hidden layers", filter = Default.class, json = true, gridable = false)
public boolean diagnostics = true;
@API(help = "Compute input variable importances", filter = Default.class, json = true)
public boolean variable_importances = true;
@API(help = "Enable fast mode (minor approximation in back-propagation)", filter = Default.class, json = true)
public boolean fast_mode = true;
@API(help = "Ignore constant training columns", filter = Default.class, json = true)
public boolean ignore_const_cols = true;
@API(help = "Force extra load balancing to increase training speed for small datasets", filter = Default.class, json = true)
public boolean force_load_balance = false;
@API(help = "Enable shuffling of training data (beta)", filter = Default.class, json = true)
public boolean shuffle_training_data = false;
/** Sampling strategies for building the validation scoring dataset. */
public enum ClassSamplingMethod {
  Uniform, Stratified
}
/** Strategies for drawing the initial network weights. */
public enum InitialWeightDistribution {
  UniformAdaptive, Uniform, Normal
}
/**
 * Activation functions
 */
public enum Activation {
  Tanh, TanhWithDropout, Rectifier, RectifierWithDropout, Maxout, MaxoutWithDropout
}
/**
 * Loss functions
 * CrossEntropy is recommended
 */
public enum Loss {
  MeanSquare, CrossEntropy
}
/**
 * Framework callback invoked when this request page is registered.
 * Marks the arguments whose value changes should refresh the query form,
 * because other arguments' enabled state depends on them.
 */
@Override
protected void registered(RequestServer.API_VERSION ver) {
  super.registered(ver);
  final java.util.List<String> refreshTriggers = Arrays.asList(
      "activation", "initial_weight_distribution", "expert_mode",
      "adaptive_rate", "balance_classes", "checkpoint");
  for (Argument arg : _arguments) {
    if (refreshTriggers.contains(arg._name)) {
      arg.setRefreshOnChange();
    }
  }
}
/**
 * Framework callback: enables/disables and pre-fills query-page arguments
 * based on the current values of other arguments (checkpoint, expert_mode,
 * classification, adaptive_rate, ...). The order of the checks matters;
 * several branches also overwrite parameter fields as a side effect.
 *
 * @param arg the argument whose dependent state is being refreshed
 * @param inputArgs the raw query-page input properties
 */
@Override protected void queryArgumentValueSet(Argument arg, java.util.Properties inputArgs) {
  super.queryArgumentValueSet(arg, inputArgs);
  // When restarting from a checkpoint, almost every parameter is taken from the
  // checkpointed model; only the few whitelisted names below stay user-editable.
  if (!arg._name.equals("checkpoint")
          && !arg._name.equals("epochs")
          && !arg._name.equals("expert_mode")
          && !arg._name.equals("seed")
          && !arg._name.equals("score_interval")
          && !arg._name.equals("score_duty_cycle")
          && !arg._name.equals("quiet_mode")
          && !arg._name.equals("diagnostics")
          ) {
    if (checkpoint != null) {
      arg.disable("Taken from model checkpoint.");
      final DeepLearningModel cp_model = UKV.get(checkpoint);
      if (cp_model == null) {
        throw new IllegalArgumentException("Checkpointed model was not found.");
      }
      if (cp_model.model_info().unstable()) {
        throw new IllegalArgumentException("Checkpointed model was unstable. Not restarting.");
      }
      final DeepLearning cp = cp_model.model_info().get_params();
      // destination_key = cp.destination_key; //continue training the SAME model
      // the following parameters are needed in the DeepLearning class for training
      balance_classes = cp.balance_classes;
      score_validation_sampling = cp.score_validation_sampling;
      max_after_balance_size = cp.max_after_balance_size;
      score_training_samples = cp.score_training_samples;
      score_validation_samples = cp.score_validation_samples;
      state = JobState.RUNNING;
      return;
    }
  }
  // UniformAdaptive derives its own scale, so the user-supplied scale is ignored.
  if(arg._name.equals("initial_weight_scale") &&
          (initial_weight_distribution == InitialWeightDistribution.UniformAdaptive)
          ) {
    arg.disable("Using sqrt(6 / (# units + # units of previous layer)) for Uniform distribution.", inputArgs);
  }
  // Regression always uses MeanSquare loss; force it here.
  if(arg._name.equals("loss") && !classification) {
    arg.disable("Using MeanSquare loss for regression.", inputArgs);
    loss = Loss.MeanSquare;
  }
  if (classification) {
    if(arg._name.equals("regression_stop")) {
      arg.disable("Only for regression.", inputArgs);
    }
    if(arg._name.equals("max_after_balance_size") && !balance_classes) {
      arg.disable("Requires balance_classes.", inputArgs);
    }
  }
  else {
    // Classification-only options are meaningless for regression.
    if(arg._name.equals("classification_stop")
            || arg._name.equals("max_confusion_matrix_size")
            || arg._name.equals("max_hit_ratio_k")
            || arg._name.equals("max_after_balance_size")
            || arg._name.equals("balance_classes")) {
      arg.disable("Only for classification.", inputArgs);
    }
    if (validation != null && arg._name.equals("score_validation_sampling")) {
      score_validation_sampling = ClassSamplingMethod.Uniform;
      arg.disable("Using uniform sampling for validation scoring dataset.", inputArgs);
    }
  }
  if ((arg._name.equals("score_validation_samples") || arg._name.equals("score_validation_sampling")) && validation == null) {
    arg.disable("Requires a validation data set.", inputArgs);
  }
  // Advanced tuning knobs are hidden unless expert mode is enabled.
  if (arg._name.equals("loss")
          || arg._name.equals("max_w2")
          || arg._name.equals("warmup_samples")
          || arg._name.equals("score_training_samples")
          || arg._name.equals("score_validation_samples")
          || arg._name.equals("initial_weight_distribution")
          || arg._name.equals("initial_weight_scale")
          || arg._name.equals("diagnostics")
          || arg._name.equals("rate_decay")
          || arg._name.equals("score_duty_cycle")
          || arg._name.equals("fast_mode")
          || arg._name.equals("score_validation_sampling")
          || arg._name.equals("max_after_balance_size")
          || arg._name.equals("ignore_const_cols")
          || arg._name.equals("force_load_balance")
          || arg._name.equals("shuffle_training_data")
          || arg._name.equals("nesterov_accelerated_gradient")
          || arg._name.equals("classification_stop")
          || arg._name.equals("regression_stop")
          || arg._name.equals("quiet_mode")
          || arg._name.equals("max_confusion_matrix_size")
          || arg._name.equals("max_hit_ratio_k")
          ) {
    if (!expert_mode) arg.disable("Only in expert mode.", inputArgs);
  }
  if (!adaptive_rate) {
    // AdaDelta disabled: its rho/epsilon parameters are irrelevant.
    if (arg._name.equals("rho") || arg._name.equals("epsilon")) {
      arg.disable("Only for adaptive learning rate.", inputArgs);
      rho = 0;
      epsilon = 0;
    }
  } else {
    // AdaDelta enabled: manual learning-rate / momentum knobs are irrelevant.
    if (arg._name.equals("rate") || arg._name.equals("rate_annealing") || arg._name.equals("rate_decay") || arg._name.equals("nesterov_accelerated_gradient")
            || arg._name.equals("momentum_start") || arg._name.equals("momentum_ramp") || arg._name.equals("momentum_stable") ) {
      arg.disable("Only for non-adaptive learning rate.", inputArgs);
      momentum_start = 0;
      momentum_stable = 0;
    }
  }
}
/** Scores the given frame with the trained model stored under this job's destination key. */
public Frame score( Frame fr ) {
  final DeepLearningModel trained = (DeepLearningModel) UKV.get(dest());
  return trained.score(fr);
}
/** Render the model parameters as a JSON box into the given HTML buffer. */
@Override public boolean toHTML(StringBuilder sb) {
  return makeJsonBox(sb);
}
/**
 * Return a query link to this page.
 * Convenience overload that delegates with no checkpoint, response or validation key.
 * @param k Model Key
 * @param content Link text
 * @return HTML Link
 */
public static String link(Key k, String content) {
  return link(k, content, null, null, null);
}
/**
 * Return a query link to this page, pre-filling the query form.
 * @param k Model Key
 * @param content Link text
 * @param cp Key to checkpoint to continue training with (optional)
 * @param response Response column name (optional)
 * @param val Validation data set key (optional)
 * @return HTML Link
 */
public static String link(Key k, String content, Key cp, String response, Key val) {
  final DeepLearning req = new DeepLearning();
  // Assemble the RString template first: placeholders are only added for the
  // optional parts that are actually present.
  final StringBuilder template = new StringBuilder()
      .append("<a href='").append(req.href()).append(".query?source=%$key");
  if (cp != null) template.append("&checkpoint=%$cp");
  if (response != null) template.append("&response=%$resp");
  if (val != null) template.append("&validation=%$valkey");
  template.append("'>%content</a>");
  final RString rs = new RString(template.toString());
  rs.replace("key", k.toString());
  rs.replace("content", content);
  if (cp != null) rs.replace("cp", cp.toString());
  if (response != null) rs.replace("resp", response);
  if (val != null) rs.replace("valkey", val);
  return rs.toString();
}
/**
 * Fraction of the requested epochs completed so far, clamped to [0, 1].
 * Returns 0 while no model (or no model info) exists yet under dest().
 */
@Override public float progress() {
  // Fetch the model once: the previous code did two UKV.get(dest()) lookups,
  // which was redundant and racy (the model could vanish between fetches).
  final DeepLearningModel m = UKV.get(dest());
  if (m == null || m.model_info() == null) return 0;
  return (float) Math.min(1, (m.epoch_counter / m.model_info().get_params().epochs));
}
/**
 * Job entry point: builds a fresh model or clones a checkpointed one,
 * trains it via trainModel(), then deletes job-related keys.
 * The dataset/response/columns/classification settings of a checkpoint run
 * must match the original run exactly; only a small whitelist of parameters
 * may be changed when resuming.
 *
 * @return JobState.DONE on success
 */
@Override public JobState execImpl() {
  DeepLearningModel cp;
  if (checkpoint == null) cp = initModel();
  else {
    final DeepLearningModel previous = UKV.get(checkpoint);
    if (previous == null) throw new IllegalArgumentException("Checkpoint not found.");
    // Clone the checkpointed model under this job's destination key.
    cp = new DeepLearningModel(previous, destination_key, job_key);
    try {
      cp.write_lock(self());
      assert(state==JobState.RUNNING);
      // Everything that shaped the network inputs must be identical to the
      // checkpointed run -- verify before continuing training.
      if (source == null || !Arrays.equals(source._key._kb, previous.model_info().get_params().source._key._kb)) {
        throw new IllegalArgumentException("source must be the same as for the checkpointed model.");
      }
      if (response == null || !Arrays.equals(response._key._kb, previous.model_info().get_params().response._key._kb)) {
        throw new IllegalArgumentException("response must be the same as for the checkpointed model.");
      }
      if (Utils.difference(ignored_cols, previous.model_info().get_params().ignored_cols).length != 0) {
        throw new IllegalArgumentException("ignored_cols must be the same as for the checkpointed model.");
      }
      if ((validation!=null) != (previous.model_info().get_params().validation != null)
              || (validation != null && !Arrays.equals(validation._key._kb, previous.model_info().get_params().validation._key._kb))) {
        throw new IllegalArgumentException("validation must be the same as for the checkpointed model.");
      }
      if (classification != previous.model_info().get_params().classification) {
        throw new IllegalArgumentException("classification must be the same as for the checkpointed model.");
      }
      // the following parameters might have been modified when restarting from a checkpoint
      cp.model_info().get_params().expert_mode = expert_mode;
      cp.model_info().get_params().seed = seed;
      cp.model_info().get_params().epochs = previous.epoch_counter + epochs; //add previously processed epochs to total epochs
      cp.model_info().get_params().score_interval = score_interval;
      cp.model_info().get_params().score_duty_cycle = score_duty_cycle;
      cp.model_info().get_params().quiet_mode = quiet_mode;
      cp.model_info().get_params().diagnostics = diagnostics;
      cp.update(self());
    } finally {
      cp.unlock(self());
    }
  }
  trainModel(cp);
  delete();
  return JobState.DONE;
}
/** Redirect the browser to the progress page for this job's destination model. */
@Override protected Response redirect() {
  return DeepLearningProgressPage.redirect(this, self(), dest());
}
// Set when no KV entry existed for this job and a placeholder was registered
// in checkParams(); the placeholder key is removed again in delete().
private boolean _fakejob;
/**
 * Validate user-supplied parameters and fill in missing keys/state:
 * requires at least 2 columns (features + response), strictly positive hidden
 * layer sizes, forces MeanSquare loss for regression, creates default
 * destination/job keys, and registers a placeholder job entry if needed.
 *
 * @throws IllegalArgumentException if the training data has fewer than 2
 *         columns or any hidden layer size is not positive
 */
private void checkParams() {
  if (source.numCols() <= 1)
    throw new IllegalArgumentException("Training data must have at least 2 features (incl. response).");
  for (int i = 0; i < hidden.length; ++i) {
    // Use <= 0: the previous check (== 0) let negative layer sizes slip
    // through even though the error message promises sizes > 0.
    if (hidden[i] <= 0)
      throw new IllegalArgumentException("Hidden layer size must be >0.");
  }
  if (!classification && loss != Loss.MeanSquare) {
    Log.warn("Setting loss to MeanSquare for regression.");
    loss = Loss.MeanSquare;
  }
  // make default job_key and destination_key in case they are missing
  if (dest() == null) {
    destination_key = Key.make();
  }
  if (self() == null) {
    job_key = Key.make();
  }
  // Register a placeholder job entry so progress/cancellation work even when
  // this job was constructed outside the normal Job lifecycle.
  if (UKV.get(self()) == null) {
    start_time = System.currentTimeMillis();
    state = JobState.RUNNING;
    UKV.put(self(), this);
    _fakejob = true;
  }
}
/**
 * Create an initial Deep Learning model, typically to be trained by trainModel(model).
 * Read-locks the input frames for the duration of model setup and always
 * releases them before returning.
 * @return Randomly initialized model
 */
public final DeepLearningModel initModel() {
  try {
    lock_data();
    checkParams();
    // Drop ignored/constant columns and adapt the frame for training.
    final Frame train = FrameTask.DataInfo.prepareFrame(source, response, ignored_cols, classification, ignore_const_cols);
    final DataInfo dinfo = new FrameTask.DataInfo(train, 1, true, !classification);
    // Prior class distribution (classification only), used for balancing/scoring.
    float[] priorDist = classification ? new MRUtils.ClassDist(dinfo._adaptedFrame.lastVec()).doAll(dinfo._adaptedFrame.lastVec()).rel_dist() : null;
    final DeepLearningModel model = new DeepLearningModel(dest(), self(), source._key, dinfo, this, priorDist);
    model.model_info().initializeMembers();
    return model;
  }
  finally {
    unlock_data();
  }
}
/**
 * Incrementally train an existing model for additional epochs.
 * @param model Initial model
 * @param epochs How many additional epochs to train for
 * @return Updated model
 */
public final DeepLearningModel trainModel(DeepLearningModel model, double epochs) {
  final DeepLearning params = model.model_info().get_params();
  params.epochs += epochs;
  return trainModel(model);
}
/**
 * Helper to update a Frame reference and add the replacement to the local trash.
 * @param target Frame reference to be overwritten
 * @param src Newly made frame, to be deleted via local trash
 * @return src
 */
Frame updateFrame(Frame target, Frame src) {
  final boolean replaced = (src != target);
  if (replaced) {
    // Only trash genuinely new frames; the caller may pass the same frame back.
    ltrash(src);
  }
  return src;
}
/**
 * Train a Deep Learning neural net model.
 * Locks the model and data, prepares (rebalanced/sampled) training and
 * scoring frames, then iterates DeepLearningTask until doScoring() signals
 * completion. Locks and temporary frames are always released in the finally
 * block, even on cancellation or failure.
 *
 * @param model Input model (e.g., from initModel(), or from a previous training run)
 * @return Trained model
 */
public final DeepLearningModel trainModel(DeepLearningModel model) {
  Frame valid, validScoreFrame = null;
  Frame train, trainScoreFrame;
  try {
    lock_data();
    logStart();
    if (model == null) {
      model = UKV.get(dest());
    }
    model.write_lock(self());
    prepareValidationWithModel(model);
    final long model_size = model.model_info().size();
    Log.info("Number of model parameters (weights/biases): " + String.format("%,d", model_size));
//      Log.info("Memory usage of the model: " + String.format("%.2f", (double)model_size*Float.SIZE / (1<<23)) + " MB.");
    train = model.model_info().data_info()._adaptedFrame;
    // Rebalance for better multi-core/multi-node utilization.
    train = updateFrame(train, reBalance(model.model_info().data_info()._adaptedFrame, seed));
    float[] trainSamplingFactors;
    if (classification && balance_classes) {
      // Over/under-sample classes to balance the training data.
      trainSamplingFactors = new float[train.lastVec().domain().length]; //leave initialized to 0 -> will be filled up below
      train = updateFrame(train, sampleFrameStratified(
              train, train.lastVec(), trainSamplingFactors, (long)(max_after_balance_size*train.numRows()), seed, true, false));
      model.setModelClassDistribution(new MRUtils.ClassDist(train.lastVec()).doAll(train.lastVec()).rel_dist());
    }
    model.training_rows = train.numRows();
    trainScoreFrame = sampleFrame(train, score_training_samples, seed); //training scoring dataset is always sampled uniformly from the training dataset
    if (train != trainScoreFrame) ltrash(trainScoreFrame);
    Log.info("Number of chunks of the training data: " + train.anyVec().nChunks());
    if (validation != null) {
      Frame adaptedValid = getValidation();
      if (getValidAdaptor().needsAdaptation2CM())
        adaptedValid.add("adaptedValidationResponse", getValidAdaptor().getAdaptedValidationResponse2CM());
      valid = reBalance(adaptedValid, seed+1); //rebalance for load balancing, shuffle for "fairness"
      // validation scoring dataset can be sampled in multiple ways from the given validation dataset
      if (classification && balance_classes && score_validation_sampling == ClassSamplingMethod.Stratified) {
        validScoreFrame = sampleFrameStratified(valid, valid.lastVec(), null,
                score_validation_samples > 0 ? score_validation_samples : valid.numRows(), seed+1, false /* no oversampling */, false);
      } else {
        validScoreFrame = sampleFrame(valid, score_validation_samples, seed+1);
      }
      if (valid != validScoreFrame) ltrash(validScoreFrame);
      Log.info("Number of chunks of the validation data: " + valid.anyVec().nChunks());
    }
    // mini_batch larger than the dataset makes no sense; clamp to #rows.
    if (mini_batch > train.numRows()) {
      Log.warn("Setting mini_batch (" + mini_batch
              + ") to the number of rows of the training data (" + (mini_batch=train.numRows()) + ").");
    }
    // determines the number of rows processed during DeepLearningTask, affects synchronization (happens at the end of each DeepLearningTask)
    final float sync_fraction = mini_batch == 0l ? 1.0f : (float)mini_batch / train.numRows();
    if (!quiet_mode) Log.info("Initial model:\n" + model.model_info());
    Log.info("Starting to train the Deep Learning model.");
    long timeStart = System.currentTimeMillis();
    //main loop
    do model.set_model_info(new DeepLearningTask(model.model_info(), sync_fraction).doAll(train).model_info());
    while (model.doScoring(train, trainScoreFrame, validScoreFrame, timeStart, self()));
    Log.info("Finished training the Deep Learning model.");
    return model;
  }
  catch(JobCancelledException ex) {
    Log.info("Deep Learning model building was cancelled.");
    model = UKV.get(dest());
    return model;
  }
  catch(Exception ex) {
    ex.printStackTrace();
    throw new RuntimeException(ex);
  }
  finally {
    if (model != null) model.unlock(self());
    unlock_data();
    emptyLTrash();
  }
}
/**
 * Acquires read locks on the input datasets so they cannot be deleted
 * while the job runs. The validation frame is only locked when it is a
 * distinct frame (non-null keys that differ) from the training source.
 */
private void lock_data() {
  source.read_lock(self());
  final boolean distinctValidation = validation != null
      && source._key != null
      && validation._key != null
      && !source._key.equals(validation._key);
  if (distinctValidation) {
    validation.read_lock(self());
  }
}
/**
 * Releases the read locks taken by lock_data().
 * Mirrors the locking condition exactly: the validation frame is only
 * unlocked when it was actually locked, i.e. when it is a distinct frame
 * (non-null keys that differ) from the training source.
 */
private void unlock_data() {
  source.unlock(self());
  // Null-key guards keep this symmetric with lock_data(); without them this
  // condition could throw a NullPointerException and leave locks held.
  if( validation != null && source._key != null && validation._key != null && !source._key.equals(validation._key) )
    validation.unlock(self());
}
/**
 * Removes all keys related to this job.
 */
public void delete() {
  cleanup();
  if (_fakejob) {
    UKV.remove(job_key); // fake jobs own their job key and must drop it themselves
  }
  remove();
}
/**
 * Rebalance a frame for load balancing.
 *
 * @param fr   Input frame
 * @param seed RNG seed
 * @return Frame that can be load-balanced (and shuffled), depending on whether
 *         force_load_balance and shuffle_training_data are set
 */
private Frame reBalance(final Frame fr, long seed) {
  if (!force_load_balance && !shuffle_training_data) {
    return fr; // nothing requested: hand back the original frame untouched
  }
  final Frame balanced = MRUtils.shuffleAndBalance(fr, seed, shuffle_training_data);
  if (balanced != fr) {
    ltrash(balanced); // the rebalanced copy is temporary: schedule it for cleanup
  }
  return balanced;
}
}
|
package net.glowstone.generator.populators.overworld;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import net.glowstone.generator.decorators.overworld.DoublePlantDecorator.DoublePlantDecoration;
import net.glowstone.generator.decorators.overworld.TreeDecorator.TreeDecoration;
import net.glowstone.generator.objects.trees.AcaciaTree;
import net.glowstone.generator.objects.trees.GenericTree;
import org.bukkit.block.Biome;
import org.bukkit.material.types.DoublePlantSpecies;
public class SavannaPopulator extends BiomePopulator {

    private static final Biome[] BIOMES = {Biome.SAVANNA, Biome.SAVANNA_ROCK};
    private static final DoublePlantDecoration[] DOUBLE_PLANTS = {
            new DoublePlantDecoration(DoublePlantSpecies.DOUBLE_TALLGRASS, 1)};
    private static final TreeDecoration[] TREES = {new TreeDecoration(AcaciaTree.class, 4),
            new TreeDecoration(GenericTree.class, 1)};

    /**
     * Creates a populator specialized for the Savanna and Savanna Plateau biomes.
     */
    public SavannaPopulator() {
        // Flowers and tall grass reuse the parent decorators; only the amounts change.
        flowerDecorator.setAmount(4);
        tallGrassDecorator.setAmount(20);
        // Double tallgrass is the only double plant placed here.
        doublePlantDecorator.setAmount(7);
        doublePlantDecorator.setDoublePlants(DOUBLE_PLANTS);
        // Mostly acacias (weight 4) with occasional generic trees (weight 1).
        treeDecorator.setAmount(1);
        treeDecorator.setTrees(TREES);
    }

    @Override
    public Collection<Biome> getBiomes() {
        // Expose the biome set as a read-only view over the backing array.
        return Collections.unmodifiableList(Arrays.asList(BIOMES));
    }
}
|
package hudson.ivy;
import hudson.ivy.AbstractIvyBuild.ParameterizedUpstreamCause;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Action;
import hudson.model.Cause;
import hudson.model.ItemGroup;
import hudson.model.ParametersAction;
import hudson.triggers.Trigger;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Common part between {@link IvyModule} and {@link IvyModuleSet}.
*
* @author Timothy Bingaman
*/
public abstract class AbstractIvyProject<P extends AbstractProject<P,R>,R extends AbstractBuild<P,R>> extends AbstractProject<P,R> {

    protected AbstractIvyProject(ItemGroup parent, String name) {
        super(parent, name);
    }

    @Override
    protected List<Action> createTransientActions() {
        List<Action> actions = super.createTransientActions();
        // Project actions taken only from the last build would vanish whenever that
        // build failed early (before reports such as test results kicked in), so the
        // last successful build is consulted as well.
        Set<Class> seen = new HashSet<Class>();
        addTransientActionsFromBuild(getLastBuild(), actions, seen);
        addTransientActionsFromBuild(getLastSuccessfulBuild(), actions, seen);
        for (Trigger<?> trigger : getTriggers().values()) {
            actions.addAll(trigger.getProjectActions());
        }
        return actions;
    }

    /**
     * @param collection
     *      Add the transient actions to this collection.
     */
    protected abstract void addTransientActionsFromBuild(R lastBuild, List<Action> collection, Set<Class> added);

    public abstract boolean isUseUpstreamParameters();

    @Override
    public boolean scheduleBuild(Cause c) {
        // Forward upstream build parameters when the cause carries them and the
        // project opted in; otherwise fall back to the plain scheduling path.
        if (c instanceof ParameterizedUpstreamCause && isUseUpstreamParameters()) {
            ParameterizedUpstreamCause cause = (ParameterizedUpstreamCause) c;
            List<ParametersAction> params = cause.getUpStreamParameters();
            return scheduleBuild(getQuietPeriod(), c, params.toArray(new ParametersAction[0]));
        }
        return super.scheduleBuild(c);
    }
}
|
package net.greghaines.arithmeticparser.regexor;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.greghaines.arithmeticparser.ArithmeticParser;
public class RegexArithmeticParser implements ArithmeticParser {

    /** Matches an innermost parenthesized group (one containing no nested parens). */
    private static final Pattern parenPattern = Pattern.compile("\\(([^\\)\\(]+)\\)");
    private static final Pattern multiDividePattern = Pattern.compile("([-]?[0-9]+(?:\\.[0-9]+)?)\\s*([\\*\\/])\\s*([-]?[0-9]+(?:\\.[0-9]+)?)");
    private static final Pattern addSubPattern = Pattern.compile("([-]?[0-9]+(?:\\.[0-9]+)?)\\s*([\\+\\-])\\s*([-]?[0-9]+(?:\\.[0-9]+)?)");
    /** Ordered by precedence: multiplication/division before addition/subtraction. */
    private static final Pattern[] opPatterns = { multiDividePattern, addSubPattern };

    /**
     * {@inheritDoc}
     */
    @Override
    public double evaluate(final String expression) {
        String text = expression;
        // Collapse parenthesized sub-expressions first, innermost-out, by
        // recursively evaluating each and splicing the result back into the text.
        final Matcher matcher = parenPattern.matcher(text);
        while (matcher.reset(text).find()) {
            final double inner = evaluate(matcher.group(1));
            text = text.substring(0, matcher.start()) + inner + text.substring(matcher.end());
        }
        // Then reduce binary operations left-to-right, one precedence level at a time.
        for (final Pattern opPattern : opPatterns) {
            matcher.usePattern(opPattern);
            while (matcher.reset(text).find()) {
                final double lhs = evaluate(matcher.group(1));
                final double rhs = evaluate(matcher.group(3));
                final double result = apply(matcher.group(2), lhs, rhs);
                text = text.substring(0, matcher.start()) + result + text.substring(matcher.end());
            }
        }
        // All operators have been folded away; only a single numeric literal remains.
        return Double.parseDouble(text);
    }

    /**
     * Applies a single binary arithmetic operator to two operands.
     *
     * @param operator one of "*", "/", "+", "-"
     * @param lhs left operand
     * @param rhs right operand
     * @return the result of the operation
     */
    private static double apply(final String operator, final double lhs, final double rhs) {
        switch (operator) {
        case "*":
            return lhs * rhs;
        case "/":
            return lhs / rhs;
        case "+":
            return lhs + rhs;
        case "-":
            return lhs - rhs;
        default:
            throw new IllegalArgumentException("Unknown operator: " + operator);
        }
    }
}
|
package i5.las2peer.security;
import i5.las2peer.communication.Message;
import i5.las2peer.communication.MessageException;
import i5.las2peer.execution.L2pServiceException;
import i5.las2peer.execution.NoSuchServiceException;
import i5.las2peer.execution.ServiceInvocationException;
import i5.las2peer.execution.UnlockNeededException;
import i5.las2peer.logging.NodeObserver.Event;
import i5.las2peer.p2p.AgentNotKnownException;
import i5.las2peer.p2p.Node;
import i5.las2peer.p2p.TimeoutException;
import i5.las2peer.persistency.EncodingFailedException;
import i5.las2peer.tools.SerializationException;
import java.io.Serializable;
import java.util.LinkedList;
import java.util.Vector;
/**
* A Mediator acts on behalf of an {@link PassphraseAgent}. This necessary e.g. for remote
* users logged in via a {@link i5.las2peer.api.Connector} to collect incoming messages from the
* P2P network and transfer it to the connector.
* <br>
* Two ways for message handling are provided: Register a {@link MessageHandler} that will be called for each
* received message. Multiple MessageHandlers are possible (for example for different message contents).
* The second way to handle messages is to get pending messages from the Mediator directly via the provided methods.
* Handling then has to be done via the calling entity (for example a service).
*
*
*
*/
public class Mediator implements MessageReceiver {

    // FIFO buffer of received messages that no registered handler has consumed yet.
    private LinkedList<Message> pending = new LinkedList<Message>();

    // The unlocked agent this mediator acts on behalf of.
    private Agent myAgent;

    // Node this mediator is registered at; null while unregistered.
    private Node runningAt = null;

    // Handlers consulted in registration order for every incoming message.
    private Vector<MessageHandler> registeredHandlers = new Vector<MessageHandler> ();

    /**
     * Creates a new mediator for the given agent.
     *
     * @param a the agent to mediate for; its private key must already be unlocked
     * @throws L2pSecurityException if the agent is still locked
     */
    public Mediator ( Agent a ) throws L2pSecurityException {
        if ( a.isLocked())
            throw new L2pSecurityException ("You need to unlock the private key of the agent for mediating.");
        myAgent = a;
    }

    /**
     * Gets (and removes) the next pending message.
     *
     * @return the next collected message, or null if none is pending
     */
    public Message getNextMessage() {
        if (pending.size() == 0) return null;
        return pending.pollFirst();
    }

    /**
     * Does this mediator have pending messages?
     *
     * @return true, if messages have arrived
     */
    public boolean hasMessages() {
        return pending.size() > 0;
    }

    @Override
    public void receiveMessage(Message message, Context c)
            throws MessageException {
        // NOTE(review): assumes getRecipientId() and getId() both return primitive long
        // values; if either were boxed (Long), != would compare references - confirm.
        if ( message.getRecipientId() != myAgent.getId())
            throw new MessageException ("I'm not responsible for the receiver (something went very wrong)!");
        try {
            // Decrypt / open the message content for the mediated agent.
            message.open(myAgent, c);
            //START
            //This part enables message answering for all messages that were sent to an (UserAgent) mediator.
            //Disable this section to reduce network traffic
            if(getMyNode() != null){ //This line is needed to allow the tests to work (since they do not have a node..)
                try {
                    Message response = new Message(message, "thank you");
                    response.setSendingNodeId(getMyNode().getNodeId());
                    getMyNode().sendMessage(response, null);
                } catch (EncodingFailedException e) {
                    throw new MessageException ("Unable to send response ", e);
                } catch (SerializationException e) {
                    throw new MessageException ("Unable to send response ", e);
                }
            }
            //END
        } catch (L2pSecurityException e) {
            throw new MessageException ("Unable to open message because of security problems! ", e);
        } catch (AgentNotKnownException e) {
            throw new MessageException ("Sender unkown (since this is the receiver). Has the sending node gone offline? ", e);
        }
        // Messages no handler consumed are stored for later retrieval via getNextMessage().
        if ( ! workOnMessage ( message, c ))
            pending.add( message );
    }

    /**
     * Method for message reception treatment.
     * Will call all registered {@link MessageHandler}s for message handling.
     *
     * A return value of true indicates, that the received message has been treated by a MessageHandler and
     * does not need further storage for later use (and will not be added to pending messages).
     *
     * @param message the received (already opened) message
     * @param context the security context the message arrived in
     *
     * @return true, if a message had been treated successfully
     */
    public boolean workOnMessage( Message message, Context context ) {
        for ( int i=0; i<registeredHandlers.size(); i++ ){
            try {
                // First handler reporting success wins; remaining handlers are skipped.
                if ( registeredHandlers.get(i).handleMessage(message, context))
                    return true;
            } catch (Exception e) {
                // A failing handler is logged and the next handler is tried.
                // NOTE(review): if a handler throws while this mediator is unregistered
                // (runningAt == null) this line itself throws a NullPointerException - confirm.
                runningAt.observerNotice(Event.MESSAGE_FAILED, runningAt.getNodeId(), this, "Exception in MessageHandler " + registeredHandlers.get(i) + ": " + e );
            }
        }
        return false;
    }

    /**
     * Grants access to the node this Mediator is registered to.
     *
     * @return the node this Mediator is running at, or null if unregistered
     */
    protected Node getMyNode () {
        return runningAt;
    }

    @Override
    public long getResponsibleForAgentId() {
        return myAgent.getId();
    }

    @Override
    public void notifyRegistrationTo(Node node) {
        runningAt = node;
    }

    @Override
    public void notifyUnregister() {
        runningAt = null;
    }

    /**
     * Invokes a service method (in the network) for the mediated agent.
     *
     * @param service name of the service to invoke
     * @param method name of the service method
     * @param parameters actual invocation parameters
     * @param preferLocal if a local running service should be preferred
     *
     * @return result of the method invocation
     *
     * @throws L2pSecurityException
     * @throws InterruptedException
     * @throws TimeoutException
     * @throws ServiceInvocationException
     * @throws UnlockNeededException
     */
    public Serializable invoke ( String service, String method, Serializable[] parameters, boolean preferLocal ) throws L2pSecurityException, InterruptedException, TimeoutException, ServiceInvocationException, UnlockNeededException {
        boolean isBusy=runningAt.isBusy();
        if ( preferLocal && !isBusy && runningAt.hasService ( service ) )
        {
            try {
                return runningAt.invokeLocally(myAgent.getId(), service, method, parameters);
            } catch ( NoSuchServiceException e ) {
                // just try globally
                System.out.println ( "Local access to service " + service + " failed - trying globally");
                return runningAt.invokeGlobally(myAgent, service, method, parameters);
            } catch (AgentNotKnownException e) {
                throw new L2pSecurityException(e.getMessage());
            } catch (L2pServiceException e) {
                throw new ServiceInvocationException(e.getMessage());
            }
        }
        // NOTE(review): when the local path is not taken (preferLocal false, node busy,
        // or service not hosted locally) this silently returns null instead of trying a
        // global invocation - verify this is intended.
        return null;
    }

    /**
     * Gets the number of waiting messages.
     *
     * @return number of waiting messages
     */
    public int getNumberOfWaiting() {
        return pending.size();
    }

    /**
     * Registers a MessageHandler for message processing.
     *
     * Message handlers will be used for handling incoming messages in the order of
     * registration. Registering the same handler twice has no effect.
     *
     * @param handler the handler to register; must not be null
     */
    public void registerMessageHandler( MessageHandler handler ) {
        if ( handler == null)
            throw new NullPointerException ();
        if ( registeredHandlers.contains(handler))
            return;
        registeredHandlers.add( handler );
    }

    /**
     * Unregisters a handler from this mediator.
     *
     * @param handler the handler to remove
     */
    public void unregisterMessageHandler ( MessageHandler handler ) {
        registeredHandlers.remove(handler);
    }

    /**
     * Unregisters all handlers of the given class.
     *
     * @param cls class of handlers to remove
     *
     * @return number of successfully removed message handlers
     */
    public int unregisterMessageHandlerClass ( @SuppressWarnings("rawtypes") Class cls) {
        int result = 0;
        // Rebuild the handler list, keeping only handlers that are NOT instances of cls.
        Vector<MessageHandler> newHandlers = new Vector<MessageHandler> ();
        for ( int i=0; i<registeredHandlers.size(); i++)
            if ( ! cls.isInstance( registeredHandlers.get(i) ) ) {
                newHandlers.add( registeredHandlers.get(i));
            } else
                result ++;
        registeredHandlers = newHandlers;
        return result;
    }

    /**
     * Unregisters all handlers of the given class (looked up by name).
     *
     * @param classname fully qualified name of the handler class
     *
     * @return number of successfully removed message handlers
     */
    public int unregisterMessageHandlerClass ( String classname ) {
        try {
            return unregisterMessageHandlerClass ( Class.forName( classname ));
        } catch (Exception e) {
            // if the class cannot be found, there won't be any instances of it registered here...
            return 0;
        }
    }

    /**
     * Is the given message handler registered at this mediator?
     *
     * @param handler the handler to look for
     *
     * @return true, if at least one message handler is registered to this mediator
     */
    public boolean hasMessageHandler ( MessageHandler handler ) {
        return registeredHandlers.contains( handler );
    }

    /**
     * Has this mediator a registered message handler of the given class?
     *
     * @param cls class to check for
     * @return true, if this mediator has a message handler of the given class
     */
    public boolean hasMessageHandlerClass ( @SuppressWarnings("rawtypes") Class cls) {
        for ( MessageHandler handler : registeredHandlers)
            if ( cls.isInstance( handler ))
                return true;
        return false;
    }
}
|
package net.tropicraft.core.common.dimension.noise;
import com.mojang.datafixers.util.Pair;
import com.mojang.serialization.*;
import net.minecraft.core.Holder;
import net.minecraft.core.Registry;
import net.minecraft.data.BuiltinRegistries;
import net.minecraft.resources.ResourceKey;
import net.minecraft.resources.ResourceLocation;
import net.minecraft.world.level.levelgen.*;
import net.minecraft.world.level.levelgen.synth.NormalNoise;
/**
 * Builds the noise router for Tropicraft's dimension by wiring together vanilla
 * density functions looked up from the builtin registries.
 */
public final class TropicraftNoiseGen {

    // NOTE(review): these constants (ORE_THICKNESS, VEININESS_FREQUENCY,
    // NOODLE_SPACING_AND_STRAIGHTNESS, SURFACE_DENSITY_THRESHOLD, BLENDING_FACTOR,
    // BLENDING_JAGGEDNESS) are not referenced anywhere in this class - presumably
    // kept to mirror vanilla's NoiseRouterData; confirm before removing.
    private static final float ORE_THICKNESS = 0.08F;
    private static final double VEININESS_FREQUENCY = 1.5D;
    private static final double NOODLE_SPACING_AND_STRAIGHTNESS = 1.5D;
    private static final double SURFACE_DENSITY_THRESHOLD = 1.5625D;
    private static final DensityFunction BLENDING_FACTOR = DensityFunctions.constant(10.0D);
    private static final DensityFunction BLENDING_JAGGEDNESS = DensityFunctions.zero();

    // Registry keys for the vanilla density functions this generator reuses.
    // Only some of them are looked up below; the rest mirror the vanilla key set.
    private static final ResourceKey<DensityFunction> ZERO = createKey("zero");
    private static final ResourceKey<DensityFunction> Y = createKey("y");
    private static final ResourceKey<DensityFunction> SHIFT_X = createKey("shift_x");
    private static final ResourceKey<DensityFunction> SHIFT_Z = createKey("shift_z");
    private static final ResourceKey<DensityFunction> BASE_3D_NOISE = createKey("overworld/base_3d_noise");
    private static final ResourceKey<DensityFunction> CONTINENTS = createKey("overworld/continents");
    private static final ResourceKey<DensityFunction> EROSION = createKey("overworld/erosion");
    private static final ResourceKey<DensityFunction> RIDGES = createKey("overworld/ridges");
    private static final ResourceKey<DensityFunction> FACTOR = createKey("overworld/factor");
    private static final ResourceKey<DensityFunction> DEPTH = createKey("overworld/depth");
    private static final ResourceKey<DensityFunction> SLOPED_CHEESE = createKey("overworld/sloped_cheese");
    private static final ResourceKey<DensityFunction> CONTINENTS_LARGE = createKey("overworld_large_biomes/continents");
    private static final ResourceKey<DensityFunction> EROSION_LARGE = createKey("overworld_large_biomes/erosion");
    private static final ResourceKey<DensityFunction> FACTOR_LARGE = createKey("overworld_large_biomes/factor");
    private static final ResourceKey<DensityFunction> DEPTH_LARGE = createKey("overworld_large_biomes/depth");
    private static final ResourceKey<DensityFunction> SLOPED_CHEESE_LARGE = createKey("overworld_large_biomes/sloped_cheese");
    private static final ResourceKey<DensityFunction> SLOPED_CHEESE_END = createKey("end/sloped_cheese");
    private static final ResourceKey<DensityFunction> SPAGHETTI_ROUGHNESS_FUNCTION = createKey("overworld/caves/spaghetti_roughness_function");
    private static final ResourceKey<DensityFunction> ENTRANCES = createKey("overworld/caves/entrances");
    private static final ResourceKey<DensityFunction> NOODLE = createKey("overworld/caves/noodle");
    private static final ResourceKey<DensityFunction> PILLARS = createKey("overworld/caves/pillars");
    private static final ResourceKey<DensityFunction> SPAGHETTI_2D_THICKNESS_MODULATOR = createKey("overworld/caves/spaghetti_2d_thickness_modulator");
    private static final ResourceKey<DensityFunction> SPAGHETTI_2D = createKey("overworld/caves/spaghetti_2d");

    /**
     * Assembles the full noise router (aquifers, climate noise, terrain density,
     * ore veins) for the overworld-with-new-caves preset.
     *
     * @param p_212283_ noise settings (only minY() is read here)
     * @param p_212284_ true to use the "large biomes" variants of the climate/terrain functions
     * @return the assembled router
     */
    public static NoiseRouterWithOnlyNoises overworldWithNewCaves(NoiseSettings p_212283_, boolean p_212284_) {
        // Aquifer-related noises.
        DensityFunction densityfunction = DensityFunctions.noise(getNoise(Noises.AQUIFER_BARRIER), 0.5D);
        DensityFunction densityfunction1 = DensityFunctions.noise(getNoise(Noises.AQUIFER_FLUID_LEVEL_FLOODEDNESS), 0.67D);
        DensityFunction densityfunction2 = DensityFunctions.noise(getNoise(Noises.AQUIFER_FLUID_LEVEL_SPREAD), 0.7142857142857143D);
        DensityFunction densityfunction3 = DensityFunctions.noise(getNoise(Noises.AQUIFER_LAVA));
        // Climate noise, sampled at shifted coordinates.
        DensityFunction densityfunction4 = getFunction(SHIFT_X);
        DensityFunction densityfunction5 = getFunction(SHIFT_Z);
        DensityFunction densityfunction6 = DensityFunctions.shiftedNoise2d(densityfunction4, densityfunction5, 0.25D, getNoise(p_212284_ ? Noises.TEMPERATURE_LARGE : Noises.TEMPERATURE));
        DensityFunction densityfunction7 = DensityFunctions.shiftedNoise2d(densityfunction4, densityfunction5, 0.25D, getNoise(p_212284_ ? Noises.VEGETATION_LARGE : Noises.VEGETATION));
        // Terrain shaping: factor/depth combine into the initial density gradient,
        // then cave functions carve the "sloped cheese" density.
        DensityFunction densityfunction8 = getFunction(p_212284_ ? FACTOR_LARGE : FACTOR);
        DensityFunction densityfunction9 = getFunction(p_212284_ ? DEPTH_LARGE : DEPTH);
        DensityFunction densityfunction10 = noiseGradientDensity(DensityFunctions.cache2d(densityfunction8), densityfunction9);
        DensityFunction densityfunction11 = getFunction(p_212284_ ? SLOPED_CHEESE_LARGE : SLOPED_CHEESE);
        DensityFunction densityfunction12 = DensityFunctions.min(densityfunction11, DensityFunctions.mul(DensityFunctions.constant(5.0D), getFunction(ENTRANCES)));
        DensityFunction densityfunction13 = DensityFunctions.rangeChoice(densityfunction11, -1000000.0D, 1.5625D, densityfunction12, underground(densityfunction11));
        DensityFunction densityfunction14 = DensityFunctions.min(postProcess(p_212283_, densityfunction13), getFunction(NOODLE));
        // Ore vein functions, restricted to the y band [j, k].
        DensityFunction densityfunction15 = getFunction(Y);
        // NOTE(review): i and f are unused - presumably vestiges of the vanilla decompile.
        int i = p_212283_.minY();
        int j = -60;
        int k = 50;
        DensityFunction densityfunction16 = yLimitedInterpolatable(densityfunction15, DensityFunctions.noise(getNoise(Noises.ORE_VEININESS), 1.5D, 1.5D), j, k, 0);
        float f = 4.0F;
        DensityFunction densityfunction17 = yLimitedInterpolatable(densityfunction15, DensityFunctions.noise(getNoise(Noises.ORE_VEIN_A), 4.0D, 4.0D), j, k, 0).abs();
        DensityFunction densityfunction18 = yLimitedInterpolatable(densityfunction15, DensityFunctions.noise(getNoise(Noises.ORE_VEIN_B), 4.0D, 4.0D), j, k, 0).abs();
        DensityFunction densityfunction19 = DensityFunctions.add(DensityFunctions.constant((double)-0.08F), DensityFunctions.max(densityfunction17, densityfunction18));
        DensityFunction densityfunction20 = DensityFunctions.noise(getNoise(Noises.ORE_GAP));
        return new NoiseRouterWithOnlyNoises(densityfunction, densityfunction1, densityfunction2, densityfunction3, densityfunction6, densityfunction7, getFunction(p_212284_ ? CONTINENTS_LARGE : CONTINENTS), getFunction(p_212284_ ? EROSION_LARGE : EROSION), getFunction(p_212284_ ? DEPTH_LARGE : DEPTH), getFunction(RIDGES), densityfunction10, densityfunction14, densityfunction16, densityfunction19, densityfunction20);
    }

    // Applies the world-height slide, blending and interpolation, then squeezes
    // the result into the final density used for terrain.
    private static DensityFunction postProcess(NoiseSettings p_212275_, DensityFunction p_212276_) {
        DensityFunction densityfunction = DensityFunctions.slide(p_212275_, p_212276_);
        DensityFunction densityfunction1 = DensityFunctions.blendDensity(densityfunction);
        return DensityFunctions.mul(DensityFunctions.interpolated(densityfunction1), DensityFunctions.constant(0.64D)).squeeze();
    }

    // Resolves noise parameters from the builtin registry; throws if the key is absent.
    private static Holder<NormalNoise.NoiseParameters> getNoise(ResourceKey<NormalNoise.NoiseParameters> p_209543_) {
        return BuiltinRegistries.NOISE.getHolderOrThrow(p_209543_);
    }

    // Builds a density-function registry key from a bare path
    // (presumably resolved in the default "minecraft" namespace - confirm).
    private static ResourceKey<DensityFunction> createKey(String p_209537_) {
        return ResourceKey.create(Registry.DENSITY_FUNCTION_REGISTRY, new ResourceLocation(p_209537_));
    }

    // Resolves a density function from the builtin registry; throws if the key is absent.
    private static DensityFunction getFunction(ResourceKey<DensityFunction> p_209553_) {
        return BuiltinRegistries.DENSITY_FUNCTION.getHolderOrThrow(p_209553_).value();
    }

    // Wraps a function so it only applies between y levels p_209474_..p_209475_
    // (selected via the y input p_209472_), falling back to the constant p_209476_ elsewhere.
    private static DensityFunction yLimitedInterpolatable(DensityFunction p_209472_, DensityFunction p_209473_, int p_209474_, int p_209475_, int p_209476_) {
        return DensityFunctions.interpolated(DensityFunctions.rangeChoice(p_209472_, (double)p_209474_, (double)(p_209475_ + 1), p_209473_, DensityFunctions.constant((double)p_209476_)));
    }

    // Combines factor and depth into the base terrain density gradient.
    private static DensityFunction noiseGradientDensity(DensityFunction p_212272_, DensityFunction p_212273_) {
        DensityFunction densityfunction = DensityFunctions.mul(p_212273_, p_212272_);
        return DensityFunctions.mul(DensityFunctions.constant(4.0D), densityfunction.quarterNegative());
    }

    // Carves the underground: combines cave layer/cheese noises with spaghetti
    // caves and entrances, then overlays pillars.
    private static DensityFunction underground(DensityFunction p_209470_) {
        DensityFunction densityfunction = getFunction(SPAGHETTI_2D);
        DensityFunction densityfunction1 = getFunction(SPAGHETTI_ROUGHNESS_FUNCTION);
        DensityFunction densityfunction2 = DensityFunctions.noise(getNoise(Noises.CAVE_LAYER), 8.0D);
        DensityFunction densityfunction3 = DensityFunctions.mul(DensityFunctions.constant(4.0D), densityfunction2.square());
        DensityFunction densityfunction4 = DensityFunctions.noise(getNoise(Noises.CAVE_CHEESE), 0.6666666666666666D);
        DensityFunction densityfunction5 = DensityFunctions.add(DensityFunctions.add(DensityFunctions.constant(0.27D), densityfunction4).clamp(-1.0D, 1.0D), DensityFunctions.add(DensityFunctions.constant(1.5D), DensityFunctions.mul(DensityFunctions.constant(-0.64D), p_209470_)).clamp(0.0D, 0.5D));
        DensityFunction densityfunction6 = DensityFunctions.add(densityfunction3, densityfunction5);
        DensityFunction densityfunction7 = DensityFunctions.min(DensityFunctions.min(densityfunction6, getFunction(ENTRANCES)), DensityFunctions.add(densityfunction, densityfunction1));
        DensityFunction densityfunction8 = getFunction(PILLARS);
        DensityFunction densityfunction9 = DensityFunctions.rangeChoice(densityfunction8, -1000000.0D, 0.03D, DensityFunctions.constant(-1000000.0D), densityfunction8);
        return DensityFunctions.max(densityfunction7, densityfunction9);
    }
}
|
package com.mkl.tools.eu.vo.map;
import com.mkl.tools.eu.util.ToolsUtil;
import com.mkl.tools.eu.vo.province.*;
import com.thoughtworks.xstream.XStream;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Utility class that gather all injection data for client.
*
* @author MKL.
*/
/**
 * Utility class that gather all injection data for client.
 *
 * @author MKL.
 */
public class ClientGenerator {

    /**
     * Create the geo.json file used by the application.
     *
     * @param provinces    data gathered by the input.
     * @param specialBoxes special boxes on the maps.
     * @param log          log writer.
     * @throws Exception exception.
     */
    public static void createMapData(Map<String, Province> provinces, Map<String, Province> specialBoxes, Writer log) throws Exception {
        // try-with-resources: the output file was previously leaked when serialization threw.
        try (Writer writer = ToolsUtil.createFileWriter("src/main/resources/output/countries.geo.json", false)) {
            writer.append("{\"type\":\"FeatureCollection\",\"features\":[\n");
            boolean first = true;
            for (String prov : provinces.keySet()) {
                Province province = provinces.get(prov);
                province.restructure();
                if (!first) {
                    writer.append(",\n");
                } else {
                    first = false;
                }
                writer.append(" {\"type\":\"Feature\",\"properties\":{\"terrain\":\"").append(province.getTerrain())
                        .append("\"");
                if (province.getInfo() != null) {
                    // European province with full info: serialize every property.
                    writer.append(",\"rotw\":\"false\"")
                            .append(",\"income\":\"").append(Integer.toString(province.getInfo().getIncome())).append("\"")
                            .append(",\"fortress\":\"").append(Integer.toString(province.getInfo().getFortress())).append("\"")
                            .append(",\"capital\":\"").append(Boolean.toString(province.getInfo().isCapital())).append("\"")
                            .append(",\"port\":\"").append(Boolean.toString(province.getInfo().isPort())).append("\"")
                            .append(",\"arsenal\":\"").append(Boolean.toString(province.getInfo().isArsenal())).append("\"")
                            .append(",\"praesidiable\":\"").append(Boolean.toString(province.getInfo().isPraesidiable())).append("\"")
                            .append(",\"metadata\":\"").append(String.join(";;", province.getInfo().getMetadata(province.getName()))).append("\"");
                } else if (!province.getPortions().get(0).isRotw()) {
                    writer.append(",\"rotw\":\"false\"");
                } else {
                    writer.append(",\"rotw\":\"true\"");
                }
                writer.append("},\"geometry\":{\"type\":\"");
                if (province.getCoords().size() == 1) {
                    writer.append("Polygon");
                } else if (province.getCoords().size() > 1) {
                    writer.append("MultiPolygon");
                } else {
                    // No geometry at all: only log it (the feature is still written, without a geometry type).
                    log.append(province.getName()).append("\t").append("No border.").append("\n");
                }
                writer.append("\",\"coordinates\":[");
                writeProvinceCoords(province, writer);
                writer.append("]},\"id\":\"").append(province.getName()).append("\"}");
            }
            for (String prov : specialBoxes.keySet()) {
                Province province = specialBoxes.get(prov);
                province.restructure();
                writer.append(",\n");
                writer.append(" {\"type\":\"Feature\",\"geometry\":{\"type\":\"Polygon\",\"coordinates\":[");
                writeProvinceCoords(province, writer);
                writer.append("]},\"id\":\"").append(province.getName()).append("\"}");
            }
            writer.append("\n]}");
            writer.flush();
        }
    }

    /**
     * Writes the coordinates of a province: a single polygon directly, or several
     * polygons each wrapped in brackets (MultiPolygon convention).
     *
     * @param province the province whose coordinates are written.
     * @param writer   File Writer.
     * @throws Exception exception.
     */
    private static void writeProvinceCoords(Province province, Writer writer) throws Exception {
        if (province.getCoords().size() == 1) {
            writePolygone(province.getCoords().get(0), writer);
        } else {
            boolean firstPolygon = true;
            for (Pair<List<List<Pair<Integer, Integer>>>, Boolean> polygon : province.getCoords()) {
                if (!firstPolygon) {
                    writer.append(", ");
                } else {
                    firstPolygon = false;
                }
                writer.append("[");
                writePolygone(polygon, writer);
                writer.append("]");
            }
        }
    }

    /**
     * Write a polygon in a geo.json format.
     *
     * @param polygons List of coordinates of the polygons.
     * @param writer   File Writer.
     * @throws Exception exception.
     */
    private static void writePolygone(Pair<List<List<Pair<Integer, Integer>>>, Boolean> polygons, Writer writer) throws Exception {
        boolean firstPolygon = true;
        for (List<Pair<Integer, Integer>> polygone : polygons.getLeft()) {
            if (!firstPolygon) {
                writer.append(", ");
            } else {
                firstPolygon = false;
            }
            writer.append("[");
            boolean firstCoord = true;
            for (Pair<Integer, Integer> coord : polygone) {
                if (!firstCoord) {
                    writer.append(", ");
                } else {
                    firstCoord = false;
                }
                double x;
                double y;
                // Affine pixel -> map conversion; the constants differ per map.
                // NOTE(review): the boolean right member presumably selects the rotw map
                // (true) versus the european map (false) - confirm against Province.
                if (polygons.getRight()) {
                    x = 4.659 + coord.getLeft() * 12.484 / 8183;
                    y = 11.409 + coord.getRight() * 5.251 / 3546;
                } else {
                    x = 4.658 + coord.getLeft() * 12.859 / 8425;
                    y = 2.109 + coord.getRight() * 8.845 / 5840;
                }
                writer.append("[").append(Double.toString(x)).append(", ").append(Double.toString(y)).append("]");
            }
            writer.append("]");
        }
    }

    /**
     * Create provinces neighbour file used by the application.
     *
     * @param provinces      data gathered by the input.
     * @param specialBorders rivers, moutain passes and straits.
     * @param log            log writer.
     * @return the borders.
     * @throws Exception exception.
     */
    public static List<Border> createProvincesData(Map<String, Province> provinces, Map<String, List<Path>> specialBorders, Writer log) throws Exception {
        // Index every province by each path that borders one of its portions.
        Map<Path, List<Province>> provincesByPath = new HashMap<>();
        for (Province province : provinces.values()) {
            for (SubProvince subProvince : province.getPortions()) {
                for (DirectedPath path : subProvince.getPaths()) {
                    if (!provincesByPath.containsKey(path.getPath())) {
                        provincesByPath.put(path.getPath(), new ArrayList<>());
                    }
                    List<Province> provincesForPath = provincesByPath.get(path.getPath());
                    if (!provincesForPath.contains(province)) {
                        provincesForPath.add(province);
                    }
                }
            }
        }
        List<Border> borders = createBorders(provincesByPath, specialBorders, log);
        XStream xstream = new XStream();
        xstream.processAnnotations(Border.class);
        // try-with-resources: this writer was previously never flushed nor closed,
        // which could leave borders.xml truncated or the file handle leaked.
        try (Writer borderWriter = ToolsUtil.createFileWriter("src/main/resources/output/borders.xml", false)) {
            xstream.toXML(borders, borderWriter);
        }
        return borders;
    }

    /**
     * Create borders object from arranged provinces by paths.
     *
     * @param provincesByPath provinces arranged by paths.
     * @param specialBorders  rivers, moutain passes and straits.
     * @param log             log writer.
     * @return the borders.
     * @throws IOException exception.
     */
    private static List<Border> createBorders(Map<Path, List<Province>> provincesByPath, Map<String, List<Path>> specialBorders, Writer log) throws IOException {
        List<Border> borders = new ArrayList<>();
        for (Path path : provincesByPath.keySet()) {
            List<Province> provincesForPath = provincesByPath.get(path);
            // Paths named "bord" are map edges, not real borders between provinces.
            if (path.getName().contains("bord")) {
                continue;
            }
            // Every distinct pair of provinces sharing this path forms a border.
            for (int i = 0; i < provincesForPath.size(); i++) {
                for (int j = i + 1; j < provincesForPath.size(); j++) {
                    Province first = provincesForPath.get(i);
                    Province second = provincesForPath.get(j);
                    if (first != second) {
                        // A path listed in specialBorders gives the border its type (river, strait...).
                        String type = null;
                        for (String specialType : specialBorders.keySet()) {
                            if (specialBorders.get(specialType).contains(path)) {
                                type = specialType;
                            }
                        }
                        Border border = new Border(first, second, type);
                        if (borders.contains(border)) {
                            // Same pair reached through another path: only flag a conflict
                            // when the two paths disagree on the border type.
                            Border existingBorder = borders.get(borders.indexOf(border));
                            if (!StringUtils.equals(border.getType(), existingBorder.getType())) {
                                log.append(first.getName()).append("\t").append("Duplicate borders").append("\t")
                                        .append(second.getName()).append("\n");
                            }
                        } else {
                            borders.add(border);
                        }
                    }
                }
            }
        }
        return borders;
    }
}
|
package io.vertx.docgen;
import com.sun.source.doctree.*;
import com.sun.source.doctree.ErroneousTree;
import com.sun.source.doctree.LiteralTree;
import com.sun.source.util.DocTreeScanner;
import com.sun.source.util.DocTrees;
import com.sun.source.util.TreePath;
import com.sun.tools.javac.code.Symbol;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.*;
import javax.tools.Diagnostic;
import javax.tools.JavaFileObject;
import java.io.*;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author <a href="mailto:julien@julienviet.com">Julien Viet</a>
*/
public abstract class BaseProcessor extends AbstractProcessor {
// Access to the javadoc comment trees of compiled elements.
protected DocTrees docTrees;
// Shared helper (source reading, link resolution, language filtering).
protected Helper helper;
// Extra source documents from the "docgen.source" option; null when unset.
protected List<String> sources;
// Registered block post-processors, kept in registration order.
protected Set<PostProcessor> postProcessors = new LinkedHashSet<>();
// Link-target resolutions, keyed by the {@link} signature text.
protected Map<String, ElementResolution> resolutions = new HashMap<>();
// Qualified package name -> error message for documents that failed.
Map<String, String> failures = new HashMap<>();
@Override
public SourceVersion getSupportedSourceVersion() {
    // The processor relies on Java 8 doc-tree APIs.
    return SourceVersion.RELEASE_8;
}
/**
 * The processor options recognized by docgen: output directory, generated
 * file extension and additional source documents.
 */
@Override
public Set<String> getSupportedOptions() {
    Set<String> options = new HashSet<>();
    options.add("docgen.output");
    options.add("docgen.extension");
    options.add("docgen.source");
    return options;
}
@Override
public Set<String> getSupportedAnnotationTypes() {
    // "*": claim every annotation so the processor always runs.
    return Collections.singleton("*");
}
/**
 * Registers a post-processor, rejecting duplicates by (case-insensitive) name.
 *
 * @param postProcessor the post-processor to register
 * @return this processor, for chaining
 * @throws IllegalArgumentException when a processor with the same name exists
 */
public synchronized BaseProcessor registerPostProcessor(PostProcessor postProcessor) {
    String name = postProcessor.getName();
    if (getPostProcessor(name) == null) {
        postProcessors.add(postProcessor);
        return this;
    }
    throw new IllegalArgumentException(
        "Post-processor with name '" + name + "' is already registered.");
}
/**
 * Looks up a registered post-processor by name, ignoring case.
 *
 * @param name the post-processor name
 * @return the matching post-processor, or {@code null} when none matches
 */
public synchronized PostProcessor getPostProcessor(String name) {
    return postProcessors.stream()
        .filter(pp -> pp.getName().equalsIgnoreCase(name))
        .findFirst()
        .orElse(null);
}
@Override
public synchronized void init(ProcessingEnvironment processingEnv) {
    super.init(processingEnv);
    // "docgen.source" is a comma-separated list of extra documents to render.
    String sourceOpt = processingEnv.getOptions().get("docgen.source");
    if (sourceOpt != null) {
        sources = new ArrayList<>(Arrays.asList(sourceOpt.split("\\s*,\\s*")));
    }
    docTrees = DocTrees.instance(processingEnv);
    helper = new Helper(processingEnv);
    // The language-filter post-processor is always available.
    registerPostProcessor(new LanguageFilterPostProcessor());
}
/**
 * Renders the plain-text content of a list of doc trees by concatenating
 * the body of every text node.
 */
private String render(List<? extends DocTree> trees) {
    StringBuilder sb = new StringBuilder();
    DocTreeVisitor<Void, Void> textCollector = new DocTreeScanner<Void, Void>() {
        @Override
        public Void visitText(TextTree node, Void unused) {
            sb.append(node.getBody());
            return super.visitText(node, unused);
        }
    };
    for (DocTree tree : trees) {
        tree.accept(textCollector, null);
    }
    return sb.toString();
}
// Generation state: for each document, the writer produced by each generator.
// Filled during regular rounds, flushed to disk in the final round.
private final Map<Doc, Map<DocGenerator, DocWriter>> state = new HashMap<>();

/**
 * Annotation-processing entry point. During regular rounds it renders every
 * {@code @Document}-annotated package and every configured source file; in the
 * final round it post-processes and writes the accumulated output.
 */
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
    // Bail out when invoked from IntelliJ's built-in annotation processing to
    // avoid running the generation twice.
    StackTraceElement[] trace = Thread.currentThread().getStackTrace();
    for (StackTraceElement elt : trace) {
        if (elt.getClassName().startsWith("org.jetbrains")) {
            return true;
        }
    }
    // Once a document failed, skip all further work.
    if (failures.isEmpty()) {
        try {
            if (!roundEnv.processingOver()) {
                // Render every @Document-annotated package element.
                roundEnv.getElementsAnnotatedWith(Document.class).forEach(elt -> {
                    try {
                        PackageDoc doc = new PackageDoc((PackageElement) elt);
                        state.put(doc, handleGen(doc));
                    } catch (DocGenException e) {
                        // Attach the offending element when the exception lacks one.
                        if (e.element == null) {
                            e.element = elt;
                        }
                        throw e;
                    }
                });
                if (sources != null && sources.size() > 0) {
                    for (String source : sources) {
                        // Handle wildcards
                        List<File> files = new ArrayList<>();
                        File f = new File(source);
                        if (!f.exists()) {
                            if (f.getName().contains("*")) {
                                // Translate '*' into a regex matched against the
                                // siblings of the named file.
                                StringBuilder sb = new StringBuilder();
                                for (char c : f.getName().toCharArray()) {
                                    if (c == '*') {
                                        sb.append(".*");
                                    } else {
                                        sb.append(Matcher.quoteReplacement(Character.toString(c)));
                                    }
                                }
                                Pattern p = Pattern.compile(sb.toString());
                                File parentFile = f.getParentFile();
                                File[] children = parentFile.listFiles();
                                if (children != null) {
                                    for (File child : children) {
                                        if (p.matcher(child.getName()).matches()) {
                                            files.add(child);
                                        }
                                    }
                                }
                            } else {
                                throw new FileNotFoundException("Cannot process document " + source);
                            }
                        } else {
                            files.add(f);
                        }
                        for (File file : files) {
                            if (file.isFile()) {
                                FileDoc fileDoc = new FileDoc(file, file.getName());
                                Map<DocGenerator, DocWriter> m = handleGen(fileDoc);
                                state.put(fileDoc, m);
                            } else if (file.isDirectory()) {
                                // Recurse: every regular file below the directory is a
                                // document, keyed by its path relative to the root.
                                Files.walk(file.toPath())
                                    .map(Path::toFile)
                                    .filter(File::isFile).forEach(docFile -> {
                                    String relativePath = file.toPath().relativize(docFile.toPath()).toString();
                                    FileDoc fileDoc = new FileDoc(docFile, relativePath);
                                    Map<DocGenerator, DocWriter> m = handleGen(fileDoc);
                                    state.put(fileDoc, m);
                                });
                            } else {
                                throw new IOException("Document " + file.getAbsolutePath() + " is not a file nor a dir");
                            }
                        }
                    }
                    sources.clear();
                }
                // Resolve the link signatures registered during rendering.
                // Resolving one link can register new resolutions (included
                // packages), hence the loop; 'processed' prevents retrying
                // unresolvable signatures forever.
                Set<String> processed = new HashSet<>();
                while (true) {
                    Optional<ElementResolution> opt = resolutions
                        .values()
                        .stream()
                        .filter(res -> res.elt == null && !processed.contains(res.signature))
                        .findFirst();
                    if (opt.isPresent()) {
                        ElementResolution res = opt.get();
                        processed.add(res.signature);
                        res.tryResolve();
                    } else {
                        break;
                    }
                }
            } else {
                // Final round: post-process and write every rendered document.
                state.forEach((doc, m) -> {
                    m.forEach((gen, w) -> {
                        String content = postProcess(gen.getName(), w.render());
                        write(gen, doc, content);
                    });
                });
            }
        } catch (Exception e) {
            // Report on the most specific element available.
            Element reportedElt = (e instanceof DocGenException) ? ((DocGenException) e).element : null;
            String msg = e.getMessage();
            if (msg == null) {
                msg = e.toString();
            }
            e.printStackTrace();
            if (reportedElt != null) {
                processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, msg, reportedElt);
                if (reportedElt instanceof PackageElement) {
                    failures.put(((PackageElement) reportedElt).getQualifiedName().toString(), msg);
                } else {
                    throw new UnsupportedOperationException("not implemented");
                }
            } else {
                processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, msg);
            }
        }
    }
    // Never claim the annotations so other processors still see them.
    return false;
}
/**
 * @return the generators to run over each document; supplied by subclasses.
 */
protected abstract Iterable<DocGenerator> generators();

/**
 * Runs every generator over {@code doc}, capturing one writer per generator.
 */
private Map<DocGenerator, DocWriter> handleGen(Doc doc) {
    Map<DocGenerator, DocWriter> result = new HashMap<>();
    for (DocGenerator gen : generators()) {
        gen.init(processingEnv);
        DocWriter out = new DocWriter();
        doc.process(gen, out);
        result.put(gen, out);
    }
    return result;
}
/**
 * Returns the file extension configured via the {@literal docgen.extension}
 * processor option, defaulting to {@literal .adoc} when absent.
 */
protected String getExtension() {
    String ext = processingEnv.getOptions().get("docgen.extension");
    return ext != null ? ext : ".adoc";
}
/**
 * Resolves the link target for a package document: the explicit file name
 * from its {@link Document} annotation when present, otherwise the package
 * name followed by the configured extension.
 */
protected String resolveLinkToPackageDoc(PackageElement elt) {
    String fileName = elt.getAnnotation(Document.class).fileName();
    return fileName.isEmpty() ? elt.toString() + getExtension() : fileName;
}
/**
 * Resolve the coordinate of the type element, this method returns either:
 * <ul>
 * <li>a {@link io.vertx.docgen.Coordinate} object, the coordinate object can have null fields</li>
 * <li>{@code null} : the current element is being compiled, which likely means create a local link</li>
 * </ul>
 *
 * @param typeElt the type element to resolve
 * @return the resolved coordinate object or null if the element is locally compiled
 */
private Coordinate resolveCoordinate(TypeElement typeElt) {
    try {
        Symbol.ClassSymbol cs = (Symbol.ClassSymbol) typeElt;
        if (cs.sourcefile != null && getURL(cs.sourcefile) != null) {
            // .java source we can link locally
            return null;
        }
        if (cs.classfile != null) {
            JavaFileObject cf = cs.classfile;
            URL classURL = getURL(cf);
            if (classURL != null && classURL.getFile().endsWith(".class")) {
                // Derive the jar's manifest URL from the class-file URL by
                // stripping "<qualified.Name>.class" and appending the manifest path.
                URL manifestURL = new URL(classURL.toString().substring(0, classURL.toString().length() - (typeElt.getQualifiedName().toString().length() + 6)) + "META-INF/MANIFEST.MF");
                // Fix: the stream was previously never closed (resource leak);
                // openStream() throws rather than returning null, so the old
                // null check was redundant.
                try (InputStream manifestIs = manifestURL.openStream()) {
                    Manifest manifest = new Manifest(manifestIs);
                    Attributes attributes = manifest.getMainAttributes();
                    String groupId = attributes.getValue(new Attributes.Name("Maven-Group-Id"));
                    String artifactId = attributes.getValue(new Attributes.Name("Maven-Artifact-Id"));
                    String version = attributes.getValue(new Attributes.Name("Maven-Version"));
                    return new Coordinate(groupId, artifactId, version);
                }
            }
        }
    } catch (Exception ignored) {
        // Any failure (missing manifest, malformed URL, ...) falls through to
        // the "unknown coordinate" result below.
    }
    return new Coordinate(null, null, null);
}
/** Converts a file object to a URL, returning {@code null} on any failure. */
private URL getURL(JavaFileObject fileObject) {
    try {
        return fileObject.toUri().toURL();
    } catch (Exception ignored) {
        return null;
    }
}
/**
 * Resolves a default label for a program element, used when a link does not
 * specify an explicit label.<p/>
 * <p/>
 * Subclasses can customize the final label through
 * {@code DocGenerator#resolveLabel}.
 *
 * @param elt the element to resolve a label for
 * @return the label
 */
private String resolveLabel(DocGenerator generator, Element elt) {
    String label = elt.getSimpleName().toString();
    boolean isMember = elt.getKind() == ElementKind.METHOD || elt.getKind() == ElementKind.FIELD;
    if (isMember && elt.getModifiers().contains(Modifier.STATIC)) {
        // Static members are qualified with their declaring type: "Type.member".
        label = elt.getEnclosingElement().getSimpleName() + "." + label;
    }
    if (elt.getKind() == ElementKind.ANNOTATION_TYPE) {
        label = "@" + label;
    }
    return generator.resolveLabel(elt, label);
}
// Packages currently being rendered; used to detect circular includes.
private final LinkedList<PackageElement> stack = new LinkedList<>();

/**
 * A renderable document: either a {@code @Document}-annotated package
 * ({@link PackageDoc}) or a plain file ({@link FileDoc}).
 */
abstract class Doc {

    // Unique identifier of this document.
    abstract String id();

    // Relative output file name for the given generator.
    abstract String resolveRelativeFileName(DocGenerator generator);

    /**
     * Renders this document with {@code generator} into {@code writer}.
     * Package docs are rendered by visiting their javadoc comment tree; file
     * docs by language-filtering the raw content and expanding
     * {@code {@link}} tags.
     */
    protected final void process(DocGenerator generator, DocWriter writer) {
        if (this instanceof PackageDoc) {
            PackageElement pkgElt = ((PackageDoc) this).elt;
            // Refuse to render a package that is already on the include stack.
            for (PackageElement stackElt : stack) {
                if (pkgElt.getQualifiedName().equals(stackElt.getQualifiedName())) {
                    throw new DocGenException(stack.peekLast(), "Circular include");
                }
            }
            stack.addLast(pkgElt);
            String pkgSource = helper.readSource(pkgElt);
            TreePath pkgPath = docTrees.getPath(pkgElt);
            DocCommentTree docTree = docTrees.getDocCommentTree(pkgPath);
            DocTreeVisitor<Void, Void> visitor = new DocTreeScanner<Void, Void>() {

                // Copies the node's raw source text verbatim into the writer,
                // using the doc tree's source positions.
                private void copyContent(DocTree node) {
                    int from = (int) docTrees.getSourcePositions().getStartPosition(pkgPath.getCompilationUnit(), docTree, node);
                    int to = (int) docTrees.getSourcePositions().getEndPosition(pkgPath.getCompilationUnit(), docTree, node);
                    writer.append(pkgSource, from, to);
                }

                @Override
                public Void visitUnknownBlockTag(UnknownBlockTagTree node, Void v) {
                    // Unknown block tags are echoed as literal "@name " text.
                    writer.append("@").append(node.getTagName()).append(" ");
                    return super.visitUnknownBlockTag(node, v);
                }

                @Override
                public Void visitDocComment(DocCommentTree node, Void v) {
                    // Order: first sentence, body, block tags — separated by blank lines.
                    v = scan(node.getFirstSentence(), v);
                    List<? extends DocTree> body = node.getBody();
                    if (body.size() > 0) {
                        writer.append("\n\n");
                        writer.resetParagraph();
                        v = scan(body, v);
                    }
                    List<? extends DocTree> blockTags = node.getBlockTags();
                    if (blockTags.size() > 0) {
                        writer.append("\n");
                        v = scan(blockTags, v);
                    }
                    return v;
                }

                @Override
                public Void visitErroneous(ErroneousTree node, Void v) {
                    // Erroneous nodes are treated as plain text.
                    return visitText(node, v);
                }

                @Override
                public Void visitText(TextTree node, Void v) {
                    String body = node.getBody();
                    // Apply per-language filtering on the raw text.
                    helper.filterLang(body, generator.getName(), writer);
                    return super.visitText(node, v);
                }

                /**
                 * Handles both literal and code. We generate the asciidoc output using {@literal `}.
                 */
                @Override
                public Void visitLiteral(LiteralTree node, Void aVoid) {
                    writer.append("`").append(node.getBody().getBody()).append("`");
                    return super.visitLiteral(node, aVoid);
                }

                @Override
                public Void visitEntity(EntityTree node, Void aVoid) {
                    // HTML entities are written out decoded.
                    writer.append(EntityUtils.unescapeEntity(node.getName().toString()));
                    return super.visitEntity(node, aVoid);
                }

                @Override
                public Void visitStartElement(StartElementTree node, Void v) {
                    // Start tags are copied verbatim from the source.
                    copyContent(node);
                    return v;
                }

                @Override
                public Void visitEndElement(EndElementTree node, Void v) {
                    writer.write("</");
                    writer.append(node.getName());
                    writer.append('>');
                    return v;
                }

                @Override
                public Void visitLink(LinkTree node, Void v) {
                    // {@link signature label} -> deferred link resolution.
                    String signature = node.getReference().getSignature();
                    String label = render(node.getLabel()).trim();
                    BaseProcessor.this.visitLink(pkgElt, label, signature, generator, writer);
                    return v;
                }
            };
            docTree.accept(visitor, null);
            stack.removeLast();
        } else {
            FileDoc fileDoc = (FileDoc) this;
            try {
                String content = new String(Files.readAllBytes(fileDoc.file.toPath()), StandardCharsets.UTF_8);
                // Language filtering first, then {@link ...} expansion.
                StringBuilder intermediate = new StringBuilder(content.length());
                helper.filterLang(content, generator.getName(), intermediate);
                content = intermediate.toString();
                Matcher linkMatcher = LINK_PATTERN.matcher(content);
                int prev = 0;
                while (linkMatcher.find()) {
                    writer.write(content, prev, linkMatcher.start() - prev);
                    String value = linkMatcher.group(1).trim();
                    Matcher methodLinkMatcher = METHOD_LINK_PATTERN.matcher(value);
                    if (methodLinkMatcher.find()) {
                        // Split "signature label" and defer the link rendering.
                        String signature = value.substring(0, methodLinkMatcher.end());
                        String label = value.substring(methodLinkMatcher.end()).trim();
                        writer.exec(() -> {
                            BaseProcessor.this.visitLink(null, label, signature, generator, writer);
                        });
                    }
                    prev = linkMatcher.end();
                }
                writer.append(content, prev, content.length());
            } catch (IOException e) {
                throw new DocGenException(e.getMessage());
            }
        }
    }
}
/** A document backed by a {@code @Document}-annotated package element. */
class PackageDoc extends Doc {

    final PackageElement elt;

    PackageDoc(PackageElement elt) {
        this.elt = elt;
    }

    /** The qualified package name identifies the document. */
    @Override
    public String id() {
        return elt.getQualifiedName().toString();
    }

    /**
     * Return the relative file name of a document: the explicit name from the
     * {@link Document} annotation when set, otherwise the qualified package
     * name followed by the configured extension.
     *
     * @param generator the doc generator
     * @return the relative file name
     */
    public String resolveRelativeFileName(DocGenerator generator) {
        Document doc = elt.getAnnotation(Document.class);
        String configured = doc.fileName();
        String relativeName = configured.isEmpty()
            ? elt.getQualifiedName() + getExtension()
            : configured;
        return generator.resolveRelativeFileName(elt, relativeName);
    }
}
/** A document backed by a plain file on disk. */
class FileDoc extends Doc {

    // The document file.
    final File file;
    // Path relative to the configured source root; doubles as the id.
    final String relativePath;

    FileDoc(File file, String relativePath) {
        this.file = file;
        this.relativePath = relativePath;
    }

    @Override
    public String id() {
        return relativePath;
    }

    // File documents keep their relative path regardless of the generator.
    @Override
    public String resolveRelativeFileName(DocGenerator generator) {
        return relativePath;
    }
}
private static final Pattern LINK_PATTERN = Pattern.compile("\\{@link\\s([^}]+)\\}");
private static final Pattern METHOD_LINK_PATTERN = Pattern.compile(
"^([$_\\w]+\\.)*[$_\\w]+" +
"(?:" +
"
"(?:(?:\\([^)]*)\\)|$|(?= ))" +
")?");
/**
 * Registers a deferred link rendering for {@code signature}: the actual
 * output is produced once the referenced element has been resolved.
 *
 * @param pkgElt    the package the link occurs in, or {@code null} for file docs
 * @param label     the explicit link label (may be empty)
 * @param signature the {@code {@link}} signature text
 * @throws DocGenException (at render time) when the signature never resolves
 */
private void visitLink(PackageElement pkgElt, String label, String signature, DocGenerator generator, DocWriter writer) {
    // One shared resolution per signature; every link to it is notified once
    // the target resolves. computeIfAbsent replaces the get/put pair.
    ElementResolution res = resolutions.computeIfAbsent(signature, s -> new ElementResolution(s));
    LinkProcessing fut = new LinkProcessing(generator, label);
    res.add(fut);
    writer.write(() -> {
        DocWriter ww = fut.writer;
        if (ww == null) {
            throw new DocGenException(pkgElt, "Could not resolve " + signature);
        }
        return ww;
    });
}
/**
 * The resolution of an element: lazily resolves a {@code {@link}} signature
 * to a program element and notifies pending link processings once resolved.
 * Equality and hashing are by signature only.
 */
class ElementResolution {

    final String signature;
    // Resolved target; null while unresolved.
    private Element elt;
    // Link processings waiting for the resolution.
    private List<LinkProcessing> handlers = new ArrayList<>();

    public ElementResolution(String signature) {
        this.signature = signature;
    }

    /**
     * Attempts the resolution when still unresolved.
     *
     * @return {@code true} when the element is (now) resolved
     */
    boolean tryResolve() {
        if (elt == null) {
            doResolve();
        }
        return elt != null;
    }

    // Fix: @Override was missing on equals (hashCode already had it).
    @Override
    public boolean equals(Object o) {
        if (o instanceof ElementResolution) {
            ElementResolution that = (ElementResolution) o;
            return signature.equals(that.signature);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return signature.hashCode();
    }

    private void doResolve() {
        elt = helper.resolveLink(signature);
        if (elt != null) {
            // Flush the pending link processings now that the target is known.
            for (LinkProcessing fut : handlers) {
                fut.handle(elt);
            }
            handlers.clear();
        }
    }

    // Handles immediately when already resolved, otherwise queues the processing.
    private void add(LinkProcessing fut) {
        if (elt != null) {
            fut.handle(elt);
        } else {
            handlers.add(fut);
        }
    }
}
/**
 * A pending link rendering: once the target element of a {@code {@link}} tag
 * is resolved, {@link #handle} renders the link (or inlined/example content)
 * into its own writer.
 */
class LinkProcessing {

    final DocGenerator generator;
    // Explicit link label; empty means "derive one from the element".
    final String label;
    // Output of handle(); stays null while the target is unresolved.
    private DocWriter writer;

    public LinkProcessing(DocGenerator generator, String label) {
        this.generator = generator;
        this.label = label;
    }

    // Renders the resolved element into a fresh writer.
    void handle(Element elt) {
        writer = new DocWriter();
        if (elt instanceof PackageElement) {
            PackageElement includedElt = (PackageElement) elt;
            if (includedElt.getAnnotation(Document.class) == null) {
                // Plain package: inline its documentation here.
                new PackageDoc(includedElt).process(generator, writer);
            } else {
                // @Document package: emit a link to its own generated document.
                String link = resolveLinkToPackageDoc((PackageElement) elt);
                writer.append(link);
            }
        } else {
            if (helper.isExample(elt)) {
                // Example element: render its source fragment literally.
                String source = helper.readSource(elt);
                switch (elt.getKind()) {
                    case CONSTRUCTOR:
                    case METHOD:
                        // Check whether or not the fragment must be translated
                        String fragment;
                        if (helper.hasToBeTranslated(elt)) {
                            // Invoke the custom renderer, which may apply the translation to the expected language.
                            fragment = generator.renderSource((ExecutableElement) elt, source);
                        } else {
                            // Do not call the custom rendering process, just use the default / java one.
                            JavaDocGenerator javaGen = new JavaDocGenerator();
                            javaGen.init(processingEnv);
                            fragment = javaGen.renderSource((ExecutableElement) elt, source);
                        }
                        if (fragment != null) {
                            writer.literalMode();
                            writer.append(fragment);
                            writer.commentMode();
                        }
                        return;
                    case CLASS:
                    case INTERFACE:
                    case ENUM:
                    case ANNOTATION_TYPE:
                        // Type examples always use the default java renderer.
                        TypeElement typeElt = (TypeElement) elt;
                        JavaDocGenerator javaGen = new JavaDocGenerator();
                        javaGen.init(processingEnv);
                        fragment = javaGen.renderSource(typeElt, source);
                        if (fragment != null) {
                            writer.literalMode();
                            writer.append(fragment);
                            writer.commentMode();
                        }
                        return;
                    default:
                        throw new UnsupportedOperationException("unsupported element: " + elt.getKind());
                }
            }
            // Not an example: resolve a link target for the element's kind.
            String link;
            switch (elt.getKind()) {
                case CLASS:
                case INTERFACE:
                case ANNOTATION_TYPE:
                case ENUM: {
                    TypeElement typeElt = (TypeElement) elt;
                    link = generator.resolveTypeLink(typeElt, resolveCoordinate(typeElt));
                    break;
                }
                case METHOD: {
                    ExecutableElement methodElt = (ExecutableElement) elt;
                    TypeElement typeElt = (TypeElement) methodElt.getEnclosingElement();
                    link = generator.resolveMethodLink(methodElt, resolveCoordinate(typeElt));
                    break;
                }
                case CONSTRUCTOR: {
                    ExecutableElement constructorElt = (ExecutableElement) elt;
                    TypeElement typeElt = (TypeElement) constructorElt.getEnclosingElement();
                    link = generator.resolveConstructorLink(constructorElt, resolveCoordinate(typeElt));
                    break;
                }
                case FIELD:
                case ENUM_CONSTANT: {
                    VariableElement variableElt = (VariableElement) elt;
                    TypeElement typeElt = (TypeElement) variableElt.getEnclosingElement();
                    link = generator.resolveFieldLink(variableElt, resolveCoordinate(typeElt));
                    break;
                }
                default:
                    throw new UnsupportedOperationException("Not yet implemented " + elt + " with kind " + elt.getKind());
            }
            // Use the explicit label when given, otherwise derive one.
            String s;
            if (label.length() == 0) {
                s = resolveLabel(generator, elt);
            } else {
                s = label;
            }
            // Emit "`link:target[label]`", or just "`label`" when no target.
            if (link != null) {
                writer.append("`link:").append(link).append("[").append(s).append("]`");
            } else {
                writer.append("`").append(s).append("`");
            }
        }
    }
}
/**
 * Post-processes rendered content: substitutes option variables, then applies
 * the registered block post-processors.
 */
protected String postProcess(String name, String content) {
    return applyPostProcessors(name, applyVariableSubstitution(content));
}
/**
 * Writes a rendered document under the directory given by the
 * {@literal docgen.output} option ({@code $lang} in the option is replaced by
 * the generator name). Does nothing when the option is absent.
 */
protected void write(DocGenerator generator, Doc doc, String content) {
    String outputOpt = processingEnv.getOptions().get("docgen.output");
    if (outputOpt != null) {
        outputOpt = outputOpt.replace("$lang", generator.getName());
        String relativeName = doc.resolveRelativeFileName(generator);
        try {
            // Walk down the relative path, one directory level per '/'.
            File dir = new File(outputOpt);
            for (int i = relativeName.indexOf('/'); i != -1; i = relativeName.indexOf('/', i + 1)) {
                dir = new File(dir, relativeName.substring(0, i));
                relativeName = relativeName.substring(i + 1);
            }
            File file = new File(dir, relativeName);
            ensureDir(file.getParentFile());
            // Fix: write UTF-8 explicitly. FileWriter used the platform default
            // charset, while documents are read as UTF-8 elsewhere in this class.
            try (Writer writer = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8)) {
                writer.write(content);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
/**
 * Apply post-processors to the given (asciidoc) content, line by line.
 * Lines opening a known post-processor block are replaced by the processor's
 * output for the whole block; everything else is copied through.
 *
 * @param generatorName the generator the content was rendered for
 * @param content the (asciidoc) content
 * @return the content after post-processing.
 */
protected String applyPostProcessors(String generatorName, String content) {
    StringBuilder result = new StringBuilder();
    Iterator<String> it = Arrays.asList(content.split("\r?\n")).iterator();
    while (it.hasNext()) {
        String line = it.next();
        String trimmed = line.trim();
        if (PostProcessor.isBlockDeclaration(trimmed)) {
            String name = PostProcessor.getProcessorName(trimmed);
            String[] attributes = PostProcessor.getProcessorAttributes(trimmed);
            PostProcessor postProcessor = getPostProcessor(name);
            if (postProcessor == null) {
                // Unknown processor: keep the declaration line verbatim.
                result.append(line);
            } else {
                // Consume the block's content and replace it with the output.
                String block = PostProcessor.getBlockContent(it);
                result.append(postProcessor.process(generatorName, block, attributes));
            }
        } else {
            result.append(line);
        }
        if (it.hasNext()) {
            result.append("\n");
        }
    }
    return result.toString();
}
/**
 * Ensures {@code dir} exists as a directory, creating it (and any missing
 * parents) when absent.
 *
 * @throws DocGenException when the path exists as a file or cannot be created
 */
private void ensureDir(File dir) {
    if (!dir.exists()) {
        if (!dir.mkdirs()) {
            throw new DocGenException("could not create dir " + dir.getAbsolutePath());
        }
    } else if (!dir.isDirectory()) {
        throw new DocGenException("File " + dir.getAbsolutePath() + " is not a dir");
    }
}
/**
 * Replace {@code ${var}} by the variable value passed to the annotation processor.
 *
 * @param content the content
 * @return the content with variable values
 */
public String applyVariableSubstitution(String content) {
    // Every processor option is a candidate variable.
    for (Map.Entry<String, String> entry : processingEnv.getOptions().entrySet()) {
        content = content.replace("${" + entry.getKey() + "}", entry.getValue());
    }
    return content;
}
}
|
package org.broadinstitute.hellbender.tools.exome;
import org.broadinstitute.hellbender.cmdline.*;
import org.broadinstitute.hellbender.cmdline.programgroups.CopyNumberProgramGroup;
import org.broadinstitute.hellbender.utils.segmenter.RCBSSegmenter;
import java.io.File;
@CommandLineProgramProperties(
summary = "Segment genomic data into regions of constant copy-ratio",
oneLineSummary = "Segment genomic data into regions of constant copy-ratio",
programGroup = CopyNumberProgramGroup.class
)
public final class PerformSegmentation extends CommandLineProgram {
public static final String TARGET_WEIGHT_FILE_LONG_NAME= "targetWeights";
public static final String TARGET_WEIGHT_FILE_SHORT_NAME = "tw";
public final static String ALPHA_LONG_NAME="alpha";
public final static String ALPHA_SHORT_NAME="alpha";
public final static String NPERM_LONG_NAME="nperm";
public final static String NPERM_SHORT_NAME="nperm";
public final static String PMETHOD_LONG_NAME="pmethod";
public final static String PMETHOD_SHORT_NAME="pmethod";
public final static String MINWIDTH_LONG_NAME="minWidth";
public final static String MINWIDTH_SHORT_NAME="minWidth";
public final static String KMAX_LONG_NAME="kmax";
public final static String KMAX_SHORT_NAME="kmax";
public final static String NMIN_LONG_NAME="nmin";
public final static String NMIN_SHORT_NAME="nmin";
public final static String ETA_LONG_NAME="eta";
public final static String ETA_SHORT_NAME="eta";
public final static String TRIM_LONG_NAME="trim";
public final static String TRIM_SHORT_NAME="trim";
public final static String UNDOSPLITS_LONG_NAME="undoSplits";
public final static String UNDOSPLITS_SHORT_NAME="undoSplits";
public final static String UNDOPRUNE_LONG_NAME="undoPrune";
public final static String UNDOPRUNE_SHORT_NAME="undoPrune";
public final static String UNDOSD_LONG_NAME="undoSD";
public final static String UNDOSD_SHORT_NAME="undoSD";
@Argument(
doc = "Name of the sample being segmented",
fullName = ExomeStandardArgumentDefinitions.SAMPLE_LONG_NAME,
optional = false
)
protected String sampleName;
@Argument(
doc = "Genomic targets file",
shortName = ExomeStandardArgumentDefinitions.TARGET_FILE_SHORT_NAME,
fullName = ExomeStandardArgumentDefinitions.TARGET_FILE_LONG_NAME,
optional = false
)
protected String tangentFile;
@Argument(
doc = "Full path to the outputted segment file",
shortName = StandardArgumentDefinitions.OUTPUT_SHORT_NAME,
fullName = StandardArgumentDefinitions.OUTPUT_LONG_NAME,
optional = false
)
protected String outFile;
@Argument(
doc = "If input data has had a log2 transform applied",
shortName = ExomeStandardArgumentDefinitions.LOG2_SHORT_NAME,
fullName = ExomeStandardArgumentDefinitions.LOG2_LONG_NAME,
optional = true
)
protected Boolean log = false;
@Argument(
doc = "File with target weights. This is the 1/var(post-projected targets for each normal). " +
"Listed one value per line in plain text. Values of zero or less, Nan, Inf, and -Inf are not " +
"acceptable. Must have the same number of values as there are in the tangentFile.",
shortName = TARGET_WEIGHT_FILE_SHORT_NAME,
fullName = TARGET_WEIGHT_FILE_LONG_NAME,
optional = true
)
protected File weightFile = null;
@Argument(
doc = "(Advanced) Please see https:
shortName = ALPHA_SHORT_NAME,
fullName = ALPHA_LONG_NAME,
optional = true
)
protected Double alpha = 0.01;
@Argument(
doc = "(Advanced) Please see https:
shortName = NPERM_SHORT_NAME,
fullName = NPERM_LONG_NAME,
optional = true
)
protected Integer nperm = 10000;
@Argument(
doc = "(Advanced) Please see https:
shortName = PMETHOD_SHORT_NAME,
fullName = PMETHOD_LONG_NAME,
optional = true
)
protected RCBSSegmenter.PMethod pmethod = RCBSSegmenter.PMethod.HYBRID;
@Argument(
doc = "(Advanced) Please see https:
shortName = MINWIDTH_SHORT_NAME,
fullName = MINWIDTH_LONG_NAME,
optional = true
)
protected Integer minWidth = 2;
@Argument(
doc = "(Advanced) Please see https:
shortName = KMAX_SHORT_NAME,
fullName = KMAX_LONG_NAME,
optional = true
)
protected Integer kmax = 25;
@Argument(
doc = "(Advanced) Please see https:
shortName = NMIN_SHORT_NAME,
fullName = NMIN_LONG_NAME,
optional = true
)
protected Integer nmin = 200;
@Argument(
doc = "(Advanced) Please see https:
shortName = ETA_SHORT_NAME,
fullName = ETA_LONG_NAME,
optional = true
)
protected Double eta = 0.05;
@Argument(
doc = "(Advanced) Please see https:
shortName = TRIM_SHORT_NAME,
fullName = TRIM_LONG_NAME,
optional = true
)
protected Double trim = 0.025;
@Argument(
doc = "(Advanced) Please see https:
shortName = UNDOSPLITS_SHORT_NAME,
fullName = UNDOSPLITS_LONG_NAME,
optional = true
)
protected RCBSSegmenter.UndoSplits undoSplits = RCBSSegmenter.UndoSplits.NONE;
@Argument(
doc = "(Advanced) Please see https:
shortName = UNDOPRUNE_SHORT_NAME,
fullName = UNDOPRUNE_LONG_NAME,
optional = true
)
protected Double undoPrune = 0.05;
@Argument(
doc = "(Advanced) Please see https:
shortName = UNDOSD_SHORT_NAME,
fullName = UNDOSD_LONG_NAME,
optional = true
)
protected Integer undoSD = 3;
@Override
protected Object doWork() {
applySegmentation(sampleName, tangentFile, outFile);
return "Success";
}
private void applySegmentation(String sampleName, String tangentFile, String outFile) {
RCBSSegmenter.writeSegmentFile(sampleName, tangentFile, outFile, log, weightFile, alpha, nperm, pmethod,
minWidth, kmax, nmin, eta, trim, undoSplits, undoPrune, undoSD);
}
}
|
package org.corpus_tools.annis.gui.admin.reflinks;
import com.vaadin.data.Binder;
import com.vaadin.data.Binder.Binding;
import com.vaadin.data.provider.Query;
import com.vaadin.data.provider.QuerySortOrder;
import com.vaadin.data.provider.Sort;
import com.vaadin.ui.Grid;
import com.vaadin.ui.Grid.Column;
import com.vaadin.ui.Panel;
import com.vaadin.ui.TextField;
import com.vaadin.ui.components.grid.HeaderRow;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import org.corpus_tools.annis.gui.AnnisUI;
import org.corpus_tools.annis.gui.components.ExceptionDialog;
import org.corpus_tools.annis.gui.query_references.UrlShortener;
import org.corpus_tools.annis.gui.query_references.UrlShortenerEntry;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.vaadin.artur.spring.dataprovider.FilterablePageableDataProvider;
/**
 * Admin panel listing the URL-shortener (reference link) entries in an
 * editable grid; the temporary URL of each entry can be edited in place.
 */
public class ReferenceLinkEditor extends Panel {

    private static final long serialVersionUID = 6191359393713574090L;

    private final Grid<UrlShortenerEntry> grid;
    private final TextField txtFilterId;
    // Backing data provider; created in attach() once the owning UI is known.
    private FilterablePageableDataProvider<UrlShortenerEntry, Object> dataProvider;

    public ReferenceLinkEditor() {
        grid = new Grid<>();
        grid.setSizeFull();
        Binder<UrlShortenerEntry> binder = grid.getEditor().getBinder();
        Column<UrlShortenerEntry, UUID> idColumn = grid.addColumn(UrlShortenerEntry::getId);
        idColumn.setCaption("UUID");
        Column<UrlShortenerEntry, Date> createdColumn = grid.addColumn(UrlShortenerEntry::getCreated);
        createdColumn.setCaption("Timestamp");
        Column<UrlShortenerEntry, String> ownerColumn = grid.addColumn(UrlShortenerEntry::getOwner);
        ownerColumn.setCaption("Created by");
        Column<UrlShortenerEntry, URI> temporaryColumn =
            grid.addColumn(UrlShortenerEntry::getTemporaryUrl);
        temporaryColumn.setCaption("Temporary URL");
        // Editable temporary URL: empty text clears it, anything else is parsed
        // as a URI; the entry is saved immediately through the repository.
        TextField txtTemporary = new TextField();
        Binding<UrlShortenerEntry, String> temporaryBinding =
            binder.bind(txtTemporary, entry -> {
                if (entry.getTemporaryUrl() == null) {
                    return "";
                } else {
                    return entry.getTemporaryUrl().toString();
                }
            }, (entry, value) -> {
                if (value == null || value.isEmpty()) {
                    entry.setTemporaryUrl(null);
                } else {
                    try {
                        entry.setTemporaryUrl(new URI(value));
                    } catch (URISyntaxException ex) {
                        // Invalid URI: report to the user, keep the old value.
                        ExceptionDialog.show(ex, getUI());
                    }
                }
                if (getUI() instanceof AnnisUI) {
                    AnnisUI annisUI = (AnnisUI) getUI();
                    UrlShortener shortener = annisUI.getUrlShortener();
                    shortener.getRepo().save(entry);
                }
            });
        temporaryColumn.setEditorBinding(temporaryBinding);
        Column<UrlShortenerEntry, URI> urlColumn = grid.addColumn(UrlShortenerEntry::getUrl);
        urlColumn.setCaption("URL");
        HeaderRow filterRow = grid.appendHeaderRow();
        txtFilterId = new TextField();
        txtFilterId.setPlaceholder("Filter by UUID");
        txtFilterId.setWidthFull();
        // NOTE(review): changing the filter only triggers refreshAll(); the
        // provider's fetchFromBackEnd below never uses the filter value, so
        // UUID filtering appears unfinished -- confirm intended behavior.
        txtFilterId.addValueChangeListener((e) -> {
            if (dataProvider != null) {
                dataProvider.refreshAll();
            }
        });
        filterRow.getCell(idColumn).setComponent(txtFilterId);
        grid.getEditor().setEnabled(true);
        grid.getEditor().setBuffered(true);
    }

    /**
     * Creates the data provider once the component is attached to an
     * {@link AnnisUI}, which owns the URL-shortener repository.
     */
    @Override
    public void attach() {
        super.attach();
        if (getUI() instanceof AnnisUI) {
            AnnisUI annisUI = (AnnisUI) getUI();
            UrlShortener shortener = annisUI.getUrlShortener();
            dataProvider = new FilterablePageableDataProvider<UrlShortenerEntry, Object>() {
                private static final long serialVersionUID = -1727729720680112512L;

                @Override
                protected Page<UrlShortenerEntry> fetchFromBackEnd(
                    Query<UrlShortenerEntry, Object> query, Pageable pageable) {
                    return shortener.getRepo().findAll(pageable);
                }

                @Override
                protected List<QuerySortOrder> getDefaultSortOrders() {
                    // Default sort: ascending by entry id.
                    return Sort.asc("id").build();
                }

                @Override
                protected int sizeInBackEnd(Query<UrlShortenerEntry, Object> query) {
                    return (int) shortener.getRepo().count();
                }
            };
            grid.setDataProvider(dataProvider);
        }
        setContent(grid);
    }
}
|
package com.percero.amqp;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import javax.annotation.Resource;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.log4j.Logger;
import org.codehaus.jackson.map.ObjectMapper;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.springframework.amqp.AmqpIOException;
import org.springframework.amqp.core.AmqpAdmin;
import org.springframework.amqp.core.AmqpTemplate;
import org.springframework.amqp.core.Message;
import org.springframework.amqp.core.MessageProperties;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer;
import org.springframework.amqp.support.converter.ClassMapper;
import org.springframework.amqp.support.converter.DefaultClassMapper;
import org.springframework.amqp.support.converter.MessageConversionException;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.percero.agents.sync.access.IAccessManager;
import com.percero.agents.sync.access.RedisKeyUtils;
import com.percero.agents.sync.datastore.ICacheDataStore;
import com.percero.agents.sync.services.IPushSyncHelper;
import com.percero.agents.sync.vo.BaseDataObject;
import com.percero.agents.sync.vo.IJsonObject;
import com.percero.agents.sync.vo.PushUpdateResponse;
import com.percero.agents.sync.vo.SyncResponse;
import com.rabbitmq.client.ShutdownSignalException;
import edu.emory.mathcs.backport.java.util.Arrays;
import org.slf4j.*;
/**
 * {@link IPushSyncHelper} implementation that pushes sync/push payloads to
 * clients over RabbitMQ, using one queue per client ID. Also runs a scheduled
 * task that prunes dead or idle client queues via the RabbitMQ management
 * HTTP API.
 */
@Component
public class RabbitMQPushSyncHelper implements IPushSyncHelper, ApplicationContextAware {

    private static Logger logger = Logger.getLogger(RabbitMQPushSyncHelper.class);

    public static final String DEFAULT_CHARSET = "UTF-8";
    // Charset used to encode message bodies into bytes.
    private volatile String defaultCharset = DEFAULT_CHARSET;
    // Stamps the payload class into the message headers so consumers can map it back.
    private ClassMapper classMapper = new DefaultClassMapper();

    @Autowired
    ObjectMapper objectMapper;
    @Autowired
    AmqpTemplate template;
    @Autowired @Value("$pf{gateway.rabbitmq.durable:false}")
    Boolean durableQueues = false;
    @Autowired
    IAccessManager accessManager;

    // RabbitMQ Components
    @Resource
    AmqpAdmin amqpAdmin;
    AbstractMessageListenerContainer rabbitMessageListenerContainer;
    @Resource @Qualifier("defaultListenerContainer")
    public void setRabbitMessageListenerContainer(AbstractMessageListenerContainer container){
        rabbitMessageListenerContainer = container;
    }

    // RabbitMQ environment variables.
    @Autowired @Value("$pf{gateway.rabbitmq.admin_port:15672}")
    int rabbitAdminPort = 15672;
    @Autowired @Value("$pf{gateway.rabbitmq.login:guest}")
    String rabbitLogin = "guest";
    @Autowired @Value("$pf{gateway.rabbitmq.password:guest}")
    String rabbitPassword = "guest";
    @Autowired @Value("$pf{gateway.rabbitmq.host:localhost}")
    String rabbitHost = null;
    @Autowired @Value("$pf{gateway.rabbitmq.queue_timeout:43200000}") // 43200000 ms = 12 hours (original comment wrongly said 8)
    long rabbitQueueTimeout = 43200000;

    @Autowired
    ICacheDataStore cacheDataStore;
    public void setCacheDataStore(ICacheDataStore cacheDataStore) {
        this.cacheDataStore = cacheDataStore;
    }

    /**
     * Serializes the JSON string into an AMQP message (JSON content type) and
     * sends it to the given routing key. Errors are logged and swallowed
     * (pushes are deliberately best-effort).
     */
    @SuppressWarnings("rawtypes")
    protected void pushJsonToRouting(String objectJson, Class objectClass, String routingKey) {
        try {
            Message convertedMessage = toMessage(objectJson, objectClass, MessageProperties.CONTENT_TYPE_JSON);
            template.send(routingKey, convertedMessage);
        }
        catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Sends an already-converted message to the given routing key.
     * Errors are logged and swallowed (best-effort push).
     */
    protected void pushMessageToRouting(Message convertedMessage, String routingKey) {
        try {
            template.send(routingKey, convertedMessage);
        }
        catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Like {@link #pushJsonToRouting} but passes CONTENT_TYPE_BYTES as the
     * content-encoding hint. NOTE(review): {@link #createMessage} ignores that
     * hint and {@link #toMessage(String, Class, String)} always sets the JSON
     * content type, so the resulting message is identical to a JSON push.
     */
    @SuppressWarnings("rawtypes")
    protected void pushStringToRouting(String objectJson, Class objectClass, String routingKey) {
        try {
            Message convertedMessage = toMessage(objectJson, objectClass, MessageProperties.CONTENT_TYPE_BYTES);
            template.send(routingKey, convertedMessage);
        }
        catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Builds an AMQP message around the given JSON string with default
     * properties (JSON content type, default charset encoding header).
     *
     * @param contentEncoding currently ignored downstream — kept for interface
     *        compatibility.
     */
    @SuppressWarnings("rawtypes")
    public final Message toMessage(String objectJson, Class objectClass, String contentEncoding)
            throws MessageConversionException {
        MessageProperties messageProperties = new MessageProperties();
        messageProperties.setContentType(MessageProperties.CONTENT_TYPE_JSON);
        messageProperties.setContentEncoding(this.defaultCharset);
        return toMessage(objectJson, objectClass, messageProperties, contentEncoding);
    }

    /** Builds an AMQP message around the given JSON string using the supplied properties. */
    @SuppressWarnings("rawtypes")
    public final Message toMessage(String objectJson, Class objectClass, MessageProperties messageProperties, String contentEncoding)
            throws MessageConversionException {
        return createMessage(objectJson, objectClass, messageProperties, contentEncoding);
    }

    /**
     * Encodes the string to bytes using the default charset, records the
     * content length, and stamps the payload class into the properties.
     *
     * @param contentEncoding unused — see {@link #toMessage(String, Class, String)}
     * @throws MessageConversionException if the configured charset is unsupported
     */
    @SuppressWarnings("rawtypes")
    protected Message createMessage(String aString, Class objectClass, MessageProperties messageProperties, String contentEncoding)
            throws MessageConversionException {
        byte[] bytes;
        try {
            bytes = aString.getBytes(this.defaultCharset);
        } catch (UnsupportedEncodingException e) {
            throw new MessageConversionException("Failed to convert Message content", e);
        }
        // getBytes either succeeds or throws, so bytes is always non-null here
        // (the original's null check was dead code).
        messageProperties.setContentLength(bytes.length);
        classMapper.fromClass(objectClass, messageProperties);
        return new Message(bytes, messageProperties);
    }

    /** Pushes a single SyncResponse to one client's queue. */
    public void pushSyncResponseToClient(SyncResponse anObject, String clientId) {
        if (anObject != null && StringUtils.hasText(clientId)) {
            pushJsonToRouting(anObject.toJson(objectMapper), anObject.getClass(), clientId);
        }
    }

    /**
     * Pushes the SyncResponse to every listed client. The payload is
     * re-serialized per client because the client ID is embedded in the JSON.
     */
    @SuppressWarnings("rawtypes")
    public void pushSyncResponseToClients(SyncResponse syncResponse, Collection<String> clientIds) {
        if (syncResponse != null && clientIds != null && !clientIds.isEmpty()) {
            Class objectClass = syncResponse.getClass();
            for (String nextClientId : clientIds) {
                syncResponse.setClientId(nextClientId);
                String objectJson = syncResponse.toJson(objectMapper);
                pushJsonToRouting(objectJson, objectClass, nextClientId);
            }
        }
    }

    /**
     * Wraps the object in a PushUpdateResponse and pushes it to each client.
     * NOTE(review): anObject is cast to BaseDataObject below, so callers must
     * pass a BaseDataObject or a ClassCastException results — confirm callers.
     */
    public void pushObjectToClients(Object anObject, Collection<String> listClients) {
        if (anObject != null && listClients != null && !listClients.isEmpty()) {
            // Optimization: serialize the object's JSON once and reuse it per client.
            String objectJson = null;
            if (anObject instanceof IJsonObject) {
                objectJson = ((IJsonObject) anObject).toJson();
            }
            PushUpdateResponse pushUpdateResponse = new PushUpdateResponse();
            pushUpdateResponse.setObjectList(new ArrayList<BaseDataObject>(1));
            pushUpdateResponse.getObjectList().add((BaseDataObject) anObject);
            for (String nextClient : listClients) {
                pushUpdateResponse.setClientId(nextClient);
                pushJsonToRouting(pushUpdateResponse.toJson(objectJson, objectMapper), PushUpdateResponse.class, nextClient);
            }
        }
    }

    @Override
    public void pushStringToRoute(String aString, String routeName) {
        if (StringUtils.hasText(routeName)) {
            pushStringToRouting(aString, String.class, routeName);
        }
    }

    /**
     * Marks a client as end-of-life: purges its queue, sends a final EOL
     * message, and records the client in the EOL set.
     *
     * @return true when the client was removed (or its queue no longer exists)
     */
    @Override
    public Boolean removeClient(String clientId) {
        try {
            if (!cacheDataStore.getSetIsMember(RedisKeyUtils.eolClients(), clientId)) {
                logger.debug("RabbitMQ Removing Client " + clientId);
                Queue clientQueue = new Queue(clientId, durableQueues);
                amqpAdmin.declareQueue(clientQueue);
                // Remove ALL the messages from the queue, since this client is dead and gone.
                amqpAdmin.purgeQueue(clientId, true);
                // If this client hasn't already received an EOL message, send it now.
                pushJsonToRouting("{\"EOL\":true, \"clientId\":\"" + clientId + "\"}", String.class, clientId);
                cacheDataStore.addSetValue(RedisKeyUtils.eolClients(), clientId);
            }
        } catch (AmqpIOException e) {
            // Most likely due to queue already being deleted.
            if (e.getCause() instanceof IOException && e.getCause().getCause() instanceof ShutdownSignalException) {
                ShutdownSignalException sse = (ShutdownSignalException) e.getCause().getCause();
                // Bug fix: the original assigned e.getCause().getMessage() and then
                // immediately overwrote it; only the shutdown-signal message is used.
                String msg = sse.getMessage();
                if (msg != null && msg.contains("reply-text=NOT_FOUND")) {
                    // The queue no longer exists, so we can also remove it from the cache for final termination.
                    cacheDataStore.removeSetValue(RedisKeyUtils.eolClients(), clientId);
                    return true;
                }
            }
            logger.debug("Unable to clear out AMQP queue: " + clientId + ": " + e.getMessage());
            return false;
        } catch (Exception e) {
            // Most likely due to queue already being deleted.
            logger.debug("Unable to clear out AMQP queue: " + clientId + ": " + e.getMessage());
            return false;
        }
        return true;
    }

    /** Deletes the named queue outright and drops it from the EOL set. */
    protected Boolean deleteQueue(String queue) {
        try {
            logger.debug("RabbitMQ Deleting Queue " + queue);
            Queue clientQueue = new Queue(queue, durableQueues);
            amqpAdmin.declareQueue(clientQueue);
            amqpAdmin.deleteQueue(queue);
        } catch (Exception e) {
            // Most likely due to queue already being deleted.
            logger.debug("Unable to clear out AMQP queue: " + queue + " (most likely because it no longer exists)", e);
            return false;
        }
        // Remove queue name from list of EOL Clients.
        cacheDataStore.removeSetValue(RedisKeyUtils.eolClients(), queue);
        return true;
    }

    /**
     * Drains pending messages from the previous client's queue into the new
     * client's queue, then removes the previous client.
     */
    @Override
    public Boolean renameClient(String thePreviousClientId, String clientId) {
        if (!StringUtils.hasText(thePreviousClientId)) {
            logger.warn("RabbitMQ renameClient previous client not set");
            return false;
        }
        else if (!StringUtils.hasText(clientId)) {
            logger.warn("RabbitMQ renameClient client not set");
            return false;
        }
        else if (clientId.equalsIgnoreCase(thePreviousClientId)) {
            logger.warn("RabbitMQ renameClient previous client same as client");
            return true;
        }
        // Attempt to move messages from the previous client queue to the new one.
        try {
            Message nextExistingMessage;
            while ((nextExistingMessage = template.receive(thePreviousClientId)) != null) {
                template.send(clientId, nextExistingMessage);
            }
        } catch (AmqpIOException e) {
            // Most likely due to queue already being deleted.
            boolean queueDoesNotExist = false;
            if (e.getCause() instanceof IOException && e.getCause().getCause() instanceof ShutdownSignalException) {
                ShutdownSignalException sse = (ShutdownSignalException) e.getCause().getCause();
                // Bug fix: removed the dead first assignment of msg (it was
                // immediately overwritten with the shutdown-signal message).
                String msg = sse.getMessage();
                if (msg != null && msg.contains("reply-text=NOT_FOUND")) {
                    queueDoesNotExist = true;
                }
            }
            if (!queueDoesNotExist) {
                logger.debug("Unable to move messages from AMQP queue " + thePreviousClientId + " to " + clientId + ": " + e.getMessage());
            }
        } catch (Exception e) {
            // Most likely due to queue already being deleted.
            logger.debug("Unable to move messages from AMQP queue " + thePreviousClientId + " to " + clientId, e);
        }
        return removeClient(thePreviousClientId);
    }

    // SCHEDULED TASKS

    /**
     * Dedicated lock guarding the validatingQueues flag. Bug fix: the original
     * synchronized on the Boolean field itself, which is reassigned between the
     * interned Boolean.TRUE/FALSE instances — different threads could therefore
     * lock different objects, so the guard provided no reliable mutual exclusion.
     */
    private final Object validatingQueuesLock = new Object();
    private boolean validatingQueues = false;

    /**
     * Scheduled sweep over all RabbitMQ queues (via the management HTTP API)
     * that deletes empty/consumer-less EOL queues and logs out clients whose
     * idle queues are no longer valid.
     */
    // @Scheduled(fixedRate=600000) // 10 Minutes
    // @Scheduled(fixedRate=30000) // 30 Seconds
    @Scheduled(fixedRate=300000) // 5 Minutes
    public void validateQueues() {
        synchronized (validatingQueuesLock) {
            if (validatingQueues) {
                // A previous sweep is still running.
                return;
            }
            validatingQueues = true;
        }
        // Bug fix: everything below runs inside try/finally so the flag is always
        // cleared. The original's early "no host" return left the flag stuck at
        // true, permanently disabling this scheduled task.
        try {
            String host = rabbitHost;
            if (!StringUtils.hasText(host)) {
                // No Rabbit host configured? Very strange, but no sense in moving forward here...
                logger.error("No RabbitMQ host configured?");
                return;
            }
            String uri = "http://" + host + ":" + rabbitAdminPort + "/api/queues/";
            DefaultHttpClient httpClient = new DefaultHttpClient();
            try {
                httpClient.getCredentialsProvider().setCredentials(new AuthScope(host, rabbitAdminPort), new UsernamePasswordCredentials(rabbitLogin, rabbitPassword));
                HttpGet httpGet = new HttpGet(uri);
                HttpResponse r = httpClient.execute(httpGet);
                StringWriter writer = new StringWriter();
                InputStream is = r.getEntity().getContent();
                if (r.getEntity().getContentEncoding() != null) {
                    String encoding = r.getEntity().getContentEncoding().getValue();
                    IOUtils.copy(is, writer, encoding);
                }
                else {
                    IOUtils.copy(is, writer);
                }
                String theString = writer.toString();
                JsonParser parser = new JsonParser();
                JsonElement jsonQueues = parser.parse(theString);
                JsonArray jsonQueuesArray = jsonQueues.getAsJsonArray();
                if (jsonQueuesArray != null) {
                    int numQueues = jsonQueuesArray.size();
                    logger.debug("Found " + numQueues + " RabbitMQ Queues to validate...");
                    // Bug fix: guard against a negative initial capacity when there are
                    // fewer live queues than protected system queue names.
                    Set<String> queueNamesToCheck = new HashSet<String>(Math.max(16, numQueues - queueNames.size()));
                    for (JsonElement nextJsonQueue : jsonQueuesArray) {
                        JsonObject nextJsonQueueObject = nextJsonQueue.getAsJsonObject();
                        JsonElement nextJsonQueueName = nextJsonQueueObject.get("name");
                        if (nextJsonQueueName == null) {
                            continue;
                        }
                        String nextQueueName = nextJsonQueueName.getAsString();
                        // An EOL client whose queue is empty can be deleted immediately.
                        if (cacheDataStore.getSetIsMember(RedisKeyUtils.eolClients(), nextQueueName)) {
                            JsonElement nextJsonQueueMessages = nextJsonQueueObject.get("messages");
                            if (nextJsonQueueMessages != null && nextJsonQueueMessages.getAsInt() <= 0) {
                                logger.debug("Deleting EOL empty queue " + nextQueueName);
                                deleteQueue(nextQueueName);
                                continue;
                            }
                        }
                        JsonElement nextJsonQueueConsumers = nextJsonQueueObject.get("consumers");
                        if (nextJsonQueueConsumers != null) {
                            // If the queue has consumers, then leave it alone.
                            int numConsumers = nextJsonQueueConsumers.getAsInt();
                            if (numConsumers == 0) {
                                // If this queue is in the EOL list, then it can simply be deleted.
                                if (cacheDataStore.getSetIsMember(RedisKeyUtils.eolClients(), nextQueueName)) {
                                    logger.debug("Deleting EOL no consumers queue " + nextQueueName);
                                    deleteQueue(nextQueueName);
                                    continue;
                                }
                            }
                            else {
                                // Queue has consumers, so leave alone for now.
                                continue;
                            }
                        }
                        JsonElement nextJsonQueueIdleSince = nextJsonQueueObject.get("idle_since");
                        if (nextJsonQueueIdleSince == null) {
                            logger.debug("Unable to determine idle since time, ignoring queue " + nextQueueName);
                            continue;
                        }
                        try {
                            String strIdleSince = nextJsonQueueIdleSince.getAsString();
                            DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
                            DateTime dateTime = formatter.withOffsetParsed().parseDateTime(strIdleSince);
                            if (dateTime != null) {
                                DateTime currentDateTime = new DateTime(System.currentTimeMillis());
                                currentDateTime = currentDateTime.toDateTime(dateTime.getZone());
                                long timeDiffInMs = currentDateTime.toDate().getTime() - dateTime.toDate().getTime();
                                if (timeDiffInMs < rabbitQueueTimeout) {
                                    // Queue has NOT timed out yet.
                                    continue;
                                }
                            }
                        } catch (Exception e) {
                            logger.debug("Error getting idle since for queue " + nextQueueName, e);
                            continue;
                        }
                        if (StringUtils.hasText(nextQueueName) && !queueNames.contains(nextQueueName)) {
                            // Not a protected system queue — candidate for client validation.
                            queueNamesToCheck.add(nextQueueName);
                        }
                    }
                    // Check to see if each queue name is a valid client.
                    if (!queueNamesToCheck.isEmpty()) {
                        Set<String> validClients = accessManager.validateClients(queueNamesToCheck);
                        // Remove all valid clients from the queue names.
                        queueNamesToCheck.removeAll(validClients);
                        // Now log out the remaining INVALID queues' clients.
                        for (String nextQueueName : queueNamesToCheck) {
                            logger.debug("RabbitMQ Logging out client " + nextQueueName);
                            accessManager.logoutClient(nextQueueName, true);
                        }
                    }
                }
            } catch (ClientProtocolException e) {
                logger.debug(e);
            } catch (IOException e) {
                logger.debug(e);
            } catch (Exception e) {
                logger.warn(e);
            } finally {
                // Bug fix: release pooled HTTP connections (the original leaked them).
                httpClient.getConnectionManager().shutdown();
            }
        } finally {
            synchronized (validatingQueuesLock) {
                validatingQueues = false;
            }
        }
    }

    // Queue names that must never be pruned by validateQueues().
    private Collection<String> queueNames = null;
    private ApplicationContext applicationContext = null;

    /**
     * Captures the application context and builds the protected-queue-name set
     * from the hard-coded system names plus every Queue bean in the context.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void setApplicationContext(ApplicationContext applicationContext)
            throws BeansException {
        this.applicationContext = applicationContext;
        Map<String, Queue> queues = this.applicationContext.getBeansOfType(Queue.class);
        // Make sure these queue names are protected.
        String[] strSaveQueueNames = {"authenticateOAuthCode", "42", "authenticateOAuthAccessToken", "41",
                "authenticateUserAccount", "getServiceUsers", "getOAuthRequestToken",
                "getRegAppOAuths", "getRegisteredApplication", "getAllServiceProviders",
                "logoutUser", "testCall", "validateUserByToken", "17", "disconnectAuth",
                "reconnect", "connect", "hibernate", "upgradeClient", "disconnect", "logout", "create",
                "update", "processTransaction", "getChangeWatcher", "findById", "findByIds",
                "findByExample", "countAllByName", "getAllByName", "runQuery", "runProcess",
                "createObject", "putObject", "removeObject", "updatesReceived", "deletesReceived",
                "searchByExample", "delete", "getAccessor", "getHistory", "changeWatcher"};
        queueNames = new HashSet<String>(queues.size() + strSaveQueueNames.length);
        queueNames.addAll(Arrays.asList(strSaveQueueNames));
        for (Map.Entry<String, Queue> nextMapEntry : queues.entrySet()) {
            queueNames.add(nextMapEntry.getValue().getName());
        }
    }
}
|
package javax.time.period;
import java.io.Serializable;
import java.util.Arrays;
import javax.time.CalendricalException;
import javax.time.Duration;
import javax.time.MathUtils;
import javax.time.calendar.ISOChronology;
import javax.time.calendar.PeriodUnit;
/**
 * A period of time measured using a single unit, such as '3 Days' or '65 Seconds'.
 * <p>
 * {@code PeriodField} is an immutable period that stores an amount of human-scale
 * time for a single unit. For example, humans typically measure periods of time
 * in units of years, months, days, hours, minutes and seconds. These concepts are
 * defined by instances of {@link PeriodUnit} in the chronology classes. This class
 * allows an amount to be specified for one of the units, such as '3 Days' or '65 Seconds'.
 * <p>
 * Basic mathematical operations are provided - plus(), minus(), multipliedBy(),
 * dividedBy(), negated() and abs(), all of which return a new instance.
 * <p>
 * {@code PeriodField} can store a unit of any kind which makes it usable with
 * any calendar system.
 * <p>
 * PeriodField is immutable and thread-safe.
 *
 * @author Stephen Colebourne
 */
public final class PeriodField
        implements PeriodProvider, Comparable<PeriodField>, Serializable {

    /**
     * The serialization version.
     */
    private static final long serialVersionUID = 1L;

    /**
     * The amount of the period.
     */
    private final long amount;
    /**
     * The unit the period is measured in.
     */
    private final PeriodUnit unit;

    /**
     * Obtains a {@code PeriodField} from an amount and unit.
     * <p>
     * The parameters represent the two parts of a phrase like '6 Days'.
     *
     * @param amount  the amount of the period, measured in terms of the unit, positive or negative
     * @param unit  the unit that the period is measured in, not null
     * @return the {@code PeriodField} instance, never null
     */
    public static PeriodField of(long amount, PeriodUnit unit) {
        PeriodFields.checkNotNull(unit, "PeriodUnit must not be null");
        return new PeriodField(amount, unit);
    }

    /**
     * Constructor.
     *
     * @param amount  the amount of the period, measured in terms of the unit, positive or negative
     * @param unit  the unit that the period is measured in, validated not null
     */
    private PeriodField(long amount, PeriodUnit unit) {
        // input pre-validated
        this.amount = amount;
        this.unit = unit;
    }

    /**
     * Checks if this period is zero length.
     * <p>
     * A {@code PeriodField} can be positive, zero or negative.
     * This method checks whether the length is zero.
     *
     * @return true if this period is zero length
     */
    public boolean isZero() {
        return amount == 0;
    }

    /**
     * Checks if this period is positive, excluding zero.
     * <p>
     * A {@code PeriodField} can be positive, zero or negative.
     * This method checks whether the length is greater than zero.
     *
     * @return true if this period is positive, excluding zero
     */
    public boolean isPositive() {
        return amount > 0;
    }

    /**
     * Checks if this period is negative, excluding zero.
     * <p>
     * A {@code PeriodField} can be positive, zero or negative.
     * This method checks whether the length is less than zero.
     *
     * @return true if this period is negative
     */
    public boolean isNegative() {
        return amount < 0;
    }

    /**
     * Gets the amount of this period.
     * <p>
     * For example, in the period '5 Days', the amount is '5'.
     *
     * @return the amount of time of this period, positive or negative
     */
    public long getAmount() {
        return amount;
    }

    /**
     * Gets the amount of this period, converted to an {@code int}.
     * <p>
     * For example, in the period '5 Days', the amount is '5'.
     *
     * @return the amount of time of this period, positive or negative
     * @throws ArithmeticException if the amount exceeds the capacity of an {@code int}
     */
    public int getAmountInt() {
        return MathUtils.safeToInt(amount);
    }

    /**
     * Gets the unit of this period.
     * <p>
     * For example, in the period '5 Days', the unit is 'Days'.
     *
     * @return the period unit, never null
     */
    public PeriodUnit getUnit() {
        return unit;
    }

    /**
     * Returns a copy of this period with a different amount of time.
     * <p>
     * Calling this method returns a new period with the same unit but different amount.
     * For example, it could be used to change '3 Days' to '5 Days'.
     *
     * @param amount  the amount of time to set in the returned period, positive or negative
     * @return a {@code PeriodField} based on this period with the specified amount, never null
     */
    public PeriodField withAmount(long amount) {
        if (amount == this.amount) {
            return this;
        }
        return new PeriodField(amount, unit);
    }

    /**
     * Returns a copy of this period with a different unit.
     * <p>
     * Calling this method returns a new period with the same amount but different unit.
     * For example, it could be used to change '3 Days' to '3 Months'.
     * Note that despite its name, this method replaces the unit, not a rule.
     *
     * @param unit  the unit to set in the returned period, not null
     * @return a {@code PeriodField} based on this period with the specified unit, never null
     */
    public PeriodField withRule(PeriodUnit unit) {
        PeriodFields.checkNotNull(unit, "PeriodUnit must not be null");
        if (unit.equals(this.unit)) {
            return this;
        }
        return new PeriodField(amount, unit);
    }

    /**
     * Returns a copy of this period with the specified period added.
     * <p>
     * The specified period must have the same unit as this period.
     * This instance is immutable and unaffected by this method call.
     *
     * @param period  the period to add, with the same unit as this period, not null
     * @return a {@code PeriodField} based on this period with the specified period added, never null
     * @throws IllegalArgumentException if the specified period has a different unit
     * @throws ArithmeticException if the calculation overflows
     */
    public PeriodField plus(PeriodField period) {
        PeriodFields.checkNotNull(period, "PeriodField must not be null");
        if (period.getUnit().equals(unit) == false) {
            throw new IllegalArgumentException("Cannot add '" + period + "' to '" + this + "' as the units differ");
        }
        return plus(period.getAmount());
    }

    /**
     * Returns a copy of this period with the specified period added.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param amount  the period to add, measured in the unit of the period, positive or negative
     * @return a {@code PeriodField} based on this period with the specified amount added, never null
     * @throws ArithmeticException if the calculation overflows
     */
    public PeriodField plus(long amount) {
        return withAmount(MathUtils.safeAdd(this.amount, amount));
    }

    /**
     * Returns a copy of this period with the specified period subtracted.
     * <p>
     * The specified period must have the same unit as this period.
     * This instance is immutable and unaffected by this method call.
     *
     * @param period  the period to subtract, with the same unit as this period, not null
     * @return a {@code PeriodField} based on this period with the specified period subtracted, never null
     * @throws IllegalArgumentException if the specified period has a different unit
     * @throws ArithmeticException if the calculation overflows
     */
    public PeriodField minus(PeriodField period) {
        PeriodFields.checkNotNull(period, "PeriodField must not be null");
        if (period.getUnit().equals(unit) == false) {
            throw new IllegalArgumentException("Cannot subtract '" + period + "' from '" + this + "' as the units differ");
        }
        return minus(period.getAmount());
    }

    /**
     * Returns a copy of this period with the specified period subtracted.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param amount  the period to subtract, measured in the unit of the period, positive or negative
     * @return a {@code PeriodField} based on this period with the specified amount subtracted, never null
     * @throws ArithmeticException if the calculation overflows
     */
    public PeriodField minus(long amount) {
        return withAmount(MathUtils.safeSubtract(this.amount, amount));
    }

    /**
     * Returns a copy of this period with the amount multiplied by the specified scalar.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param scalar  the value to multiply by, positive or negative
     * @return a {@code PeriodField} based on this period multiplied by the specified scalar, never null
     * @throws ArithmeticException if the calculation overflows
     */
    public PeriodField multipliedBy(long scalar) {
        return withAmount(MathUtils.safeMultiply(amount, scalar));
    }

    /**
     * Returns a copy of this period with the amount divided by the specified divisor.
     * The calculation uses integer division, thus 3 divided by 2 is 1.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param divisor  the value to divide by, positive or negative
     * @return a {@code PeriodField} based on this period divided by the specified divisor, never null
     * @throws ArithmeticException if the divisor is zero
     */
    public PeriodField dividedBy(long divisor) {
        return withAmount(amount / divisor);
    }

    /**
     * Returns a copy of this period with the amount negated.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @return a {@code PeriodField} based on this period with the amount negated, never null
     * @throws ArithmeticException if the amount is {@code Long.MIN_VALUE}
     */
    public PeriodField negated() {
        return withAmount(MathUtils.safeNegate(amount));
    }

    /**
     * Returns a copy of this period with a positive amount.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @return a {@code PeriodField} based on this period with an absolute amount, never null
     * @throws ArithmeticException if the amount is {@code Long.MIN_VALUE}
     */
    public PeriodField abs() {
        return isNegative() ? negated() : this;
    }

    /**
     * Converts this period to an equivalent in the specified unit.
     * <p>
     * This converts this period to one measured in the specified unit.
     * This uses {@link PeriodUnit#getEquivalentPeriod(PeriodUnit)} to lookup
     * the equivalent period for the unit.
     * <p>
     * For example, '3 Hours' could be converted to '180 Minutes'.
     * <p>
     * This method is equivalent to {@link #toEquivalent(PeriodUnit...)} with a single parameter.
     *
     * @param requiredUnit  the unit to convert to, not null
     * @return a {@code PeriodField} equivalent to this period, never null
     * @throws CalendricalException if this period cannot be converted to the specified unit
     * @throws ArithmeticException if the calculation overflows
     */
    public PeriodField toEquivalent(PeriodUnit requiredUnit) {
        PeriodField equivalent = unit.getEquivalentPeriod(requiredUnit);
        if (equivalent != null) {
            return equivalent.multipliedBy(amount);
        }
        throw new CalendricalException("Unable to convert " + getUnit() + " to " + requiredUnit);
    }

    /**
     * Converts this period to an equivalent in <i>one</i> of the units specified.
     * <p>
     * This converts this period to one measured in one of the specified units.
     * It operates by trying to convert to each unit in turn until one succeeds.
     * As such, it is recommended to specify the units from largest to smallest.
     * <p>
     * For example, '3 Hours' can normally be converted to both minutes and seconds.
     * If the units array contains both 'Minutes' and 'Seconds', then the result will
     * be measured in whichever is first in the array, either '180 Minutes' or '10800 Seconds'.
     *
     * @param requiredUnits  the required unit array, not altered, not null
     * @return a {@code PeriodField} equivalent to this period, never null
     * @throws CalendricalException if this period cannot be converted to any of the units
     * @throws ArithmeticException if the calculation overflows
     */
    public PeriodField toEquivalent(PeriodUnit... requiredUnits) {
        for (PeriodUnit requiredUnit : requiredUnits) {
            PeriodField equivalent = unit.getEquivalentPeriod(requiredUnit);
            if (equivalent != null) {
                return equivalent.multipliedBy(amount);
            }
        }
        throw new CalendricalException("Unable to convert " + getUnit() + " to any requested unit: " + Arrays.toString(requiredUnits));
    }

    /**
     * Converts this period to an estimated duration.
     * <p>
     * Each {@link PeriodUnit} contains an estimated duration for that unit.
     * This method uses that estimate to calculate an estimated duration for
     * this period.
     *
     * @return the estimated duration of this period, positive or negative
     * @throws ArithmeticException if the calculation overflows
     */
    public Duration toEstimatedDuration() {
        return unit.getEstimatedDuration().multipliedBy(amount);
    }

    /**
     * Converts this period to a {@code Duration} based on the standard durations of
     * seconds and nanoseconds.
     * <p>
     * The conversion is based on the {@code ISOChronology} definition of the seconds and
     * nanoseconds units. If this period can be converted to either seconds or nanoseconds
     * then the conversion will succeed, subject to calculation overflow.
     * <p>
     * This conversion can only be used if the duration is being used in a manner
     * compatible with the {@code ISOChronology} definitions of seconds and nanoseconds.
     * This will be the case for most applications - care only needs to be taken if
     * using explicit time-scales.
     *
     * @return the duration of this period based on {@code ISOChronology} fields, never null
     * @throws ArithmeticException if the calculation overflows
     */
    public Duration toDuration() {
        PeriodField equivalent = unit.getEquivalentPeriod(ISOChronology.periodSeconds());
        if (equivalent != null) {
            return equivalent.multipliedBy(amount).toEstimatedDuration();
        }
        equivalent = unit.getEquivalentPeriod(ISOChronology.periodNanos());
        if (equivalent != null) {
            return equivalent.multipliedBy(amount).toEstimatedDuration();
        }
        throw new CalendricalException("Unable to convert " + getUnit() + " to a Duration");
    }

    /**
     * Converts this period to a {@code PeriodFields}.
     * <p>
     * The returned {@code PeriodFields} will always contain the unit even
     * if the amount is zero.
     *
     * @return the equivalent period, never null
     */
    public PeriodFields toPeriodFields() {
        return PeriodFields.of(this);
    }

    /**
     * Compares this period to the specified {@code PeriodField}.
     * <p>
     * The comparison orders first by the unit, then by the amount.
     *
     * @param otherPeriod  the other period to compare to, not null
     * @return the comparator value, negative if less, positive if greater
     */
    public int compareTo(PeriodField otherPeriod) {
        // there are no isGreaterThan/isLessThan methods as they don't make sense
        int cmp = unit.compareTo(otherPeriod.unit);
        if (cmp != 0) {
            return cmp;
        }
        return MathUtils.safeCompare(amount, otherPeriod.amount);
    }

    /**
     * Checks if this period is equal to the specified {@code PeriodField}.
     * <p>
     * The comparison is based on the unit and amount.
     *
     * @param obj  the object to check, null returns false
     * @return true if this period is the same as that specified
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj instanceof PeriodField) {
            PeriodField other = (PeriodField) obj;
            return this.amount == other.amount &&
                    this.unit.equals(other.unit);
        }
        return false;
    }

    /**
     * Returns the hash code for this period.
     *
     * @return a suitable hash code
     */
    @Override
    public int hashCode() {
        // combines the unit's hash with the standard long-to-int hash of the amount
        return unit.hashCode() ^ (int)( amount ^ (amount >>> 32));
    }

    /**
     * Returns a string representation of this period, such as '6 Days'.
     * <p>
     * The format consists of the amount, followed by a space, followed by the unit name.
     *
     * @return a descriptive representation of this period, not null
     */
    @Override
    public String toString() {
        return amount + " " + unit.getName();
    }

}
|
package com.philihp.weblabora.action;
import javax.persistence.EntityManager;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import com.philihp.weblabora.form.CreateGameForm;
import com.philihp.weblabora.jpa.Game;
import com.philihp.weblabora.jpa.State;
import com.philihp.weblabora.jpa.User;
import com.philihp.weblabora.model.Color;
import com.philihp.weblabora.model.GameCountry;
import com.philihp.weblabora.model.GameLength;
import com.philihp.weblabora.model.GamePlayers;
public class CreateGame extends BaseAction {

    /**
     * Creates a new game for the requesting user: persists a fresh board
     * state, seats the user and assigns the four fixed player colors, applies
     * the settings from the submitted form and records the game as the user's
     * active game.
     */
    @Override
    public ActionForward execute(ActionMapping mapping, ActionForm actionForm, HttpServletRequest request,
            HttpServletResponse response, User user) throws Exception {
        final CreateGameForm form = (CreateGameForm) actionForm;
        final EntityManager em = (EntityManager) request.getAttribute("em");

        final State state = new State();
        state.setExplorer(user);
        em.persist(state);

        final Game game = new Game();
        assignSeats(game, user);
        game.setCountry(form.getCountry());
        game.setLength(form.getLength());
        game.setPlayers(form.getPlayers());
        user.setActiveGame(game);
        game.setState(state);
        em.persist(game);

        return mapping.findForward("root");
    }

    /** Seats the creating user in slot 1 and gives each slot its fixed color. */
    private void assignSeats(Game game, User user) {
        game.getPlayer1().setUser(user);
        game.getPlayer1().setColor(Color.RED.toString());
        game.getPlayer1().setMove("");
        game.getPlayer2().setColor(Color.GREEN.toString());
        game.getPlayer2().setMove("");
        game.getPlayer3().setColor(Color.BLUE.toString());
        game.getPlayer3().setMove("");
        game.getPlayer4().setColor(Color.WHITE.toString());
        game.getPlayer4().setMove("");
    }
}
|
package jfdi.ui;
import javafx.geometry.Side;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.CustomMenuItem;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import java.util.LinkedList;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.stream.Collectors;
/**
 * A {@link TextField} that offers auto-completion from a fixed keyword set.
 * Keywords sharing the current text as a case-insensitive prefix are shown in
 * a context menu below the field as the user types.
 *
 * @author Xinan
 */
public class AutoCompleteTextField extends TextField {

    // Case-insensitively sorted dictionary of completable keywords.
    private final SortedSet<String> keywords;
    // Keywords matching the current text; refreshed on every text change.
    private final LinkedList<String> results;
    // Drop-down used to display the current suggestions.
    private final ContextMenu popup;

    public AutoCompleteTextField() {
        super();
        keywords = new TreeSet<>(String::compareToIgnoreCase);
        results = new LinkedList<>();
        popup = new ContextMenu();

        textProperty().addListener((observable, oldValue, newValue) -> {
            if (newValue.isEmpty()) {
                popup.hide();
                return;
            }
            results.clear();
            // All keywords with the current text as prefix: the half-open
            // range [text, text + Character.MAX_VALUE).
            results.addAll(keywords.subSet(newValue, newValue + Character.MAX_VALUE));
            if (results.isEmpty()) {
                popup.hide();
            } else {
                populatePopup(results);
            }
        });

        // Dismiss suggestions whenever focus changes.
        focusedProperty().addListener((observable, oldValue, newValue) -> popup.hide());

        popup.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent event) -> {
            // impl_styleableGetNode is deprecated but is the only way here to
            // learn whether a menu item currently has keyboard focus.
            @SuppressWarnings("deprecation")
            boolean isAnyItemSelected = popup.getItems().stream().map(item -> item.impl_styleableGetNode())
                .anyMatch(node -> node.isFocused());
            if (event.getCode() == KeyCode.ENTER && !isAnyItemSelected) {
                // ENTER without a highlighted suggestion submits the raw input.
                UI.getInstance().triggerEnter();
                results.clear();
            }
        });
    }

    /**
     * Replaces the keyword dictionary with the given set.
     *
     * @param keywords the new set of completable keywords
     */
    public void setKeywords(SortedSet<String> keywords) {
        this.keywords.clear();
        this.keywords.addAll(keywords);
    }

    /** Hides the suggestion popup if it is showing. */
    public void hidePopup() {
        popup.hide();
    }

    /** Applies the first current suggestion, if any, and dismisses the popup. */
    public void selectFirst() {
        if (!results.isEmpty()) {
            select(results.get(0));
            popup.hide();
        }
    }

    // Replaces the field text with the suggestion plus a trailing space and
    // moves the caret to the end of the text.
    private void select(String suggestion) {
        setText(suggestion + " ");
        this.positionCaret(this.getText().length());
    }

    // Rebuilds the popup entries from the given matches and shows the popup
    // below the field if it is not already visible.
    private void populatePopup(LinkedList<String> results) {
        List<CustomMenuItem> menuItems = results.stream()
            .map(Label::new)
            .map(label -> {
                CustomMenuItem menuItem = new CustomMenuItem(label, true);
                menuItem.setOnAction(action -> {
                    select(label.getText());
                    popup.hide();
                });
                return menuItem;
            })
            .collect(Collectors.toCollection(LinkedList::new));
        popup.getItems().setAll(menuItems);
        if (!popup.isShowing()) {
            popup.show(this, Side.BOTTOM, 0, 0);
        }
    }
}
|
package com.raisonne.bd.action.donor;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Map;
import org.apache.log4j.Logger;
import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionSupport;
import com.raisonne.bd.dto.bloodrequest.BloodRequestDTO;
import com.raisonne.bd.dto.bloodrequest.BloodRequestScrollerDTO;
import com.raisonne.bd.util.StateInfoUtils;
/**
 * Common base for all blood-donation Struts2 actions: holds the shared page
 * layout state (frame JSP, title, panel visibility) and the helper that keeps
 * the application-scoped blood-request scroller cache up to date.
 *
 * @author Himanshu Durgapal
 */
public class BaseAction extends ActionSupport{

    // Shared, static logger: one instance for the whole hierarchy and it is
    // not dragged into Action serialization (the field was previously a
    // non-static, non-transient instance field on a Serializable class).
    protected static final Logger log = Logger.getLogger(BaseAction.class);

    private static final long serialVersionUID = 1L;

    // JSP that frames every page rendered through this action hierarchy.
    private String applicationFrame="/WEB-INF/templates/application/applicationframe.jsp";
    // Default browser title; individual actions may override via the setter.
    private String title="Save A Life Today| Welcome to Blood Donation";
    // JSP fragment rendered inside the application frame.
    private String workingTemplate;
    private boolean hideRightPannel=false;
    private boolean hideLeftPannel=false;

    /**
     * Default entry point; simply forwards to the success view.
     */
    public String execute() throws Exception
    {
        return SUCCESS;
    }

    /**
     * Publishes the given blood request into the application-scoped scroller
     * cache so updated requests appear on the site ticker. Does nothing unless
     * the DTO is flagged as updated.
     *
     * @param bloodRequestDTO the request that may need to be cached
     */
    public void updateBloodRequestCache(BloodRequestDTO bloodRequestDTO){
        if(bloodRequestDTO.isBloodRequestUpdated()){
            log.info("Blood request has been updated.Preparing to update cache..");
            @SuppressWarnings("unchecked")
            Map<Integer,BloodRequestScrollerDTO> cache=(Map<Integer,BloodRequestScrollerDTO>)ActionContext.getContext().getApplication().get("bloodRequestCache");
            if (cache == null) {
                // First update since startup: create the cache lazily instead
                // of failing with an NPE on the put below.
                cache = new java.util.HashMap<Integer, BloodRequestScrollerDTO>();
            }
            BloodRequestScrollerDTO dto=new BloodRequestScrollerDTO();
            dto.setBloodGroup(bloodRequestDTO.getBloodGroup());
            dto.setContactNumber(bloodRequestDTO.getMobileNumber());
            dto.setLocation(bloodRequestDTO.getLocation());
            dto.setState(StateInfoUtils.getStates().get(bloodRequestDTO.getState()));
            // Scroller shows a human-readable date such as "01 Jan 2015".
            SimpleDateFormat sdf = new SimpleDateFormat("dd MMM yyyy");
            final String date=sdf.format(bloodRequestDTO.getRequiredDate());
            dto.setRequiredBlooddate(date);
            dto.setUuid(bloodRequestDTO.getUuid());
            cache.put(bloodRequestDTO.getUuid(), dto);
            // Re-publish a synchronized view so concurrent readers are safe.
            ActionContext.getContext().getApplication().put("bloodRequestCache", (Map<Integer, BloodRequestScrollerDTO>)Collections.synchronizedMap(cache));
            log.info("Cache updated successfully.Total object in cache "+cache.size());
        }
    }

    public String getApplicationFrame() {
        return applicationFrame;
    }

    public void setApplicationFrame(String applicationFrame) {
        this.applicationFrame = applicationFrame;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getWorkingTemplate() {
        return workingTemplate;
    }

    public void setWorkingTemplate(String workingTemplate) {
        this.workingTemplate = workingTemplate;
    }

    public boolean isHideRightPannel() {
        return hideRightPannel;
    }

    public void setHideRightPannel(boolean hideRightPannel) {
        this.hideRightPannel = hideRightPannel;
    }

    public boolean isHideLeftPannel() {
        return hideLeftPannel;
    }

    public void setHideLeftPannel(boolean hideLeftPannel) {
        this.hideLeftPannel = hideLeftPannel;
    }
}
|
package org.ihtsdo.buildcloud.rest.controller;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.ihtsdo.buildcloud.rest.controller.helper.HypermediaGenerator;
import org.ihtsdo.buildcloud.core.entity.ReleaseCenter;
import org.ihtsdo.buildcloud.rest.security.IsAuthenticatedAsAdmin;
import org.ihtsdo.buildcloud.rest.security.IsAuthenticatedAsAdminOrReleaseManager;
import org.ihtsdo.buildcloud.rest.security.IsAuthenticatedAsAdminOrReleaseManagerOrReleaseLeadOrUser;
import org.ihtsdo.buildcloud.rest.security.IsAuthenticatedAsGlobalAdmin;
import org.ihtsdo.buildcloud.rest.security.*;
import org.ihtsdo.buildcloud.core.service.PermissionService;
import org.ihtsdo.buildcloud.core.service.PublishService;
import org.ihtsdo.buildcloud.core.service.ReleaseCenterService;
import org.ihtsdo.otf.rest.exception.BusinessServiceException;
import org.ihtsdo.otf.rest.exception.EntityAlreadyExistsException;
import org.ihtsdo.otf.rest.exception.ResourceNotFoundException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.authentication.AnonymousAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Controller;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import springfox.documentation.annotations.ApiIgnore;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.stream.Collectors;
import static org.ihtsdo.buildcloud.core.service.PermissionService.GLOBAL_ROLE;
import static org.ihtsdo.buildcloud.core.service.PermissionService.Role.*;
@Controller
@RequestMapping("/centers")
@Api(value = "Release Center", position = 3)
public class ReleaseCenterController {

    @Autowired
    private ReleaseCenterService releaseCenterService;

    @Autowired
    private PermissionService permissionService;

    @Autowired
    private PublishService publishService;

    @Autowired
    private HypermediaGenerator hypermediaGenerator;

    // Hypermedia link relations attached to every release center resource.
    private static final String[] RELEASE_CENTER_LINKS = {"products", "published"};

    /**
     * Lists the release centers visible to the logged-in user. A center is
     * visible when the user is a global release admin, or holds any recognised
     * role either globally or for the center's code system. Soft-deleted
     * centers are excluded unless {@code includeRemoved} is set.
     */
    @GetMapping
    @ApiOperation(value = "Returns a list all release center for a logged in user",
            notes = "Returns a list of all release centers visible to the currently logged in user.")
    @ResponseBody
    public List<Map<String, Object>> getReleaseCenters(@RequestParam(required = false) boolean includeRemoved, HttpServletRequest request) {
        Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
        if (authentication == null || authentication instanceof AnonymousAuthenticationToken) {
            throw new AccessDeniedException("Access is denied");
        }
        List<ReleaseCenter> centers = releaseCenterService.findAll();
        Map rolesMap = permissionService.getRolesForLoggedInUser();
        centers = centers.stream().filter(center -> (includeRemoved || !center.isRemoved()) &&
                ((rolesMap.containsKey(GLOBAL_ROLE) && ((Set) rolesMap.get(GLOBAL_ROLE)).contains(RELEASE_ADMIN.name()))
                || (!StringUtils.isEmpty(center.getCodeSystem()) &&
                ((rolesMap.containsKey(GLOBAL_ROLE) && (((Set) rolesMap.get(GLOBAL_ROLE)).contains(RELEASE_MANAGER.name())
                || ((Set) rolesMap.get(GLOBAL_ROLE)).contains(RELEASE_LEAD.name())))
                || (rolesMap.containsKey(center.getCodeSystem()) &&
                (((Set) rolesMap.get(center.getCodeSystem())).contains(RELEASE_ADMIN.name()) ||
                ((Set) rolesMap.get(center.getCodeSystem())).contains(RELEASE_MANAGER.name()) ||
                ((Set) rolesMap.get(center.getCodeSystem())).contains(RELEASE_LEAD.name()) ||
                ((Set) rolesMap.get(center.getCodeSystem())).contains(RELEASE_USER.name()) ||
                ((Set) rolesMap.get(center.getCodeSystem())).contains(AUTHOR.name()))))))
        ).collect(Collectors.toList());
        return hypermediaGenerator.getEntityCollectionHypermedia(centers, request, RELEASE_CENTER_LINKS);
    }

    /**
     * Creates a new release center from the supplied JSON body
     * ({@code name}, {@code shortName}, {@code codeSystem}) and returns its
     * hypermedia representation with HTTP 201.
     */
    @PostMapping(value = "", consumes = MediaType.APPLICATION_JSON_VALUE)
    @IsAuthenticatedAsGlobalAdmin
    @ApiOperation(value = "Creates a new Release Center",
            notes = " Creates a new Release Center and returns the newly created release center.")
    public ResponseEntity<Map<String, Object>> createReleaseCenter(@RequestBody(required = false) Map<String, String> json,
            HttpServletRequest request) throws IOException, EntityAlreadyExistsException {
        String name = json.get("name");
        String shortName = json.get("shortName");
        String codeSystem = json.get("codeSystem");
        ReleaseCenter center = releaseCenterService.create(name, shortName, codeSystem);
        boolean currentResource = false;
        Map<String, Object> entityHypermedia = hypermediaGenerator.getEntityHypermedia(center, currentResource, request, RELEASE_CENTER_LINKS);
        return new ResponseEntity<>(entityHypermedia, HttpStatus.CREATED);
    }

    /**
     * Updates a release center's name, short name, code system and removed
     * flag. Changing the code system requires the global release-admin role.
     */
    @PutMapping(value = "/{releaseCenterKey}", consumes = MediaType.APPLICATION_JSON_VALUE)
    @IsAuthenticatedAsAdmin
    @ApiOperation(value = "Updates a release center details",
            notes = "Allows the name, shortName and the visibility of a release center (soft delete) to be changed. "
                    + "Note that the short name is used in the formation of the ‘business key'")
    @ResponseBody
    public Map<String, Object> updateReleaseCenter(@PathVariable String releaseCenterKey,
            @RequestBody(required = false) Map<String, String> json,
            HttpServletRequest request) throws ResourceNotFoundException, BusinessServiceException {
        ReleaseCenter center = releaseCenterService.find(releaseCenterKey);
        String codeSystem = json.get("codeSystem");
        // Compare by value, not reference: '!=' on Strings checks identity
        // only, so an unchanged code system submitted as an equal-but-distinct
        // String would wrongly trip the admin-only branch below (and a real
        // change of interned strings could slip through).
        if (!Objects.equals(codeSystem, center.getCodeSystem())) {
            Map rolesMap = permissionService.getRolesForLoggedInUser();
            Set globalRoles = (Set) rolesMap.get(GLOBAL_ROLE);
            // Guard against a missing global-role entry to avoid an NPE here.
            if (globalRoles == null || !globalRoles.contains(RELEASE_ADMIN.name())) {
                throw new BusinessServiceException("You are not allowed to change Code System. Only Admin Global role has possibility to do this.");
            }
        }
        center.setName(json.get("name"));
        center.setShortName(json.get("shortName"));
        center.setCodeSystem(codeSystem);
        center.setRemoved("true".equalsIgnoreCase(json.get("removed")));
        releaseCenterService.update(center);
        return hypermediaGenerator.getEntityHypermedia(center, false, request, RELEASE_CENTER_LINKS);
    }

    /**
     * Returns the hypermedia representation of a single release center.
     */
    @GetMapping(value = "/{releaseCenterKey}")
    @IsAuthenticatedAsAdminOrReleaseManagerOrReleaseLeadOrUser
    @ApiOperation(value = "Returns a single release center",
            notes = "Returns a single release center for a given releaseCenterBusinessKey")
    @ResponseBody
    public Map<String, Object> getReleaseCenter(@PathVariable String releaseCenterKey, HttpServletRequest request) throws ResourceNotFoundException {
        ReleaseCenter center = getReleaseCenterRequired(releaseCenterKey);
        return hypermediaGenerator.getEntityHypermedia(center, true, request, RELEASE_CENTER_LINKS);
    }

    /**
     * Returns the names of the published release packages for a center.
     */
    @GetMapping(value = "/{releaseCenterKey}/published")
    @IsAuthenticatedAsAdminOrReleaseManagerOrReleaseLeadOrUser
    @ApiOperation(value = "Returns a list published releases names",
            notes = "Returns a list published releases names for a given release center")
    @ResponseBody
    public Map<String, Object> getReleaseCenterPublishedPackages(@PathVariable String releaseCenterKey, HttpServletRequest request) throws ResourceNotFoundException {
        ReleaseCenter center = getReleaseCenterRequired(releaseCenterKey);
        List<String> publishedPackages = publishService.getPublishedPackages(center);
        Map<String, Object> representation = new HashMap<>();
        representation.put("publishedPackages", publishedPackages);
        return hypermediaGenerator.getEntityHypermedia(representation, true, request);
    }

    /**
     * Publishes an uploaded ad-hoc package file for the given release center.
     * The multipart stream is closed by try-with-resources even on failure.
     */
    @PostMapping(value = "/{releaseCenterKey}/published", consumes = MediaType.ALL_VALUE)
    @IsAuthenticatedAsAdminOrReleaseManager
    @ResponseBody
    @ApiIgnore
    public ResponseEntity<Object> publishReleaseCenterPackage(@PathVariable String releaseCenterKey,
            @RequestParam(value = "file") final MultipartFile file, @RequestParam(value = "isComponentIdPublishingRequired", defaultValue = "true") boolean publishComponentIds) throws BusinessServiceException, IOException {
        ReleaseCenter center = getReleaseCenterRequired(releaseCenterKey);
        try (InputStream inputStream = file.getInputStream()) {
            publishService.publishAdHocFile(center, inputStream, file.getOriginalFilename(), file.getSize(), publishComponentIds);
        }
        return new ResponseEntity<>(HttpStatus.CREATED);
    }

    // Looks up a release center by business key, translating "not found" into
    // the ResourceNotFoundException the REST layer maps to HTTP 404.
    private ReleaseCenter getReleaseCenterRequired(String releaseCenterBusinessKey) throws ResourceNotFoundException {
        ReleaseCenter center = releaseCenterService.find(releaseCenterBusinessKey);
        if (center == null) {
            throw new ResourceNotFoundException("Unable to find release center: " + releaseCenterBusinessKey);
        }
        return center;
    }
}
|
package com.robertozagni.algoritmi.uf;
/**
 * The API definition for the Union-Find ADT.<br>
 * <br>
 * We deal with N objects, identified by their id being integers in the range 0 to N-1.<br>
 * Objects that are connected belong to the same connected component, identified by an integer id in the range 0 to N-1.
 *
 * @author roberto.zagni
 */
public interface UnionFind {
    /**
     * Connects objects p and q, merging their connected components.
     *
     * @param p The id of object p, must be between 0 and N-1;
     * @param q The id of object q, must be between 0 and N-1;
     * @throws IndexOutOfBoundsException if the provided ids are out of the 0 - N-1 interval.
     */
    void union(int p, int q);
    /**
     * Finds the identifier of the connected component where p belongs.
     *
     * @param p The id of object p, must be between 0 and N-1;
     * @return the id of the connected component the object is in.
     * @throws IndexOutOfBoundsException if the provided id is out of the 0 - N-1 interval.
     */
    int find(int p);
    /**
     * Checks if two objects are connected, i.e. belong to the same component.
     *
     * @param p The id of object p, must be between 0 and N-1;
     * @param q The id of object q, must be between 0 and N-1;
     * @return <code>true</code> if the given objects are connected, <code>false</code> otherwise.
     * @throws IndexOutOfBoundsException if the provided ids are out of the 0 - N-1 interval.
     */
    boolean connected(int p, int q);
    /**
     * Counts the number of distinct connected components.
     *
     * @return the number of distinct connected components.
     */
    int count();
}
|
package kr.co.vcnc.haeinsa;
import java.util.Map;
import java.util.NavigableSet;
import java.util.TreeMap;
import kr.co.vcnc.haeinsa.thrift.generated.TCellKey;
import kr.co.vcnc.haeinsa.thrift.generated.TKeyValue;
import kr.co.vcnc.haeinsa.thrift.generated.TMutation;
import kr.co.vcnc.haeinsa.thrift.generated.TMutationType;
import kr.co.vcnc.haeinsa.thrift.generated.TPut;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
/**
 * Implementation of {@link HaeinsaMutation} which only contains HaeinsaKeyValue
 * with {@link Type#Put} identifier. HaeinsaPut can be analogous to {@link Put}
 * class in HBase.
 * <p>
 * HaeinsaPut only contains data of single row.
 */
public class HaeinsaPut extends HaeinsaMutation {
    public HaeinsaPut(byte[] row) {
        this.row = row;
    }

    /**
     * Copy constructor. Creates a Put operation cloned from the specified Put.
     *
     * @param putToCopy put to copy
     */
    public HaeinsaPut(HaeinsaPut putToCopy) {
        this(putToCopy.getRow());
        this.familyMap = new TreeMap<byte[], NavigableSet<HaeinsaKeyValue>>(Bytes.BYTES_COMPARATOR);
        for (Map.Entry<byte[], NavigableSet<HaeinsaKeyValue>> entry : putToCopy.getFamilyMap().entrySet()) {
            // Copy each per-family set as well (TreeSet(SortedSet) keeps the
            // source comparator). Previously the sets themselves were shared
            // with the source Put, so mutating the "clone" via add() leaked
            // into the original - not a real copy.
            this.familyMap.put(entry.getKey(), new TreeSet<HaeinsaKeyValue>(entry.getValue()));
        }
    }

    /**
     * Add the specified column and value to this Put operation.
     *
     * @param family family name
     * @param qualifier column qualifier
     * @param value column value
     * @return this
     */
    public HaeinsaPut add(byte[] family, byte[] qualifier, byte[] value) {
        NavigableSet<HaeinsaKeyValue> set = getKeyValueSet(family);
        HaeinsaKeyValue kv = createPutKeyValue(family, qualifier, value);
        // if new value is inserted for same family and qualifier pair,
        // replace previous HaeinsaKeyValue with new one.
        // Manually remove old one and substitute with new one
        // because comparator of set only compare family and qualifier.
        if (set.contains(kv)) {
            set.remove(kv);
        }
        set.add(kv);
        familyMap.put(kv.getFamily(), set);
        return this;
    }

    /*
     * Create a KeyValue with this objects row key and the Put identifier.
     * @return a KeyValue with this objects row key and the Put identifier.
     */
    private HaeinsaKeyValue createPutKeyValue(byte[] family, byte[] qualifier, byte[] value) {
        return new HaeinsaKeyValue(this.row, family, qualifier, value, KeyValue.Type.Put);
    }

    /**
     * Creates an empty set if one doesn't exist for the given column family or
     * else it returns the associated set of KeyValue objects.
     *
     * @param family column family
     * @return a set of KeyValue objects, returns an empty set if one doesn't
     *         exist.
     */
    private NavigableSet<HaeinsaKeyValue> getKeyValueSet(byte[] family) {
        NavigableSet<HaeinsaKeyValue> set = familyMap.get(family);
        if (set == null) {
            set = Sets.newTreeSet(HaeinsaKeyValue.COMPARATOR);
        }
        return set;
    }

    /**
     * Merges another put-type mutation into this one, column by column.
     * Columns present in both keep the incoming mutation's value.
     */
    @Override
    public void add(HaeinsaMutation newMutation) {
        Preconditions.checkState(newMutation instanceof HaeinsaPut);
        for (HaeinsaKeyValue newKV : Iterables.concat(newMutation.getFamilyMap().values())) {
            add(newKV.getFamily(), newKV.getQualifier(), newKV.getValue());
        }
    }

    /**
     * Serializes this put into its Thrift representation ({@link TMutation}
     * of type {@link TMutationType#PUT}).
     */
    @Override
    public TMutation toTMutation() {
        TMutation newTMutation = new TMutation();
        newTMutation.setType(TMutationType.PUT);
        TPut newTPut = new TPut();
        for (HaeinsaKeyValue kv : Iterables.concat(familyMap.values())) {
            TKeyValue newTKV = new TKeyValue();
            newTKV.setKey(new TCellKey().setFamily(kv.getFamily()).setQualifier(kv.getQualifier()));
            newTKV.setValue(kv.getValue());
            newTPut.addToValues(newTKV);
        }
        newTMutation.setPut(newTPut);
        return newTMutation;
    }
}
|
package org.interestinglab.waterdrop.docutils;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.apache.commons.lang.StringUtils;
import org.interestinglab.waterdrop.configparser.PluginDocBaseVisitor;
import org.interestinglab.waterdrop.configparser.PluginDocLexer;
import org.interestinglab.waterdrop.configparser.PluginDocParser;
import java.util.Collections;
import java.util.Comparator;
public class PluginDocMarkdownRender extends PluginDocBaseVisitor<String> {
private PluginDoc pluginDoc = new PluginDoc();
private String buildMarkdown() {
StringBuilder str = new StringBuilder();
str.append("## " + pluginDoc.getPluginGroup() + " plugin : " + pluginDoc.getPluginName() + "\n\n");
str.append("* Author: " + pluginDoc.getPluginAuthor() + "\n");
str.append("* Homepage: " + pluginDoc.getPluginHomepage() + "\n");
str.append("* Version: " + pluginDoc.getPluginVersion() + "\n");
str.append("\n");
str.append("### Description\n\n");
str.append(pluginDoc.getPluginDesc() + "\n");
str.append("\n");
str.append("### Options\n\n");
Collections.sort(pluginDoc.getPluginOptions(), new Comparator<PluginDoc.PluginOption>() {
@Override
public int compare(PluginDoc.PluginOption o1, PluginDoc.PluginOption o2) {
return o1.getOptionName().compareTo(o2.getOptionName());
}
});
// append markdown table
str.append("| name | type | required | default value |\n");
str.append("|
for (PluginDoc.PluginOption option : pluginDoc.getPluginOptions()) {
str.append(String.format("| %s | %s | %s | %s |\n",
option.getOptionName(), option.getOptionType(), option.isRequired(), "null"));
}
// append option details
for (PluginDoc.PluginOption option : pluginDoc.getPluginOptions()) {
str.append("\n");
str.append("
str.append(option.getOptionDesc() + "\n");
}
return str.toString();
}
@Override
public String visitWaterdropPlugin(PluginDocParser.WaterdropPluginContext ctx) {
visitChildren(ctx);
return buildMarkdown();
}
@Override
public String visitDefinition(PluginDocParser.DefinitionContext ctx) {
if (ctx.pluginGroup() != null) {
pluginDoc.setPluginGroup(visit(ctx.pluginGroup()));
} else if (ctx.pluginName() != null) {
pluginDoc.setPluginName(visit(ctx.pluginName()));
} else if (ctx.pluginDesc() != null) {
pluginDoc.setPluginDesc(visit(ctx.pluginDesc()));
} else if (ctx.pluginAuthor() != null) {
pluginDoc.setPluginAuthor(visit(ctx.pluginAuthor()));
} else if (ctx.pluginHomepage() != null) {
pluginDoc.setPluginHomepage(visit(ctx.pluginHomepage()));
} else if (ctx.pluginVersion() != null) {
pluginDoc.setPluginVersion(visit(ctx.pluginVersion()));
} else if (ctx.pluginOption() != null) {
visit(ctx.pluginOption());
}
return null;
}
@Override
public String visitPluginGroup(PluginDocParser.PluginGroupContext ctx) {
if (ctx.INPUT() != null) {
return ctx.INPUT().getText();
} else if (ctx.FILTER() != null) {
return ctx.FILTER().getText();
} else if (ctx.OUTPUT() != null) {
return ctx.OUTPUT().getText();
}
throw new RuntimeException("visitPluginGroup 1");
}
@Override
public String visitPluginName(PluginDocParser.PluginNameContext ctx) {
if (ctx.IDENTIFIER() == null) {
throw new RuntimeException("visitPluginName 1");
}
return ctx.IDENTIFIER().getText();
}
@Override
public String visitPluginDesc(PluginDocParser.PluginDescContext ctx) {
if (ctx.IDENTIFIER() == null && ctx.TEXT() == null) {
throw new RuntimeException("visitPluginDesc 1");
}
TerminalNode node = ctx.IDENTIFIER();
if (node == null) {
node = ctx.TEXT();
}
final String unquoted = StringUtils.strip(node.getText(), "\"'");
return unquoted;
}
@Override
public String visitPluginAuthor(PluginDocParser.PluginAuthorContext ctx) {
if (ctx.TEXT() == null && ctx.IDENTIFIER() == null) {
throw new RuntimeException("visitPluginAuthor 1");
}
TerminalNode node = ctx.TEXT();
if (node == null) {
node = ctx.IDENTIFIER();
}
final String unquoted = StringUtils.strip(node.getText(), "\"'");
return unquoted;
}
@Override
public String visitPluginHomepage(PluginDocParser.PluginHomepageContext ctx) {
if (ctx.URL() == null) {
throw new RuntimeException("visitPluginHomepage 1");
}
return ctx.URL().getText();
}
@Override
public String visitPluginVersion(PluginDocParser.PluginVersionContext ctx) {
if (ctx.VERSION_NUMBER() == null) {
throw new RuntimeException("visitPluginVersion 1");
}
return ctx.VERSION_NUMBER().getText();
}
@Override
public String visitPluginOption(PluginDocParser.PluginOptionContext ctx) {
if (ctx.optionType() == null || ctx.optionName() == null) {
throw new RuntimeException("visitPluginOption 1");
}
final String optionType = visit(ctx.optionType());
final String optionName = visit(ctx.optionName());
String optionDesc = "";
if (ctx.optionDesc() != null) {
optionDesc = visit(ctx.optionDesc());
}
PluginDoc.PluginOption option = new PluginDoc.PluginOption(optionType, optionName, optionDesc);
pluginDoc.getPluginOptions().add(option);
return null;
}
@Override
public String visitOptionType(PluginDocParser.OptionTypeContext ctx) {
if (ctx.getText() == null) {
throw new RuntimeException("invalid option type in @" + PluginDocLexer.ruleNames[PluginDocLexer.PluginOption - 1]);
}
return ctx.getText();
}
@Override
public String visitOptionName(PluginDocParser.OptionNameContext ctx) {
if (ctx.IDENTIFIER() == null) {
throw new RuntimeException("invalid option name in @" + PluginDocLexer.ruleNames[PluginDocLexer.PluginOption - 1]);
}
return ctx.IDENTIFIER().getText();
}
@Override
public String visitOptionDesc(PluginDocParser.OptionDescContext ctx) {
if (ctx.TEXT() != null) {
final String unquoted = StringUtils.strip(ctx.TEXT().getText(), "\"'");
return unquoted;
}
return "";
}
}
|
package com.rojocarmesi.jStreamsAPI;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.socrata.api.Soda2Consumer;
import com.socrata.builders.SoqlQueryBuilder;
import com.socrata.exceptions.SodaError;
import com.socrata.model.soql.SoqlQuery;
import com.sun.jersey.api.client.GenericType;
public class jStreamsAPI {
static Logger logger = LoggerFactory.getLogger(jStreamsAPI.class);
public static void main(String[] args){
logger.info("Welcome to jStreamsAPI!");
List<NYC311ServiceRequest> requests = getNYC331ServiceRequests(
NYC311ServiceRequest.getOrderedKeysOfNYC331ServiceRequests().keySet(),
100);
String table = generateStringTable(requests);
System.out.println(table);
System.out.println("Size = " + requests.size());
String createTable = createTableQueryForCrossdata("nycRequests", "cassandra_prod");
System.out.println(createTable);
}
public static List<NYC311ServiceRequest> getNYC331ServiceRequests(Set<String> columns, int limit){
Soda2Consumer consumer = Soda2Consumer.newConsumer("https://data.cityofnewyork.us/");
LinkedList<String> cols = new LinkedList<>(columns);
SoqlQueryBuilder queryBuilder = new SoqlQueryBuilder();
if(!columns.isEmpty()){
queryBuilder = queryBuilder.setSelectPhrase(cols);
}
if(limit>0){
queryBuilder = queryBuilder.setLimit(limit);
}
SoqlQuery soqlQuery = queryBuilder.build();
List<NYC311ServiceRequest> result = null;
try {
logger.info("Getting data from NYC - 311 service");
result = consumer.query("erm2-nwe9", soqlQuery, new GenericType<List<NYC311ServiceRequest>>(){});
} catch (SodaError sodaError) {
logger.error("Cannot get data", sodaError);
} catch (InterruptedException e) {
logger.error("Cannot get data", e);
}
return result;
}
private static String generateStringTable(List<NYC311ServiceRequest> requests) {
StringBuilder sb = new StringBuilder();
Set<String> keys = NYC311ServiceRequest.getOrderedKeysOfNYC331ServiceRequests().keySet();
LinkedHashMap<String, Integer> widths = calculateWidths(
requests,
keys);
// Header
int totalWidth = 0;
for(String key: keys) {
int width = widths.get(key);
sb.append("| ").append(StringUtils.rightPad(key, width)).append(" ");
totalWidth+=width+3;
}
totalWidth+=2;
sb.append(" |").append(System.lineSeparator());
String separator = StringUtils.repeat("-", totalWidth);
sb.append(separator).append(System.lineSeparator());
// Body
for(NYC311ServiceRequest request: requests){
sb.append("| ").append(StringUtils.rightPad(
String.valueOf(request.getUnique_key()),
widths.get("unique_key"))).append(" ");
sb.append("| ").append(StringUtils.rightPad(
String.valueOf(request.getCreated_date()),
widths.get("created_date"))).append(" ");
sb.append("| ").append(StringUtils.rightPad(
String.valueOf(request.getClosed_date()),
widths.get("closed_date"))).append(" ");
sb.append("| ").append(StringUtils.rightPad(
request.getAgency(),
widths.get("agency"))).append(" ");
if(request.getAgency() == null){
sb.append(StringUtils.repeat(" ", widths.get("agency")-"null".length()));
}
sb.append("| ").append(StringUtils.rightPad(
request.getComplaint_type(),
widths.get("complaint_type"))).append(" ");
if(request.getComplaint_type() == null){
sb.append(StringUtils.repeat(" ", widths.get("complaint_type")-"null".length()));
}
sb.append("| ").append(StringUtils.rightPad(
request.getDescriptor(),
widths.get("descriptor"))).append(" ");
if(request.getDescriptor() == null){
sb.append(StringUtils.repeat(" ", widths.get("descriptor")-"null".length()));
}
sb.append("| ").append(StringUtils.rightPad(
String.valueOf(request.getIncident_zip()),
widths.get("incident_zip"))).append(" ");
sb.append("| ").append(StringUtils.rightPad(
request.getIncident_address(),
widths.get("incident_address"))).append(" ");
if(request.getIncident_address() == null){
sb.append(StringUtils.repeat(" ", widths.get("incident_address")-"null".length()));
}
sb.append("| ").append(StringUtils.rightPad(
request.getCity(),
widths.get("city"))).append(" ");
if(request.getCity() == null){
sb.append(StringUtils.repeat(" ", widths.get("city")-"null".length()));
}
sb.append("| ").append(StringUtils.rightPad(
request.getStatus(),
widths.get("status"))).append(" ");
if(request.getStatus() == null){
sb.append(StringUtils.repeat(" ", widths.get("status")-"null".length()));
}
sb.append("| ").append(StringUtils.rightPad(
request.getBorough(),
widths.get("borough"))).append(" ");
if(request.getBorough() == null){
sb.append(StringUtils.repeat(" ", widths.get("borough")-"null".length()));
}
sb.append("| ").append(StringUtils.rightPad(
String.valueOf(request.getX_coordinate_state_plane()),
widths.get("x_coordinate_state_plane"))).append(" ");
sb.append("| ").append(StringUtils.rightPad(
String.valueOf(request.getY_coordinate_state_plane()),
widths.get("y_coordinate_state_plane"))).append(" ");
sb.append("| ").append(StringUtils.rightPad(
String.valueOf(request.getLatitude()),
widths.get("latitude"))).append(" ");
sb.append("| ").append(StringUtils.rightPad(
String.valueOf(request.getLongitude()),
widths.get("longitude"))).append(" ");
sb.append(" |").append(System.lineSeparator());
}
sb.append(separator);
return sb.toString();
}
/**
 * Computes the display width of every column: the maximum of the column
 * header's own length and the longest rendered value over all requests.
 * <p>
 * The iteration order of {@code columnNames} must match the fixed getter
 * order below (unique_key, created_date, ..., longitude); each call to
 * {@code iter.next()} pairs one column name with one getter.
 *
 * @param requests    service requests whose rendered values are measured
 * @param columnNames ordered column names (insertion-ordered set expected)
 * @return map from column name to computed width, preserving column order
 */
private static LinkedHashMap<String, Integer> calculateWidths(List<NYC311ServiceRequest> requests,
        Set<String> columnNames) {
    LinkedHashMap<String, Integer> widths = new LinkedHashMap<>();
    // Seed every column with the width of its header text.
    for (String columnName : columnNames) {
        widths.put(columnName, columnName.length());
    }
    // Widen each column to fit the longest value. The getters are visited in
    // the same fixed order the column names were inserted in.
    for (NYC311ServiceRequest request : requests) {
        Iterator<String> iter = columnNames.iterator();
        updateWidth(widths, iter.next(), request.getUnique_key());
        updateWidth(widths, iter.next(), request.getCreated_date());
        updateWidth(widths, iter.next(), request.getClosed_date());
        updateWidth(widths, iter.next(), request.getAgency());
        updateWidth(widths, iter.next(), request.getComplaint_type());
        updateWidth(widths, iter.next(), request.getDescriptor());
        updateWidth(widths, iter.next(), request.getIncident_zip());
        updateWidth(widths, iter.next(), request.getIncident_address());
        updateWidth(widths, iter.next(), request.getCity());
        updateWidth(widths, iter.next(), request.getStatus());
        updateWidth(widths, iter.next(), request.getBorough());
        updateWidth(widths, iter.next(), request.getX_coordinate_state_plane());
        updateWidth(widths, iter.next(), request.getY_coordinate_state_plane());
        updateWidth(widths, iter.next(), request.getLatitude());
        updateWidth(widths, iter.next(), request.getLongitude());
    }
    return widths;
}

/**
 * Widens the tracked width for {@code key} when the rendered value is longer.
 * {@code String.valueOf} renders null as "null", matching the table printer.
 */
private static void updateWidth(LinkedHashMap<String, Integer> widths, String key, Object value) {
    int length = String.valueOf(value).length();
    if (length > widths.get(key)) {
        widths.put(key, length);
    }
}
/**
 * Builds a Crossdata {@code CREATE TABLE ... ON CLUSTER ...} statement for the
 * NYC311 service-request schema, including the PRIMARY KEY clause.
 *
 * @param tableName name of the table to create
 * @param cluster   cluster the table is created on
 * @return the complete CREATE TABLE statement, terminated with ';'
 */
public static String createTableQueryForCrossdata(String tableName, String cluster) {
    LinkedHashMap<String, Class> columns = NYC311ServiceRequest.getOrderedKeysOfNYC331ServiceRequests();
    LinkedList<String> primaryKey = NYC311ServiceRequest.getPrimaryKey();
    StringBuilder query = new StringBuilder("CREATE TABLE ");
    query.append(tableName).append(" ON CLUSTER ").append(cluster).append(" (");
    boolean firstColumn = true;
    for (String columnName : columns.keySet()) {
        if (!firstColumn) {
            query.append(", ");
        }
        firstColumn = false;
        query.append(columnName).append(" ").append(convertJavaTypeToCrossdataType(columns.get(columnName)));
    }
    // LinkedList.toString() yields "[a, b]"; stripping the brackets gives "a, b".
    query.append(", ").append("PRIMARY KEY (")
            .append(primaryKey.toString().replace("[", "").replace("]", "")).append(")");
    query.append(");");
    return query.toString();
}
/**
 * Maps a boxed Java type to the matching Crossdata column type:
 * Integer -> Int, Long -> BigInt, Float -> Float, Double -> Double,
 * Boolean -> Boolean; anything else falls back to Text.
 */
private static String convertJavaTypeToCrossdataType(Class clazz) {
    String canonical = clazz.getCanonicalName();
    if (canonical.equalsIgnoreCase(Integer.class.getCanonicalName())) {
        return "Int";
    }
    if (canonical.equalsIgnoreCase(Long.class.getCanonicalName())) {
        return "BigInt";
    }
    if (canonical.equalsIgnoreCase(Float.class.getCanonicalName())) {
        return "Float";
    }
    if (canonical.equalsIgnoreCase(Double.class.getCanonicalName())) {
        return "Double";
    }
    if (canonical.equalsIgnoreCase(Boolean.class.getCanonicalName())) {
        return "Boolean";
    }
    return "Text";
}
}
|
package main.java.latexee.parsers;
import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.antlr.v4.runtime.tree.ParseTree;
import org.symcomp.openmath.OpenMathBase;
import main.java.latexee.declareast.DeclareNode;
import main.java.latexee.declareast.MacroDeclaration;
import main.java.latexee.declareast.OperatorDeclaration;
import main.java.latexee.docast.DeclareStatement;
import main.java.latexee.docast.FormulaStatement;
import main.java.latexee.docast.LemmaStatement;
import main.java.latexee.docast.ParsedStatement;
import main.java.latexee.docast.ProofStatement;
import main.java.latexee.docast.TheoremStatement;
import main.java.latexee.exceptions.DeclarationInitialisationException;
import main.java.latexee.exceptions.TemplateFillException;
import main.java.latexee.logging.Logger;
import main.java.latexee.utils.AmbiguityChecker;
import main.java.latexee.utils.GrammarCompiler;
import main.java.latexee.utils.GrammarGenerator;
import main.java.latexee.utils.OpenMathTranslator;
import main.java.latexee.utils.OutputWriter;
/**
 * Contains most of the main logic of the application.
 * The constructor is given a file where the XML or Popcorn output is later
 * written. {@link #parseImpl} walks the document tree, parses declarations,
 * generates a grammar from them and then parses formulas into XML or Popcorn.
 *
 * Also keeps count of parsed formulas and declarations.
 */
public class FormulaParser {
    private Writer writer;           // output sink; closed at the end of parse()
    private GrammarCompiler cp;
    private int nodeId;              // unique id handed to each DeclareNode
    private boolean ambiguityChecking;
    private boolean popcornOutput;   // false = XML output, true = Popcorn output
    private int parsedFormulas;
    private int successfullyParsedFormulas;
    private int parsedDeclarations;
    private int successfullyParsedDeclarations;

    /**
     * FormulaParser constructor.
     * Creates the output writer and the grammar compiler.
     *
     * @param filename file name where the output of the program is written to
     * @throws FileNotFoundException mostly thrown when the output location cannot be accessed
     * @throws UnsupportedEncodingException thrown when the encoding is not supported
     */
    public FormulaParser(String filename) throws FileNotFoundException, UnsupportedEncodingException {
        this.writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filename), "utf-8"));
        this.cp = new GrammarCompiler();
        this.nodeId = 0;
        this.ambiguityChecking = false;
        this.popcornOutput = false;
        this.parsedFormulas = 0;
        this.successfullyParsedFormulas = 0;
        this.parsedDeclarations = 0;
        this.successfullyParsedDeclarations = 0;
    }

    /**
     * Entry point called after the document has been parsed into statements.
     * Delegates to {@link #parseImpl} for the actual tree walk, then closes
     * the output writer and prints the parse statistics.
     *
     * @param root root of the parsed document tree
     */
    public void parse(ParsedStatement root) {
        try {
            parseImpl(root, new HashMap<String, DeclareNode>());
        } finally {
            // Close the writer even when parseImpl throws, so any output
            // produced so far is flushed (the original leaked it on error).
            try {
                writer.close();
            } catch (IOException e) {
                System.out.println("Error: Could not close output writer.");
                e.printStackTrace();
            }
        }
        System.out.println(successfullyParsedDeclarations + "/" + parsedDeclarations + " declarations parsed successfully.");
        System.out.println(successfullyParsedFormulas + "/" + parsedFormulas + " formulas parsed successfully.");
    }

    /**
     * Recursive worker that walks the document tree: collects declarations
     * into {@code declarations}, and for each formula generates a grammar from
     * the declarations currently in scope, parses the formula and writes it to
     * the output file. Theorem/lemma/proof nodes start a child scope (a copy
     * of the declaration map) so their declarations do not leak out.
     *
     * @param root         ParsedStatement node being traversed
     * @param declarations operator and macro declarations in the current scope
     */
    public void parseImpl(ParsedStatement root, Map<String, DeclareNode> declarations) {
        if (root instanceof DeclareStatement) {
            parsedDeclarations++;
            DeclareStatement castNode = (DeclareStatement) root;
            ParseTree parseTree = DeclarationParser.parseDeclaration(castNode.getContent());
            if (parseTree != null) {
                boolean operatorStyle = DeclarationParser.isOperatorSyntax(parseTree);
                DeclareNode node = null;
                try {
                    // Operator and macro declarations only differ in the node type built.
                    if (operatorStyle) {
                        Logger.log("Parsing an operator.");
                        node = new OperatorDeclaration(parseTree, nodeId);
                    } else {
                        Logger.log("Parsing a macro.");
                        node = new MacroDeclaration(parseTree, nodeId);
                    }
                    nodeId++;
                    String id = node.getId();
                    declarations.put(id, node);
                    Logger.log("Parsing successful.\n");
                    successfullyParsedDeclarations++;
                } catch (DeclarationInitialisationException die) {
                    Logger.log("Parsing finished with errors.\n");
                }
            }
        }
        else if (root instanceof FormulaStatement) {
            // Build a grammar from everything declared in the current scope.
            List<DeclareNode> nodes = new ArrayList<DeclareNode>(declarations.values());
            String grammar = GrammarGenerator.createGrammar(nodes);
            try {
                parsedFormulas++;
                ParseTree formulaTree = cp.compile(grammar, root.getContent());
                if (formulaTree != null) {
                    successfullyParsedFormulas++;
                    OpenMathBase formulaNode = OpenMathTranslator.parseToOM(formulaTree, declarations);
                    OpenMathBase wrapped = formulaNode.toOMObject();
                    if (popcornOutput) {
                        //Currently adding a newline to each formula for readability
                        String line = wrapped.toPopcorn() + "\n";
                        writer.write(line);
                    } else {
                        String indented = OutputWriter.indentXML(wrapped.toXml());
                        writer.write(indented);
                    }
                    if (ambiguityChecking) {
                        AmbiguityChecker.check(formulaTree, declarations);
                    }
                }
            } catch (IOException e) {
                Logger.log("IO exception when parsing formula: " + root.getContent());
                e.printStackTrace();
            } catch (TemplateFillException e) {
                Logger.log("Template fill problem in formula: " + root.getContent());
            } finally {
                // Translator state is per-formula; always reset it.
                OpenMathTranslator.bracketFlags.clear();
            }
        }
        else if (root instanceof TheoremStatement ||
                root instanceof LemmaStatement ||
                root instanceof ProofStatement) {
            // New scope: children see a copy, so their declarations stay local.
            declarations = new HashMap<String, DeclareNode>(declarations);
        }
        for (ParsedStatement child : root.getChildren()) {
            parseImpl(child, declarations);
        }
    }

    /**
     * Method to enable ambiguity checking.
     */
    public void enableAmbiguityChecking() {
        this.ambiguityChecking = true;
    }

    /**
     * Method to enable popcorn output.
     */
    public void enablePopcornOutput() {
        this.popcornOutput = true;
    }
}
|
package com.salesforce.scmt.worker;
import java.util.List;
import com.salesforce.scmt.utils.Utils;
import com.salesforce.scmt.utils.SalesforceConstants;
import com.salesforce.scmt.utils.SalesforceConstants.DeskMigrationFields;
import com.salesforce.scmt.model.DeployException;
import com.salesforce.scmt.service.SalesforceService;
import com.sforce.async.AsyncApiException;
import com.sforce.async.JobInfo;
import com.sforce.ws.ConnectionException;
import com.sforce.soap.partner.sobject.SObject;
public class ClosedWorker implements Runnable {
/**
* The JobId to check for.
*/
private String jobId;
/**
* The MigrationId to update.
*/
private String migrationId;
/**
* The Server URL.
*/
private String serverUrl;
/**
* The Session Id.
*/
private String sessionId;
/**
* SO Type
*/
private String soType;
/**
* Bulk Connection
*/
private SalesforceService sf;
public ClosedWorker(String jobId, String migrationId, String serverUrl, String sessionId, String soType) {
this.jobId = jobId;
this.migrationId = migrationId;
this.serverUrl = serverUrl;
this.sessionId = sessionId;
this.soType = soType;
}
public void run() {
sf = new SalesforceService(serverUrl, sessionId);
try {
JobInfo job = sf.awaitCompletion(jobId);
SObject mig = getDeskMigration();
int failed = job.getNumberRecordsFailed() + Double.valueOf((String) mig.getField(DeskMigrationFields.RecordsFailed)).intValue();
int processed = job.getNumberRecordsProcessed() + Double.valueOf((String) mig.getField(DeskMigrationFields.RecordsTotal)).intValue();
sf.updateMigration(migrationId, failed, processed);
if (soType == null || soType == SalesforceConstants.OBJ_EMAIL_MESSAGE) {
Thread.sleep(30000L);
sf.updateCustomLabel("BypassProcessBuilder", "0");
}
} catch (AsyncApiException|ConnectionException|DeployException|InterruptedException e) {
Utils.logException(e);
}
}
/**
* Query the Desk Migration Record from Salesforce. If none is found we
* return a blank Desk Migration with zero failed and total.
*
* @throws ConnectionException
* @return SObject
*/
public SObject getDeskMigration() throws ConnectionException {
String query = String.format(
"Select %s, %s From %s Where %s = '%s'",
DeskMigrationFields.RecordsFailed,
DeskMigrationFields.RecordsTotal,
SalesforceConstants.OBJ_DESK_MIGRATION,
DeskMigrationFields.ID, migrationId
);
List<SObject> result = sf.query(query);
if (result != null || !result.isEmpty()) {
return result.get(0);
}
SObject migration = new SObject(SalesforceConstants.OBJ_DESK_MIGRATION);
migration.setId(migrationId);
migration.setField(DeskMigrationFields.RecordsFailed, 0);
migration.setField(DeskMigrationFields.RecordsTotal, 0);
return migration;
}
}
|
package mcjty.efab.blocks.grid;
import mcjty.efab.EFab;
import mcjty.efab.blocks.GenericEFabMultiBlockPart;
import mcjty.efab.blocks.IEFabEnergyStorage;
import mcjty.efab.blocks.ISpeedBooster;
import mcjty.efab.blocks.ModBlocks;
import mcjty.efab.blocks.boiler.BoilerTE;
import mcjty.efab.blocks.crafter.CrafterTE;
import mcjty.efab.blocks.monitor.AutoCraftingMonitorTE;
import mcjty.efab.blocks.monitor.MonitorTE;
import mcjty.efab.blocks.rfcontrol.RfControlTE;
import mcjty.efab.blocks.storage.StorageTE;
import mcjty.efab.blocks.tank.TankBlock;
import mcjty.efab.blocks.tank.TankTE;
import mcjty.efab.compat.botania.BotaniaSupportSetup;
import mcjty.efab.config.GeneralConfiguration;
import mcjty.efab.items.UpgradeItem;
import mcjty.efab.recipes.IEFabRecipe;
import mcjty.efab.recipes.RecipeTier;
import mcjty.efab.sound.SoundController;
import mcjty.lib.bindings.DefaultAction;
import mcjty.lib.bindings.IAction;
import mcjty.lib.container.DefaultSidedInventory;
import mcjty.lib.container.InventoryHelper;
import mcjty.lib.tileentity.GenericTileEntity;
import mcjty.lib.varia.NullSidedInvWrapper;
import net.minecraft.block.Block;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.ITickable;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.text.TextFormatting;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.fluids.FluidRegistry;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.wrapper.SidedInvWrapper;
import net.minecraftforge.oredict.OreDictionary;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.IntStream;
import static mcjty.efab.blocks.grid.GridContainer.COUNT_UPDATES;
public class GridTE extends GenericTileEntity implements DefaultSidedInventory, ITickable {
// Automation-facing slots: the three craft-output slots.
private static final int[] SLOTS = new int[]{GridContainer.SLOT_CRAFTOUTPUT, GridContainer.SLOT_CRAFTOUTPUT + 1, GridContainer.SLOT_CRAFTOUTPUT + 2};
// Backing inventory: 9 slots (presumably the 3x3 craft grid - confirm against
// GridContainer) + 3 outputs + the upgrade slots + 1 ghost-output slot.
private InventoryHelper inventoryHelper = new InventoryHelper(this, GridContainer.factory, 9 + 3 + COUNT_UPDATES + 1);
// Action ids dispatched via getActions().
public static final String ACTION_CRAFT = "craft";
public static final String ACTION_CRAFT_REPEAT = "craftRepeat";
public static final String ACTION_LEFT = "left";
public static final String ACTION_RIGHT = "right";
/**
 * Actions this grid responds to: start a single craft, start a repeating
 * craft, and the left/right selection actions.
 */
@Override
public IAction[] getActions() {
    IAction craftOnce = new DefaultAction(ACTION_CRAFT, () -> startCraft(false));
    IAction craftRepeating = new DefaultAction(ACTION_CRAFT_REPEAT, () -> startCraft(true));
    IAction selectLeft = new DefaultAction(ACTION_LEFT, this::left);
    IAction selectRight = new DefaultAction(ACTION_RIGHT, this::right);
    return new IAction[] { craftOnce, craftRepeating, selectLeft, selectRight };
}
// Ticks left in the current manual craft; -1 means no craft in progress.
private MInteger ticksRemaining = new MInteger(-1);
// Total ticks of the current craft, used for the progress percentage.
private int totalTicks = 0;
private int errorTicks = 0; // Where there was an error this will be > 0
private boolean repeat = false; // when true, craftFinished() starts the next craft
private int rfWarning = 0; // If we don't have enough power during one tick we increase this. If it goes beyond some value we abort the crafting operation
private int manaWarning = 0; // If we don't have enough mana during one tick we increase this. If it goes beyond some value we abort the crafting operation
private int crafterDelay = 0; // countdown between allowing new auto-crafts to start
// Client side only and contains the last error from the server
private List<String> errorsFromServer = Collections.emptyList();
private List<String> usageFromServer = Collections.emptyList();
// Transient information that is calculated on demand
private boolean dirty = true; // Our cached multiblock info is invalid
// Cached positions of the multiblock parts, grouped per block type.
private final Set<BlockPos> boilers = new HashSet<>();
private final Set<BlockPos> steamEngines = new HashSet<>();
private final Set<BlockPos> tanks = new HashSet<>();
private final Set<BlockPos> gearBoxes = new HashSet<>();
private final Set<BlockPos> rfControls = new HashSet<>();
private final Set<BlockPos> rfStorages = new HashSet<>();
private final Set<BlockPos> manaReceptacles = new HashSet<>();
private final Set<BlockPos> processors = new HashSet<>();
private final Set<BlockPos> pipes = new HashSet<>();
private final Set<BlockPos> monitors = new HashSet<>();
private final Set<BlockPos> autoMonitors = new HashSet<>();
private final Set<BlockPos> crafters = new HashSet<>();
private final Set<BlockPos> storages = new HashSet<>();
private final Set<BlockPos> powerOptimizers = new HashSet<>();
private boolean isMaster = false; // Is this grid the 'master' grid which will handle autocrafting
private Set<RecipeTier> supportedTiers = null;
private final GridCrafterHelper crafterHelper = new GridCrafterHelper(this);
// Ask for a custom sided-inventory wrapper for item capabilities
// (presumably the imported NullSidedInvWrapper - confirm in superclass).
@Override
protected boolean needsCustomInvWrapper() {
    return true;
}
/**
 * Pushes status text to the attached monitors. Plain monitors get the manual
 * craft progress plus the two crafter status lines; auto-crafting monitors
 * get two lines per crafter (state + first output name), up to 8 lines each.
 *
 * @param crafterStatus two pre-formatted status lines built by updateCrafters()
 */
private void updateMonitorStatus(String[] crafterStatus) {
    if (!monitors.isEmpty()) {
        String msg;
        if (errorTicks > 0) {
            // Blink "ERROR" roughly once per second (20 ticks on, 20 off).
            msg = TextFormatting.DARK_RED + (((errorTicks / 20) % 2 == 0) ? " ERROR" : "");
        } else if (totalTicks == 0) {
            // Guard against division by zero in the percentage below.
            msg = TextFormatting.DARK_GREEN + (ticksRemaining.get() >= 0 ? (" 100%") : " idle");
        } else {
            msg = TextFormatting.DARK_GREEN + (ticksRemaining.get() >= 0 ? (" " + ((totalTicks - ticksRemaining.get()) * 100 / totalTicks + "%")) : " idle");
        }
        for (BlockPos monitorPos : monitors) {
            TileEntity te = getWorld().getTileEntity(monitorPos);
            if (te instanceof MonitorTE) {
                ((MonitorTE) te).setCraftStatus(msg, crafterStatus[0], crafterStatus[1]);
            }
        }
    }
    if (!autoMonitors.isEmpty()) {
        Iterator<BlockPos> iterator = autoMonitors.iterator();
        List<String> messages = new ArrayList<>();
        for (BlockPos crafter : crafters) {
            if (!iterator.hasNext()) {
                // No more monitors
                break;
            }
            if (messages.size() >= 8) {
                // Current monitor is full (8 lines): flush and move to the next one.
                BlockPos monitorPos = iterator.next();
                TileEntity te = getWorld().getTileEntity(monitorPos);
                if (te instanceof AutoCraftingMonitorTE) {
                    ((AutoCraftingMonitorTE) te).setCraftStatus(messages);
                    messages.clear();
                }
            }
            TileEntity te = world.getTileEntity(crafter);
            if (te instanceof CrafterTE) {
                CrafterTE crafterTE = (CrafterTE) te;
                // First line: the crafter's state.
                if (!crafterTE.isOn()) {
                    messages.add(TextFormatting.GREEN + "* OFF");
                } else if (crafterTE.isCrafting()) {
                    messages.add(TextFormatting.GREEN + "* " + crafterTE.getProgress() + "%");
                } else if (crafterTE.getLastError() == null || crafterTE.getLastError().trim().isEmpty()) {
                    messages.add(TextFormatting.GREEN + "* IDLE");
                } else {
                    messages.add(TextFormatting.RED + "* ERROR");
                }
                // Second line: (truncated) display name of the first output.
                List<ItemStack> outputs = crafterTE.getOutputs();
                if (outputs.isEmpty()) {
                    messages.add(" (unknown)");
                } else {
                    String display = outputs.get(0).getDisplayName();
                    if (display.length() > 11) {
                        display = display.substring(0, 11) + "...";
                    }
                    messages.add(" " + display);
                }
            } else {
                // Crafter position no longer holds a CrafterTE.
                messages.add(TextFormatting.RED + "* ?");
                messages.add("");
            }
        }
        // Flush the remaining lines; any leftover monitors get an empty list.
        while (iterator.hasNext()) {
            BlockPos monitorPos = iterator.next();
            TileEntity te = getWorld().getTileEntity(monitorPos);
            if (te instanceof AutoCraftingMonitorTE) {
                ((AutoCraftingMonitorTE) te).setCraftStatus(messages);
                messages.clear();
            }
        }
    }
}
/**
 * Checks whether every requested ingredient can be fully satisfied from the
 * storage blocks whose crafting name passes {@code testName}. Nothing is
 * consumed; see consumeIngredients for the consuming counterpart.
 *
 * @param ingredients stacks (with counts) that must be available
 * @param testName    filter on the storage's crafting name
 * @return true when all ingredients are available
 */
public boolean checkIngredients(List<ItemStack> ingredients, Predicate<String> testName) {
    for (ItemStack wanted : ingredients) {
        int remaining = wanted.getCount();
        for (BlockPos storagePos : storages) {
            TileEntity tile = getWorld().getTileEntity(storagePos);
            if (!(tile instanceof StorageTE)) {
                continue;
            }
            StorageTE storage = (StorageTE) tile;
            if (!testName.test(storage.getCraftingName())) {
                continue;
            }
            for (int slot = 0; slot < storage.getSizeInventory() && remaining > 0; slot++) {
                ItemStack inSlot = storage.getStackInSlot(slot);
                if (!inSlot.isEmpty() && OreDictionary.itemMatches(wanted, inSlot, false)) {
                    remaining -= Math.min(inSlot.getCount(), remaining);
                }
            }
            if (remaining <= 0) {
                break;
            }
        }
        if (remaining > 0) {
            return false;
        }
    }
    return true;
}
/**
 * Removes the requested ingredients from the storage blocks whose crafting
 * name passes {@code testName}. Callers are expected to have verified
 * availability first with checkIngredients; any shortfall is silently left
 * unconsumed.
 *
 * @param ingredients stacks (with counts) to remove
 * @param testName    filter on the storage's crafting name
 */
public void consumeIngredients(List<ItemStack> ingredients, Predicate<String> testName) {
    for (ItemStack wanted : ingredients) {
        int remaining = wanted.getCount();
        for (BlockPos storagePos : storages) {
            TileEntity tile = getWorld().getTileEntity(storagePos);
            if (!(tile instanceof StorageTE)) {
                continue;
            }
            StorageTE storage = (StorageTE) tile;
            if (!testName.test(storage.getCraftingName())) {
                continue;
            }
            for (int slot = 0; slot < storage.getSizeInventory() && remaining > 0; slot++) {
                ItemStack inSlot = storage.getStackInSlot(slot);
                if (!inSlot.isEmpty() && OreDictionary.itemMatches(wanted, inSlot, false)) {
                    ItemStack extracted = storage.decrStackSize(slot, remaining);
                    remaining -= extracted.getCount();
                }
            }
            if (remaining <= 0) {
                break;
            }
        }
    }
}
// Two status lines shown on monitors; filled in by updateCrafters().
private String[] craftingStatus = new String[] { "", "" };
private void updateCrafters() {
// Only the master does crafting
checkMultiBlockCache();
if (crafters.isEmpty()) {
return;
}
if (!getSupportedTiers().contains(RecipeTier.COMPUTING)) {
craftingStatus[0] = TextFormatting.DARK_RED + " PROCESSOR";
craftingStatus[1] = TextFormatting.DARK_RED + " MISSING!";
return;
}
markDirtyQuick();
int countBusy = 0;
int countOff = 0;
int countMissing = 0;
boolean startnewcrafts = false;
crafterDelay
if (crafterDelay <= 0) {
crafterDelay = GeneralConfiguration.crafterDelay;
startnewcrafts = true;
}
for (BlockPos crafterPos : new HashSet<>(crafters)) {
TileEntity te = getWorld().getTileEntity(crafterPos);
if (te instanceof CrafterTE) {
CrafterTE crafterTE = (CrafterTE) te;
if (crafterTE.isOn()) {
if (crafterTE.isCrafting()) {
crafterTE.setSpeedBoost(GeneralConfiguration.craftAnimationBoost);
crafterTE.handleCraft(this);
countBusy++;
} else {
IEFabRecipe recipe = crafterTE.checkCraft(this);
if (recipe == null) {
countMissing++;
} else if (startnewcrafts) {
crafterTE.startCraft(this, recipe);
countBusy++;
}
}
} else {
crafterTE.setLastError("No redstone signal");
countOff++;
}
}
}
int idx = 0;
if (countMissing > 0) {
craftingStatus[idx++] = TextFormatting.DARK_RED + " " + "fail " + countMissing;
}
if (countBusy > 0) {
craftingStatus[idx++] = TextFormatting.DARK_GREEN + " " + "busy " + countBusy;
}
if (countOff > 0 && idx <= 1) {
craftingStatus[idx++] = TextFormatting.DARK_GREEN + " " + "off " + countOff;
}
if (idx <= 1) {
craftingStatus[idx] = "";
}
}
/**
 * Per-tick update. Server side: advances the error blink timer, lets the
 * master grid drive auto-crafters and monitors, and advances any manual
 * craft in progress. Client side: only updates the sound.
 */
@Override
public void update() {
    if (!getWorld().isRemote) {
        if (errorTicks > 0) {
            // An error is being displayed; keep counting for the blink effect.
            errorTicks++;
            markDirtyQuick();
        }
        if (isMaster) {
            updateCrafters();
            updateMonitorStatus(craftingStatus);
        }
        if (ticksRemaining.get() >= 0) {
            // A manual craft is in progress.
            markDirtyQuick();
            IEFabRecipe recipe = crafterHelper.findRecipeForOutput(getCurrentGhostOutput(), world);
            if (recipe == null) {
                // The selected output no longer matches any recipe.
                abortCraft();
                errorTicks = 1;
                return;
            }
            ticksRemaining.dec();
            if (totalTicks - ticksRemaining.get() < 2) {
                // Send to client so it knows that the craft is progressing and that ticksRemaining is no longer equal to totalTicks
                markDirtyClient();
            }
            if (ticksRemaining.get() % 20 == 0 || ticksRemaining.get() < 0) {
                // Every 20 ticks we check if the inventory still matches what we want to craft
                if (!ItemStack.areItemsEqual(crafterHelper.getCraftingOutput(), getCurrentOutput(recipe))) {
                    // Reset craft
                    abortCraft();
                    errorTicks = 1;
                    return;
                }
            }
            if (ticksRemaining.get() < 0) {
                craftFinished(recipe);
            } else {
                CraftProgressResult result = craftInProgress(recipe, ticksRemaining);
                if (result == CraftProgressResult.WAIT) {
                    // Resources were missing this tick: give the tick back so progress stalls.
                    ticksRemaining.inc();
                } else if (result == CraftProgressResult.ABORT) {
                    abortCraft();
                    errorTicks = 1;
                }
            }
        }
    } else {
        updateSound();
    }
}
/** Cancels the running manual craft and syncs the reset state to the client. */
private void abortCraft() {
    ticksRemaining.set(-1); // -1 = no craft in progress
    crafterHelper.abortCraft();
    markDirtyClient();
}
/** Outcome of one tick of craftInProgress(). */
public enum CraftProgressResult {
    ABORT,  // cancel the craft
    WAIT,   // resources missing this tick; stall without losing progress
    OK      // tick paid for; continue normally
}
/**
 * Consumes the per-tick resources (steam water, RF, mana) for a running craft.
 *
 * @param recipe      the recipe being crafted
 * @param ticksRemain remaining ticks; modified when power is paid in batches
 * @return OK to continue, WAIT to stall this tick, ABORT to cancel the craft
 */
public CraftProgressResult craftInProgress(@Nonnull IEFabRecipe recipe, MInteger ticksRemain) {
    checkMultiBlockCache();
    if (recipe.getRequiredTiers().contains(RecipeTier.STEAM)) {
        // Consume a bit of water
        int amount = GeneralConfiguration.waterSteamCraftingConsumption;
        amount *= getSpeedBonus(recipe); // Consume more if the operation is faster
        FluidStack stack = new FluidStack(FluidRegistry.WATER, amount);
        TankTE tank = findSuitableTank(stack);
        if (tank == null) {
            // No tank holds enough water.
            return CraftProgressResult.ABORT;
        }
        FluidStack drained = tank.getHandler().drain(stack, true);
        if (drained == null || drained.amount < amount) {
            return CraftProgressResult.ABORT;
        }
    }
    if (recipe.getRequiredRfPerTick() > 0) {
        if (powerOptimizers.isEmpty()) {
            int stillneeded = recipe.getRequiredRfPerTick();
            stillneeded *= getSpeedBonus(recipe); // Consume more if the operation is faster
            // Drain RF controls first, then fall back to RF storages.
            stillneeded = handlePowerPerTick(stillneeded, this.rfControls, GeneralConfiguration.rfControlMax);
            if (stillneeded > 0) {
                stillneeded = handlePowerPerTick(stillneeded, this.rfStorages, GeneralConfiguration.rfStorageInternalFlow);
                if (stillneeded > 0) {
                    if (GeneralConfiguration.ticksAllowedWithoutRF >= 0) {
                        rfWarning++;
                        if (rfWarning > GeneralConfiguration.ticksAllowedWithoutRF) {
                            return CraftProgressResult.ABORT;
                        }
                    }
                    return CraftProgressResult.WAIT;
                }
            } else {
                rfWarning = 0;
            }
        } else {
            // Handle in multiple ticks for efficiency
            ticksRemain.inc(); // We process things differently so put back our tick
            handlePowerOptimized(recipe, 100, ticksRemain);
            handlePowerOptimized(recipe, 10, ticksRemain);
            handlePowerOptimized(recipe, 1, ticksRemain);
            rfWarning = 0;
            if (ticksRemain.get() > 0) {
                // Not all ticks could be paid for yet.
                ticksRemain.dec();
                return CraftProgressResult.WAIT;
            }
        }
    }
    if (EFab.botania && recipe.getRequiredManaPerTick() > 0) {
        int stillneeded = recipe.getRequiredManaPerTick();
        stillneeded *= getSpeedBonus(recipe); // Consume more if the operation is faster
        stillneeded = handleManaPerTick(stillneeded, this.manaReceptacles, GeneralConfiguration.maxManaUsage);
        if (stillneeded > 0) {
            if (GeneralConfiguration.ticksAllowedWithoutMana >= 0) {
                manaWarning++;
                if (manaWarning > GeneralConfiguration.ticksAllowedWithoutMana) {
                    return CraftProgressResult.ABORT;
                }
            }
            return CraftProgressResult.WAIT;
        } else {
            manaWarning = 0;
        }
    }
    return CraftProgressResult.OK;
}
/**
 * Batched power payment used when power optimizers are present: while enough
 * combined power is available, pays for 'step' ticks of RF at once and
 * decrements ticksRemain by step each time.
 */
private void handlePowerOptimized(@Nonnull IEFabRecipe recipe, int step, MInteger ticksRemain) {
    while (ticksRemain.get() >= step) {
        int needed = recipe.getRequiredRfPerTick() * step;
        int available = getAvailablePower(this.rfControls) + getAvailablePower(this.rfStorages);
        if (needed > available) {
            // Cannot pay for a whole batch of this size.
            return;
        }
        ticksRemain.dec(step);
        // Drain the controls first; whatever remains comes from the storages.
        needed = handlePowerPerTick(needed, this.rfControls, 1000000000);
        handlePowerPerTick(needed, this.rfStorages, 1000000000);
    }
}
/**
 * Draws up to {@code stillneeded} RF from the energy-storing tile entities at
 * the given positions, taking at most {@code maxUsage} from each.
 *
 * @return the amount of RF that could NOT be supplied (0 when fully covered)
 */
private int handlePowerPerTick(int stillneeded, Set<BlockPos> poses, int maxUsage) {
    int remaining = stillneeded;
    for (BlockPos pos : poses) {
        TileEntity tile = getWorld().getTileEntity(pos);
        if (!(tile instanceof IEFabEnergyStorage)) {
            continue;
        }
        IEFabEnergyStorage storage = (IEFabEnergyStorage) tile;
        int available = Math.min(maxUsage, storage.getEnergyStored(null));
        if (available >= remaining) {
            storage.extractEnergy(remaining);
            return 0;
        }
        storage.extractEnergy(available);
        remaining -= available;
    }
    return remaining;
}
/** Sums the stored energy of all energy-storing tile entities at the given positions. */
private int getAvailablePower(Set<BlockPos> poses) {
    int total = 0;
    for (BlockPos pos : poses) {
        TileEntity tile = getWorld().getTileEntity(pos);
        if (tile instanceof IEFabEnergyStorage) {
            total += ((IEFabEnergyStorage) tile).getEnergyStored(null);
        }
    }
    return total;
}
/**
 * Draws up to {@code stillneeded} mana from the mana receptacles at the given
 * positions, taking at most {@code maxUsage} from each.
 *
 * @return the amount of mana that could NOT be supplied (0 when fully covered)
 */
private int handleManaPerTick(int stillneeded, Set<BlockPos> poses, int maxUsage) {
    int remaining = stillneeded;
    for (BlockPos pos : poses) {
        if (!BotaniaSupportSetup.isManaReceptacle(getWorld().getBlockState(pos).getBlock())) {
            continue;
        }
        int available = Math.min(maxUsage, BotaniaSupportSetup.getMana(getWorld(), pos));
        if (available >= remaining) {
            BotaniaSupportSetup.consumeMana(getWorld(), pos, remaining);
            return 0;
        }
        BotaniaSupportSetup.consumeMana(getWorld(), pos, available);
        remaining -= available;
    }
    return remaining;
}
/**
 * Ends the manual craft: checks room and the final (fluid) requirements,
 * inserts the result into the output slots and consumes one item from each
 * input slot. Starts the next craft when repeat mode is on.
 */
private void craftFinished(@Nonnull IEFabRecipe recipe) {
    ticksRemaining.set(-1);
    markDirtyClient();
    // Craft finished. Consume items and do the actual crafting. If there is no room to place
    // the craft result then nothing happens
    if (!checkRoomForOutput(crafterHelper.getCraftingOutput().copy())) {
        // Not enough room. Abort craft
        return;
    }
    if (checkFinalCraftRequirements(recipe, Collections.emptyList(), s -> false)) {
        // Secondary requirements (fluids) could not be met. Abort craft
        return;
    }
    insertOutput(crafterHelper.getCraftingOutput().copy());
    // Consume items
    for (int i = GridContainer.SLOT_CRAFTINPUT; i < GridContainer.SLOT_CRAFTOUTPUT; i++) {
        decrStackSize(i, 1);
    }
    if (repeat) {
        startCraft(repeat);
    }
}
/**
 * Checks and consumes the secondary craft requirements: the recipe's required
 * fluids plus the given extra ingredients. Consumption only happens after
 * everything has been verified available.
 *
 * @return true if the final craft requirements are NOT met (craft must abort)
 */
public boolean checkFinalCraftRequirements(IEFabRecipe recipe, @Nonnull List<ItemStack> ingredients, Predicate<String> matcher) {
    // Now check if we have secondary requirements like fluids
    // First loop to check
    for (FluidStack stack : recipe.getRequiredFluids()) {
        TankTE tank = findSuitableTank(stack);
        if (tank == null) {
            // Abort!
            return true;
        }
    }
    if (!checkIngredients(ingredients, matcher)) {
        // Ingredients are missing. Abort!
        return true;
    }
    // Second loop to consume
    for (FluidStack stack : recipe.getRequiredFluids()) {
        TankTE tank = findSuitableTank(stack);
        tank.getHandler().drain(stack, true);
    }
    consumeIngredients(ingredients, matcher);
    return false;
}
/** The item currently selected in the ghost-output slot. */
private ItemStack getCurrentGhostOutput() {
    return getStackInSlot(GridContainer.SLOT_GHOSTOUT);
}
/** True when the three craft-output slots can absorb the given stack. */
private boolean checkRoomForOutput(ItemStack output) {
    return crafterHelper.checkRoomForOutput(output, GridContainer.SLOT_CRAFTOUTPUT, GridContainer.SLOT_CRAFTOUTPUT+3);
}
// This function assumes there is room (i.e. check with checkRoomForOutput first)
private void insertOutput(ItemStack output) {
    crafterHelper.insertOutput(output, GridContainer.SLOT_CRAFTOUTPUT, GridContainer.SLOT_CRAFTOUTPUT+3);
}
/**
 * Set the ghost output slot to one of the possible outputs for the current
 * grid. If the output is already one of the possible outputs then nothing
 * happens. When a new recipe is selected, totalTicks is refreshed with its
 * craft time.
 */
private void setValidRecipeGhostOutput() {
    ItemStack current = inventoryHelper.getStackInSlot(GridContainer.SLOT_GHOSTOUT);
    List<IEFabRecipe> recipes = findCurrentRecipesSorted();
    if (current.isEmpty()) {
        // Nothing selected yet: pick the first (best-sorted) recipe, if any.
        if (!recipes.isEmpty()) {
            inventoryHelper.setStackInSlot(GridContainer.SLOT_GHOSTOUT, recipes.get(0).cast().getRecipeOutput().copy());
            totalTicks = getCraftTime(recipes.get(0));
            markDirtyQuick();
        }
    } else {
        if (recipes.isEmpty()) {
            // Grid no longer matches any recipe: clear the selection.
            inventoryHelper.setStackInSlot(GridContainer.SLOT_GHOSTOUT, ItemStack.EMPTY);
            markDirtyQuick();
        } else {
            for (IEFabRecipe recipe : recipes) {
                if (mcjty.efab.tools.InventoryHelper.isItemStackConsideredEqual(current, recipe.cast().getRecipeOutput())) {
                    return; // Ok, already present
                }
            }
            // Current selection is no longer valid: replace with the first recipe.
            inventoryHelper.setStackInSlot(GridContainer.SLOT_GHOSTOUT, recipes.get(0).cast().getRecipeOutput().copy());
            totalTicks = getCraftTime(recipes.get(0));
            markDirtyQuick();
        }
    }
}
/**
 * Computes the actual craft result of the given recipe on the current work
 * inventory, or EMPTY when there is no recipe.
 */
private ItemStack getCurrentOutput(@Nullable IEFabRecipe recipe) {
    if (recipe == null) {
        return ItemStack.EMPTY;
    }
    return recipe.cast().getCraftingResult(crafterHelper.getWorkInventory());
}
/**
 * Gives all recipes matching the current grid contents, sorted so that
 * recipes without errors come first.
 */
@Nonnull
private List<IEFabRecipe> findCurrentRecipesSorted() {
    List<IEFabRecipe> recipes = crafterHelper.findCurrentRecipes(getWorld());
    recipes.sort((r1, r2) -> {
        // getErrorsForOutput returns true when the recipe currently has errors.
        boolean error1 = getErrorsForOutput(r1, null);
        boolean error2 = getErrorsForOutput(r2, null);
        return error1 == error2 ? 0 : (error2 ? -1 : 1);
    });
    return recipes;
}
/**
 * Stores a stack and reacts to which slot region changed: craft-grid slots
 * refresh the recipe cache and ghost output and sync to the client; upgrade
 * slots invalidate the cached multiblock info.
 */
@Override
public void setInventorySlotContents(int index, ItemStack stack) {
    getInventoryHelper().setInventorySlotContents(getInventoryStackLimit(), index, stack);
    if (index >= GridContainer.SLOT_CRAFTINPUT && index < GridContainer.SLOT_CRAFTOUTPUT) {
        crafterHelper.invalidateCache();
        setValidRecipeGhostOutput();
        // We need to update the visual crafting grid client side
        markDirtyClient();
    } else if (index >= GridContainer.SLOT_UPDATES && index < GridContainer.SLOT_UPDATES + GridContainer.COUNT_UPDATES) {
        invalidateMultiBlockCache();
    }
}
/**
 * Removes up to {@code count} items from slot {@code index} and refreshes the
 * cached state that depends on that slot.
 *
 * @param index slot index
 * @param count maximum number of items to remove
 * @return the removed stack
 */
@Override
public ItemStack decrStackSize(int index, int count) {
    ItemStack stack = getInventoryHelper().decrStackSize(index, count);
    if (index >= GridContainer.SLOT_CRAFTINPUT && index < GridContainer.SLOT_CRAFTOUTPUT) {
        // Invalidate the recipe cache BEFORE recomputing the ghost output so
        // setValidRecipeGhostOutput() does not read stale cached recipes.
        // (Matches the ordering used in setInventorySlotContents.)
        crafterHelper.invalidateCache();
        setValidRecipeGhostOutput();
        // We need to update the visual crafting grid client side
        markDirtyClient();
    } else if (index >= GridContainer.SLOT_UPDATES && index < GridContainer.SLOT_UPDATES + GridContainer.COUNT_UPDATES) {
        invalidateMultiBlockCache();
    }
    return stack;
}
/**
 * Removes and returns the entire stack in slot {@code index}, refreshing the
 * cached state that depends on that slot.
 *
 * @param index slot index
 * @return the stack that was in the slot
 */
@Override
public ItemStack removeStackFromSlot(int index) {
    ItemStack stack = getInventoryHelper().removeStackFromSlot(index);
    if (index >= GridContainer.SLOT_CRAFTINPUT && index < GridContainer.SLOT_CRAFTOUTPUT) {
        // Invalidate the recipe cache BEFORE recomputing the ghost output so
        // setValidRecipeGhostOutput() does not read stale cached recipes.
        // (Matches the ordering used in setInventorySlotContents.)
        crafterHelper.invalidateCache();
        setValidRecipeGhostOutput();
        // We need to update the visual crafting grid client side
        markDirtyClient();
    } else if (index >= GridContainer.SLOT_UPDATES && index < GridContainer.SLOT_UPDATES + GridContainer.COUNT_UPDATES) {
        invalidateMultiBlockCache();
    }
    return stack;
}
/**
 * Only upgrade items are accepted in the upgrade slot range; for all other
 * indices, only slots below SLOT_UPDATES are considered valid.
 */
@Override
public boolean isItemValidForSlot(int index, ItemStack stack) {
    if (index >= GridContainer.SLOT_UPDATES && index < GridContainer.SLOT_UPDATES + GridContainer.COUNT_UPDATES) {
        if (!stack.isEmpty() && stack.getItem() instanceof UpgradeItem) {
            return true;
        }
    }
    // Non-upgrade slots: valid only when below the upgrade range.
    return index < GridContainer.SLOT_UPDATES;
}

/**
 * Same automation-accessible slot set for every face.
 * SLOTS is declared elsewhere in this class.
 */
@Override
public int[] getSlotsForFace(EnumFacing side) {
    return SLOTS;
}

/**
 * Automation may only extract from the craft-output slot range
 * (SLOT_CRAFTOUTPUT inclusive .. SLOT_UPDATES exclusive).
 */
@Override
public boolean canExtractItem(int index, ItemStack stack, EnumFacing direction) {
    return index >= GridContainer.SLOT_CRAFTOUTPUT && index < GridContainer.SLOT_UPDATES;
}

/** Automation can never insert into the grid. */
@Override
public boolean canInsertItem(int index, ItemStack itemStackIn, EnumFacing direction) {
    return false;
}
// RNG used to pick a random boiler / RF control block for effects and to
// jitter the timing of one-shot sounds.
private Random random = new Random();

/**
 * Starts or refreshes the ambient sounds (and associated block effects) for
 * the tiers required by the recipe currently being crafted. Only acts while a
 * craft is active (ticksRemaining >= 0) and a recipe matches the ghost
 * output.
 *
 * NOTE(review): this appears to be client-side effect code (SoundController,
 * visual timers) — confirm it is only invoked on the client.
 */
private void updateSound() {
    if (ticksRemaining.get() >= 0) {
        IEFabRecipe recipe = crafterHelper.findRecipeForOutput(getCurrentGhostOutput(), world);
        if (recipe != null) {
            Set<RecipeTier> requiredTiers = recipe.getRequiredTiers();
            if (requiredTiers.contains(RecipeTier.STEAM)) {
                if (!SoundController.isSteamPlaying(getWorld(), pos)) {
                    SoundController.playSteamSound(getWorld(), pos);
                    // Pick a random connected boiler and make it emit steam for 3s.
                    // @todo optimize this?
                    List<BlockPos> positions = new ArrayList<>();
                    findBoilers(pos, new HashSet<>(), positions);
                    if (!positions.isEmpty()) {
                        BlockPos p = positions.get(random.nextInt(positions.size()));
                        TileEntity te = getWorld().getTileEntity(p);
                        if (te instanceof BoilerTE) {
                            ((BoilerTE) te).setTimer(3 * 20);
                        }
                    }
                }
            }
            if (requiredTiers.contains(RecipeTier.COMPUTING)) {
                if (!SoundController.isBeepsPlaying(getWorld(), pos)) {
                    // Play beeps at craft start, afterwards only occasionally (4% per tick).
                    if ((totalTicks - ticksRemaining.get() < 1) || (random.nextFloat() < 0.04)) {
                        if (random.nextInt(100) < 50) {
                            SoundController.playBeeps1Sound(getWorld(), pos);
                        } else {
                            SoundController.playBeeps2Sound(getWorld(), pos);
                        }
                    }
                }
            }
            if (requiredTiers.contains(RecipeTier.GEARBOX) || requiredTiers.contains(RecipeTier.ADVANCED_GEARBOX)) {
                if (!SoundController.isMachinePlaying(getWorld(), pos)) {
                    SoundController.playMachineSound(getWorld(), pos);
                }
            }
            if (requiredTiers.contains(RecipeTier.RF)) {
                if (!SoundController.isSparksPlaying(getWorld(), pos)) {
                    // Sparks at craft start, afterwards only occasionally (4% per tick).
                    if ((totalTicks - ticksRemaining.get() < 1) || (random.nextFloat() < 0.04)) {
                        SoundController.playSparksSound(getWorld(), pos);
                        // Pick a random connected RF control block and show a spark effect.
                        // @todo optimize this?
                        List<BlockPos> positions = new ArrayList<>();
                        findRFControlBlocks(pos, new HashSet<>(), positions);
                        if (!positions.isEmpty()) {
                            BlockPos p = positions.get(random.nextInt(positions.size()));
                            TileEntity te = getWorld().getTileEntity(p);
                            if (te instanceof RfControlTE) {
                                ((RfControlTE) te).setSpark(25);
                            }
                        }
                    }
                }
            }
        }
    }
}
// Client-side. Depth-first walk over the connected multiblock structure
// collecting the positions of all RF control blocks into 'positions'.
private void findRFControlBlocks(BlockPos current, Set<BlockPos> visited, List<BlockPos> positions) {
    if (!visited.add(current)) {
        return; // already explored
    }
    for (EnumFacing dir : EnumFacing.VALUES) {
        BlockPos neighbour = current.offset(dir);
        Block block = getWorld().getBlockState(neighbour).getBlock();
        if (block == ModBlocks.gridBlock || block == ModBlocks.baseBlock) {
            // Structural blocks: keep walking but never record them.
            findRFControlBlocks(neighbour, visited, positions);
        } else if (block instanceof GenericEFabMultiBlockPart) {
            if (block == ModBlocks.rfControlBlock) {
                positions.add(neighbour);
            }
            findRFControlBlocks(neighbour, visited, positions);
        }
    }
}
// Client-side. Depth-first walk over the connected multiblock structure
// collecting the positions of all boiler blocks into 'positions'.
private void findBoilers(BlockPos current, Set<BlockPos> visited, List<BlockPos> positions) {
    if (!visited.add(current)) {
        return; // already explored
    }
    for (EnumFacing dir : EnumFacing.VALUES) {
        BlockPos neighbour = current.offset(dir);
        Block block = getWorld().getBlockState(neighbour).getBlock();
        if (block == ModBlocks.gridBlock || block == ModBlocks.baseBlock) {
            // Structural blocks: keep walking but never record them.
            findBoilers(neighbour, visited, positions);
        } else if (block instanceof GenericEFabMultiBlockPart) {
            if (block == ModBlocks.boilerBlock) {
                positions.add(neighbour);
            }
            findBoilers(neighbour, visited, positions);
        }
    }
}
// Enqueues every neighbour of 'pos' that has neither been visited nor is
// already pending in the work queue.
private void addTodo(Queue<BlockPos> todo, Set<BlockPos> visited, BlockPos pos) {
    for (EnumFacing direction : EnumFacing.VALUES) {
        BlockPos neighbour = pos.offset(direction);
        boolean alreadyKnown = visited.contains(neighbour) || todo.contains(neighbour);
        if (!alreadyKnown) {
            todo.add(neighbour);
        }
    }
}
/// A sum of all priorities of the upgrades so that we can find the 'best' grid for autocrafting
public int calculateGridPriority() {
    int sum = 0;
    int end = GridContainer.SLOT_UPDATES + GridContainer.COUNT_UPDATES;
    for (int slot = GridContainer.SLOT_UPDATES; slot < end; slot++) {
        ItemStack stack = getStackInSlot(slot);
        if (!stack.isEmpty() && stack.getItem() instanceof UpgradeItem) {
            sum += ((UpgradeItem) stack.getItem()).getPriority();
        }
    }
    return sum;
}
/**
 * Breadth-first walk over the multiblock structure starting at this grid,
 * filling the per-part position caches (boilers, tanks, gear boxes, ...) and
 * electing the 'master' grid: the connected grid with the highest upgrade
 * priority (ties keep the earlier best; this grid is the initial candidate).
 * Callers are expected to have cleared the caches first (see
 * checkMultiBlockCache).
 */
private void findMultiBlockParts() {
    Set<BlockPos> visited = new HashSet<>();
    Queue<BlockPos> todo = new ArrayDeque<>();
    // This grid starts as the best master candidate.
    BlockPos bestGridSoFar = pos;
    int bestPrioritySoFar = calculateGridPriority();
    Set<BlockPos> grids = new HashSet<>();
    grids.add(pos);
    visited.add(pos);
    addTodo(todo, visited, pos);
    while (!todo.isEmpty()) {
        BlockPos p = todo.poll();
        visited.add(p);
        Block block = getWorld().getBlockState(p).getBlock();
        if (block == ModBlocks.gridBlock) {
            // Find the 'master' grid used for crafting
            TileEntity te = getWorld().getTileEntity(p);
            if (te instanceof GridTE) {
                int priority = ((GridTE) te).calculateGridPriority();
                if (priority > bestPrioritySoFar) {
                    bestPrioritySoFar = priority;
                    bestGridSoFar = p;
                }
            }
            grids.add(p);
            addTodo(todo, visited, p);
        } else if (block == ModBlocks.baseBlock) {
            // Structural block: traverse through it, nothing to record.
            addTodo(todo, visited, p);
        } else if (block instanceof GenericEFabMultiBlockPart) {
            // Record the part in the matching cache, then keep traversing.
            if (block == ModBlocks.boilerBlock) {
                boilers.add(p);
            } else if (block == ModBlocks.steamEngineBlock) {
                steamEngines.add(p);
            } else if (block == ModBlocks.gearBoxBlock) {
                gearBoxes.add(p);
            } else if (block == ModBlocks.rfControlBlock) {
                rfControls.add(p);
            } else if (block == ModBlocks.rfStorageBlock || block == ModBlocks.advancedRfStorageBlock) {
                rfStorages.add(p);
            } else if (block == ModBlocks.processorBlock) {
                processors.add(p);
            } else if (block == ModBlocks.pipeBlock) {
                pipes.add(p);
            } else if (block == ModBlocks.monitorBlock) {
                monitors.add(p);
            } else if (block == ModBlocks.autoCraftingMonitorBlock) {
                autoMonitors.add(p);
            } else if (block == ModBlocks.crafterBlock) {
                crafters.add(p);
            } else if (block == ModBlocks.storageBlock) {
                storages.add(p);
            } else if (block == ModBlocks.powerOptimizerBlock) {
                powerOptimizers.add(p);
            } else if (block instanceof TankBlock) {
                tanks.add(p);
            } else if (EFab.botania && BotaniaSupportSetup.isManaReceptacle(block)) {
                // Botania integration: mana receptacles only when Botania is loaded.
                manaReceptacles.add(p);
            }
            addTodo(todo, visited, p);
        } else {
            // Don't go further here
        }
    }
    // Find the master grid
    for (BlockPos grid : grids) {
        TileEntity te = getWorld().getTileEntity(grid);
        if (te instanceof GridTE) {
            ((GridTE)te).isMaster = grid.equals(bestGridSoFar);
        }
    }
}
/**
 * Rebuilds the multiblock part caches if they were invalidated. Clears every
 * per-part position set and re-scans the structure. Cheap no-op when the
 * cache is clean.
 */
private void checkMultiBlockCache() {
    if (dirty) {
        dirty = false;
        boilers.clear();
        steamEngines.clear();
        tanks.clear();
        gearBoxes.clear();
        rfControls.clear();
        rfStorages.clear();
        processors.clear();
        pipes.clear();
        monitors.clear();
        autoMonitors.clear();
        crafters.clear();
        storages.clear();
        manaReceptacles.clear();
        powerOptimizers.clear();
        findMultiBlockParts();
    }
}

/**
 * Marks the multiblock part caches stale. The next checkMultiBlockCache()
 * call re-scans the structure; supported tiers are also recomputed lazily
 * (see getSupportedTiers).
 */
public void invalidateMultiBlockCache() {
    dirty = true;
    supportedTiers = null;
}
/**
 * Return all current outputs with the first outputs the ones that are actually possible
 * given current configuration
 *
 * @return list of possible output stacks, ordered by craftability
 */
@Nonnull
public List<ItemStack> getOutputs() {
    // Delegates to the crafter helper, which knows the current grid recipes.
    return crafterHelper.getOutputs(getWorld());
}
// Cycles the ghost output one step backwards through the sorted recipe list
// (wrapping around). No-op when the current ghost is not in the list.
private void left() {
    List<IEFabRecipe> sorted = findCurrentRecipesSorted();
    OptionalInt current = findCurrentGhost(sorted);
    if (!current.isPresent()) {
        return;
    }
    int previous = Math.floorMod(current.getAsInt() - 1, sorted.size());
    IEFabRecipe recipe = sorted.get(previous);
    setInventorySlotContents(GridContainer.SLOT_GHOSTOUT, recipe.cast().getRecipeOutput().copy());
    totalTicks = getCraftTime(recipe);
    markDirtyQuick();
}
// Cycles the ghost output one step forwards through the sorted recipe list
// (wrapping around). No-op when the current ghost is not in the list.
private void right() {
    List<IEFabRecipe> sorted = findCurrentRecipesSorted();
    OptionalInt current = findCurrentGhost(sorted);
    if (!current.isPresent()) {
        return;
    }
    int next = (current.getAsInt() + 1) % sorted.size();
    IEFabRecipe recipe = sorted.get(next);
    setInventorySlotContents(GridContainer.SLOT_GHOSTOUT, recipe.cast().getRecipeOutput().copy());
    totalTicks = getCraftTime(recipe);
    markDirtyQuick();
}
// Index (within 'sorted') of the recipe whose output equals the stack
// currently shown in the ghost slot, or empty when none matches.
private OptionalInt findCurrentGhost(List<IEFabRecipe> sorted) {
    ItemStack ghost = getCurrentGhostOutput();
    for (int i = 0; i < sorted.size(); i++) {
        if (mcjty.efab.tools.InventoryHelper.isItemStackConsideredEqual(sorted.get(i).cast().getRecipeOutput(), ghost)) {
            return OptionalInt.of(i);
        }
    }
    return OptionalInt.empty();
}
/**
 * Attempts to start crafting the item shown in the ghost slot.
 * Resets warning/error counters, aborts silently when no recipe matches or
 * the multiblock reports errors, otherwise initializes the craft timer and
 * kicks off tier-specific animations.
 *
 * @param repeat whether to restart the craft automatically when it finishes
 */
private void startCraft(boolean repeat) {
    this.repeat = repeat;
    errorTicks = 0;
    rfWarning = 0;
    manaWarning = 0;
    markDirtyQuick();
    IEFabRecipe recipe = crafterHelper.findRecipeForOutput(getCurrentGhostOutput(), world);
    if (recipe != null) {
        // Cheap boolean check (null error list = early-out on first problem).
        boolean error = getErrorsForOutput(recipe, null);
        if (error) {
            return; // Don't start
        }
        crafterHelper.setCraftingOutput(getCurrentOutput(recipe));
        int craftTime = getCraftTime(recipe);
        ticksRemaining.set(craftTime);
        totalTicks = craftTime;
        markDirtyClient();
        // Speed up the visual animations of the parts involved in this craft.
        if (recipe.getRequiredTiers().contains(RecipeTier.STEAM)) {
            handleAnimationSpeed(GeneralConfiguration.steamWheelBoost, this.steamEngines);
        }
        if (EFab.botania && recipe.getRequiredTiers().contains(RecipeTier.MANA)) {
            handleAnimationSpeed(GeneralConfiguration.manaRotationBoost, manaReceptacles);
        }
    }
}
/**
 * Computes the crafting speed multiplier for {@code recipe} based on how many
 * parts of each required tier are present in the multiblock. The result is at
 * least 1; each tier can only raise the bonus, capped by its configured
 * maximum.
 *
 * @param recipe the recipe to compute the bonus for
 * @return speed multiplier, >= 1
 */
public int getSpeedBonus(IEFabRecipe recipe) {
    getSupportedTiers();    // side effect: ensures the part caches are populated
    int bonus = 1;
    Set<RecipeTier> tiers = recipe.getRequiredTiers();
    if (tiers.contains(RecipeTier.GEARBOX)) {
        bonus = clampedTierBonus(bonus, gearBoxes.size(), GeneralConfiguration.maxSpeedupBonus);
    }
    if (tiers.contains(RecipeTier.STEAM)) {
        bonus = clampedTierBonus(bonus, steamEngines.size(), GeneralConfiguration.maxSpeedupBonus);
    }
    if (tiers.contains(RecipeTier.RF)) {
        bonus = clampedTierBonus(bonus, rfControls.size(), GeneralConfiguration.maxSpeedupBonus);
    }
    if (tiers.contains(RecipeTier.COMPUTING)) {
        bonus = clampedTierBonus(bonus, processors.size(), GeneralConfiguration.maxSpeedupBonus);
    }
    if (tiers.contains(RecipeTier.LIQUID)) {
        // Pipes use their own, separately configured cap.
        bonus = clampedTierBonus(bonus, pipes.size(), GeneralConfiguration.maxPipeSpeedBonus);
    }
    return bonus;
}

/**
 * Per-tier bonus rule, identical to the original inline blocks: when more
 * than one part is present and the current bonus is below the part count, the
 * bonus becomes min(cap, count); otherwise the current bonus is kept.
 */
private static int clampedTierBonus(int current, int count, int cap) {
    if (count > 1 && current < count) {
        return Math.min(cap, count);
    }
    return current;
}
/**
 * Effective craft time in ticks for {@code recipe}: the recipe's base time
 * divided by the multiblock speed bonus (bonus is always >= 1, so no division
 * by zero).
 */
private int getCraftTime(IEFabRecipe recipe) {
    getSupportedTiers();
    int craftTime = recipe.getCraftTime();
    return craftTime / getSpeedBonus(recipe);
}

/**
 * Applies an animation speed boost to every tile entity in {@code posSet}
 * that supports it (e.g. steam engine wheels, mana receptacles).
 *
 * @param boost  speed boost value to apply
 * @param posSet positions of the parts to boost
 */
private void handleAnimationSpeed(int boost, Set<BlockPos> posSet) {
    checkMultiBlockCache();
    for (BlockPos enginePos : posSet) {
        TileEntity te = getWorld().getTileEntity(enginePos);
        if (te instanceof ISpeedBooster) {
            ISpeedBooster speedBooster = (ISpeedBooster) te;
            speedBooster.setSpeedBoost(boost);
        }
    }
}
/**
 * Restores the crafting state from NBT. Tag keys must mirror writeToNBT.
 */
@Override
public void readFromNBT(NBTTagCompound tagCompound) {
    super.readFromNBT(tagCompound);
    ticksRemaining.set(tagCompound.getInteger("ticks"));
    errorTicks = tagCompound.getInteger("error");
    totalTicks = tagCompound.getInteger("total");
    crafterDelay = tagCompound.getInteger("crafterDelay");
    repeat = tagCompound.getBoolean("repeat");
    rfWarning = tagCompound.getInteger("rfWarning");
    manaWarning = tagCompound.getInteger("manaWarning");
    crafterHelper.readFromNBT(tagCompound);
}

/**
 * Persists the crafting state to NBT. Tag keys must mirror readFromNBT.
 */
@Override
public NBTTagCompound writeToNBT(NBTTagCompound tagCompound) {
    super.writeToNBT(tagCompound);
    tagCompound.setInteger("ticks", ticksRemaining.get());
    tagCompound.setInteger("error", errorTicks);
    tagCompound.setInteger("total", totalTicks);
    tagCompound.setInteger("crafterDelay", crafterDelay);
    tagCompound.setBoolean("repeat", repeat);
    tagCompound.setInteger("rfWarning", rfWarning);
    tagCompound.setInteger("manaWarning", manaWarning);
    crafterHelper.writeToNBT(tagCompound);
    return tagCompound;
}

/**
 * Restores the part of the state that survives block breaking (the inventory).
 */
@Override
public void readRestorableFromNBT(NBTTagCompound tagCompound) {
    super.readRestorableFromNBT(tagCompound);
    readBufferFromNBT(tagCompound, inventoryHelper);
}

/**
 * Persists the part of the state that survives block breaking (the inventory).
 */
@Override
public void writeRestorableToNBT(NBTTagCompound tagCompound) {
    super.writeRestorableToNBT(tagCompound);
    writeBufferToNBT(tagCompound, inventoryHelper);
}
/** Accessor for the backing inventory helper. */
@Override
public InventoryHelper getInventoryHelper() {
    return inventoryHelper;
}

/**
 * Always reports non-empty. Deliberate: the grid should never be treated as
 * an empty inventory by vanilla logic.
 */
@Override
public boolean isEmpty() {
    return false;
}

/** Standard reach/ownership check delegated to the base class. */
@Override
public boolean isUsableByPlayer(EntityPlayer player) {
    return canPlayerAccess(player);
}

/** Ticks left in the current craft (negative when idle). */
public int getTicksRemaining() {
    return ticksRemaining.get();
}

/** Total ticks of the current craft (for progress display). */
public int getTotalTicks() {
    return totalTicks;
}
/**
 * Locates a connected tank that holds at least the requested amount of the
 * requested fluid. Returns null when {@code stack} is null or no tank
 * qualifies.
 */
private TankTE findSuitableTank(@Nullable FluidStack stack) {
    if (stack == null) {
        return null;
    }
    checkMultiBlockCache();
    for (BlockPos tankPos : tanks) {
        TileEntity te = getWorld().getTileEntity(tankPos);
        if (!(te instanceof TankTE)) {
            continue;
        }
        TankTE tankTE = (TankTE) te;
        FluidStack contents = tankTE.getFluid();
        boolean sameFluid = contents != null && stack.getFluid() == contents.getFluid();
        if (sameFluid && contents.amount >= stack.amount) {
            return tankTE;
        }
    }
    return null;
}
/**
 * Builds the resource-usage tooltip lines for the currently selected ghost
 * output (RF, mana, fluids, speed-up factor). On the client this returns the
 * list last synced from the server; on the server it is computed fresh.
 *
 * @return formatted usage lines, empty when no recipe matches
 */
public List<String> getUsage() {
    if (getWorld().isRemote) {
        return usageFromServer;
    }
    ItemStack output = getCurrentGhostOutput();
    IEFabRecipe recipe = crafterHelper.findRecipeForOutput(output, world);
    List<String> usage = new ArrayList<>();
    if (recipe != null) {
        int speedBonus = getSpeedBonus(recipe);
        if (speedBonus > 1) {
            usage.add(TextFormatting.GOLD + "Speed up factor: " + speedBonus);
        }
        // Per-tick consumption scales with the speed bonus; the total does not
        // (faster crafts burn more per tick over fewer ticks).
        if (recipe.getRequiredRfPerTick() > 0) {
            usage.add(TextFormatting.GRAY + "RF/t " + TextFormatting.BLUE + recipe.getRequiredRfPerTick() * speedBonus);
            usage.add(TextFormatting.GRAY + "Total " + TextFormatting.BLUE + recipe.getRequiredRfPerTick() * recipe.getCraftTime());
        }
        if (recipe.getRequiredManaPerTick() > 0) {
            usage.add(TextFormatting.GRAY + "Mana/t " + TextFormatting.BLUE + recipe.getRequiredManaPerTick() * speedBonus);
            usage.add(TextFormatting.GRAY + "Total " + TextFormatting.BLUE + recipe.getRequiredManaPerTick() * recipe.getCraftTime());
        }
        for (FluidStack fluidStack : recipe.getRequiredFluids()) {
            usage.add(TextFormatting.GRAY + "Liquid " + TextFormatting.BLUE + fluidStack.getLocalizedName() + " (" + fluidStack.amount + "mb)");
        }
    }
    return usage;
}
/**
 * Human-readable error messages for the currently selected ghost output.
 * On the client this returns the list last synced from the server; on the
 * server it is computed fresh. Empty when everything is fine.
 */
public List<String> getErrorState() {
    if (getWorld().isRemote) {
        return errorsFromServer;
    }
    IEFabRecipe recipe = crafterHelper.findRecipeForOutput(getCurrentGhostOutput(), world);
    List<String> messages = new ArrayList<>();
    getErrorsForOutput(recipe, messages);
    return messages;
}
/**
 * Validates whether {@code recipe} can currently be crafted by this
 * multiblock (tiers, fluids, RF, mana, steam).
 *
 * Two modes:
 * - {@code errors == null}: cheap check — returns true as soon as the first
 *   problem is found (used when sorting recipes).
 * - {@code errors != null}: collects a message for every problem; returns
 *   true when the list ends up non-empty.
 *
 * @param recipe recipe to validate; null is accepted and reports no errors
 * @param errors optional message sink
 * @return true when the recipe cannot currently be crafted
 */
public boolean getErrorsForOutput(IEFabRecipe recipe, @Nullable List<String> errors) {
    checkMultiBlockCache();
    if (recipe == null) {
        return false;
    }
    // 1. Every tier required by the recipe must be provided by the structure.
    Set<RecipeTier> supported = getSupportedTiers();
    for (RecipeTier tier : recipe.getRequiredTiers()) {
        if (!supported.contains(tier)) {
            if (errors != null) {
                errors.add(tier.getMissingError());
            } else {
                return true;
            }
        }
    }
    // 2. Each required fluid must be fully available in a single tank.
    for (FluidStack stack : recipe.getRequiredFluids()) {
        if (findSuitableTank(stack) == null) {
            if (errors != null) {
                errors.add("Not enough liquid: " + stack.getLocalizedName());
                errors.add(" " + stack.amount + " mb needed");
            } else {
                return true;
            }
        }
    }
    // 3. RF checks. A power optimizer bypasses these entirely.
    if (recipe.getRequiredRfPerTick() > 0) {
        if (powerOptimizers.isEmpty()) {
            int totavailable = 0;
            int maxpertick = 0;
            for (BlockPos p : rfControls) {
                TileEntity te = getWorld().getTileEntity(p);
                if (te instanceof IEFabEnergyStorage) {
                    IEFabEnergyStorage energyStorage = (IEFabEnergyStorage) te;
                    totavailable += energyStorage.getEnergyStored(null);
                    maxpertick += energyStorage.getMaxInternalConsumption();
                }
            }
            for (BlockPos p : rfStorages) {
                TileEntity te = getWorld().getTileEntity(p);
                if (te instanceof IEFabEnergyStorage) {
                    IEFabEnergyStorage energyStorage = (IEFabEnergyStorage) te;
                    totavailable += energyStorage.getEnergyStored(null);
                    maxpertick += energyStorage.getMaxInternalConsumption();
                }
            }
            if (recipe.getRequiredRfPerTick() > maxpertick) {
                if (errors != null) {
                    errors.add("Not enough power capacity!");
                    errors.add(" " + recipe.getRequiredRfPerTick() + "RF/t needed but only " +
                            maxpertick + " possible");
                } else {
                    return true;
                }
            } else if (recipe.getRequiredRfPerTick() > totavailable) {
                if (errors != null) {
                    errors.add("Not enough power!");
                } else {
                    return true;
                }
            }
        }
    }
    // 4. Mana checks (Botania integration only).
    if (EFab.botania && recipe.getRequiredManaPerTick() > 0) {
        int totavailable = 0;
        int maxpertick = 0;
        for (BlockPos p : manaReceptacles) {
            // Mana is read via the Botania support helper; the tile entity
            // itself is not needed here (unused local removed).
            totavailable += BotaniaSupportSetup.getMana(getWorld(), p);
            maxpertick += GeneralConfiguration.maxManaUsage;
        }
        if (recipe.getRequiredManaPerTick() > maxpertick) {
            if (errors != null) {
                errors.add("Not enough mana capacity!");
                errors.add(" " + recipe.getRequiredManaPerTick() + "mana/t needed but only " +
                        maxpertick + " possible");
            } else {
                return true;
            }
        } else if (recipe.getRequiredManaPerTick() > totavailable) {
            if (errors != null) {
                errors.add("Not enough mana!");
            } else {
                return true;
            }
        }
    }
    // 5. Steam: at least one hot boiler plus enough water in a tank.
    if (recipe.getRequiredTiers().contains(RecipeTier.STEAM)) {
        boolean ok = false;
        for (BlockPos boiler : boilers) {
            TileEntity te = getWorld().getTileEntity(boiler);
            if (te instanceof BoilerTE) {
                if (((BoilerTE) te).canMakeSteam()) {
                    ok = true;
                    break;
                }
            }
        }
        if (!ok) {
            if (errors != null) {
                errors.add("There are no boilers hot enough!");
            } else {
                return true;
            }
        }
        if (findSuitableTank(new FluidStack(FluidRegistry.WATER, GeneralConfiguration.waterSteamStartAmount)) == null) {
            if (errors != null) {
                errors.add("Insufficient water to make steam!");
            } else {
                return true;
            }
        }
    }
    return errors != null && !errors.isEmpty();
}
/**
 * Lazily computes the set of recipe tiers this multiblock currently supports,
 * derived from the scanned parts plus any installed upgrade items. Cached
 * until invalidateMultiBlockCache() is called.
 */
private Set<RecipeTier> getSupportedTiers() {
    if (supportedTiers == null) {
        checkMultiBlockCache();
        supportedTiers = EnumSet.noneOf(RecipeTier.class);
        // Steam needs both a boiler and a steam engine.
        if (!boilers.isEmpty() && !steamEngines.isEmpty()) {
            supportedTiers.add(RecipeTier.STEAM);
        }
        if (!gearBoxes.isEmpty()) {
            supportedTiers.add(RecipeTier.GEARBOX);
        }
        if (!tanks.isEmpty()) {
            supportedTiers.add(RecipeTier.LIQUID);
        }
        if (!rfControls.isEmpty()) {
            supportedTiers.add(RecipeTier.RF);
        }
        if (!manaReceptacles.isEmpty()) {
            supportedTiers.add(RecipeTier.MANA);
        }
        if (!processors.isEmpty()) {
            supportedTiers.add(RecipeTier.COMPUTING);
        }
        // Upgrade items can grant additional tiers directly.
        for (int i = GridContainer.SLOT_UPDATES; i < GridContainer.SLOT_UPDATES + GridContainer.COUNT_UPDATES ; i++) {
            ItemStack stack = getStackInSlot(i);
            if (!stack.isEmpty()) {
                if (stack.getItem() instanceof UpgradeItem) {
                    supportedTiers.add(((UpgradeItem) stack.getItem()).providesTier());
                }
            }
        }
    }
    return supportedTiers;
}
// Called client-side only
/**
 * Applies the crafting state pushed by the server: progress, cached error
 * and usage strings, and the possible outputs list.
 */
public void syncFromServer(int ticks, int total, List<String> errors, List<ItemStack> outputs, List<String> usage) {
    ticksRemaining.set(ticks);
    totalTicks = total;
    errorsFromServer = errors;
    usageFromServer = usage;
    crafterHelper.syncFromServer(outputs);
}

/**
 * Exposes the item handler capability using lazily created wrappers.
 * NOTE(review): the null-facing case returns a SidedInvWrapper(DOWN) while the
 * sided case returns a NullSidedInvWrapper — verify this mapping is intended
 * and not swapped.
 */
@Override
public <T> T getCapability(Capability<T> capability, EnumFacing facing) {
    if (capability == CapabilityItemHandler.ITEM_HANDLER_CAPABILITY) {
        if (needsCustomInvWrapper()) {
            if (facing == null) {
                if (invHandlerNull == null) {
                    invHandlerNull = new SidedInvWrapper(this, EnumFacing.DOWN);
                }
                return (T) invHandlerNull;
            } else {
                if (invHandlerSided == null) {
                    invHandlerSided = new NullSidedInvWrapper(this);
                }
                return (T) invHandlerSided;
            }
        }
    }
    return super.getCapability(capability, facing);
}
}
|
package com.secret.fastalign.main;
import java.io.File;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.Collections;
import com.secret.fastalign.general.FastaData;
import com.secret.fastalign.minhash.MinHashSearch;
import com.secret.fastalign.utils.FastAlignRuntimeException;
/**
 * Command line entry point: builds a MinHash index over an input fasta file
 * and reports overlap matches, either against itself or against one or more
 * query ("to") fasta files.
 */
public final class FastAlignMain
{
    /** Default number of min-wise hashed words per sequence. */
    private static final int DEFAULT_NUM_WORDS = 1024;
    private static final int DEFAULT_KMER_SIZE = 16;
    private static final double DEFAULT_THRESHOLD = 0.04;
    private static final int DEFAULT_NUM_MIN_MATCHES = 2;
    private static final int DEFAULT_SUB_SEQUENCE_SIZE = 5000;
    /** Default worker count: two threads per available core. */
    private static final int DEFAULT_NUM_THREADS = Runtime.getRuntime().availableProcessors()*2;
    private static final boolean DEFAULT_LARGE_MEMORY = true;
    private static final boolean DEFAULT_NO_SELF = false;

    public static void main(String[] args) throws Exception
    {
        String inFile = null;
        String toFile = null;
        int kmerSize = DEFAULT_KMER_SIZE;
        double threshold = DEFAULT_THRESHOLD;
        int numWords = DEFAULT_NUM_WORDS;
        int numMinMatches = DEFAULT_NUM_MIN_MATCHES;
        int subSequenceSize = DEFAULT_SUB_SEQUENCE_SIZE;
        boolean storeInMemory = DEFAULT_LARGE_MEMORY;
        int numThreads = DEFAULT_NUM_THREADS;
        boolean noSelf = DEFAULT_NO_SELF;

        // Parse the command line; flags taking a value consume the next arg.
        for (int i = 0; i < args.length; i++) {
            if (args[i].trim().equalsIgnoreCase("-k")) {
                kmerSize = Integer.parseInt(args[++i]);
            } else if (args[i].trim().equalsIgnoreCase("-s")) {
                inFile = args[++i];
            } else if (args[i].trim().equalsIgnoreCase("-q")) {
                toFile = args[++i];
            } else if (args[i].trim().equalsIgnoreCase("--num-hashes")) {
                numWords = Integer.parseInt(args[++i]);
            } else if (args[i].trim().equalsIgnoreCase("--threshold")) {
                threshold = Double.parseDouble(args[++i]);
            } else if (args[i].trim().equalsIgnoreCase("--num-min-matches")) {
                numMinMatches = Integer.parseInt(args[++i]);
            } else if (args[i].trim().equalsIgnoreCase("--subsequence-size")) {
                subSequenceSize = Integer.parseInt(args[++i]);
            } else if (args[i].trim().equalsIgnoreCase("--num-threads")) {
                numThreads = Integer.parseInt(args[++i]);
            } else if (args[i].trim().equalsIgnoreCase("--memory")) {
                storeInMemory = false;
            } else if (args[i].trim().equalsIgnoreCase("--no-self")) {
                noSelf = true;
            }
        }

        if (inFile == null) {
            printUsage("Error: no input fasta file specified"); // exits
        }

        System.err.println("Running with input fasta: " + inFile);
        System.err.println("kmer size:\t" + kmerSize);
        System.err.println("threshold:\t" + threshold);
        System.err.println("num hashed words:\t" + numWords);
        System.err.println("num min matches:\t" + numMinMatches);
        System.err.println("subsequence size:\t" + subSequenceSize);
        System.err.println("number of threads:\t" + numThreads);
        System.err.println("use large amount of memory:\t" + storeInMemory);
        System.err.println("compute alignment to self of -s file:\t" + !noSelf);

        long startTotalTime = System.nanoTime();

        // read and index the kmers
        FastaData data = new FastaData(inFile);

        long startTime = System.nanoTime();
        MinHashSearch hashSearch = new MinHashSearch(data, kmerSize, numWords, numMinMatches, subSequenceSize, numThreads, storeInMemory, false);
        System.err.println("Processed "+data.getNumberProcessed()+" sequences.");
        System.err.println("Time (s) to read and hash from file: " + (System.nanoTime() - startTime)*1.0e-9);

        long startTotalScoringTime = System.nanoTime();

        // now that we have the hash constructed, go through all sequences to recompute their min and score their matches
        if (toFile==null)
        {
            startTime = System.nanoTime();
            hashSearch.findMatches(threshold);
            System.err.println("Time (s) to score and output to self: " + (System.nanoTime() - startTime)*1.0e-9);
        }
        else
        {
            File file = new File(toFile);
            if (!file.exists())
                throw new FastAlignRuntimeException("To-file does not exist.");

            ArrayList<File> toFiles = new ArrayList<>();

            // if not a directory just add the file
            if (!file.isDirectory())
            {
                toFiles.add(file);
            }
            else
            {
                // read the directory content, skipping hidden files
                File[] fileList = file.listFiles(new FilenameFilter()
                {
                    @Override
                    public boolean accept(File dir, String name)
                    {
                        return !name.startsWith(".");
                    }
                });
                // listFiles returns null on I/O error — fail loudly instead of NPE
                if (fileList == null)
                    throw new FastAlignRuntimeException("Could not list contents of directory " + file + ".");
                for (File cf : fileList)
                    toFiles.add(cf);
            }

            // sort the files in alphabetical order
            Collections.sort(toFiles);

            // first perform to self
            startTime = System.nanoTime();
            if (!noSelf)
            {
                hashSearch.findMatches(threshold);
                System.out.flush();
                System.err.println("Time (s) to score and output to self: " + (System.nanoTime() - startTime)*1.0e-9);
            }

            // now do all the to-files
            for (File cf : toFiles)
            {
                // read and index the kmers
                data = new FastaData(cf.getCanonicalPath());
                System.err.println("Opened fasta file "+cf.getCanonicalPath()+".");

                // match the file
                startTime = System.nanoTime();
                hashSearch.findMatches(data, threshold);
                System.out.flush();
                System.err.println("Processed "+data.getNumberProcessed()+" to sequences.");
                System.err.println("Time (s) to score, hash to-file, and output: " + (System.nanoTime() - startTime)*1.0e-9);
            }
        }

        System.err.println("Total scoring time (s): " + (System.nanoTime() - startTotalScoringTime)*1.0e-9);
        System.err.println("Total time (s): " + (System.nanoTime() - startTotalTime)*1.0e-9);
        System.err.println("Total matches found: "+hashSearch.getMatchesProcessed());
        // NOTE(review): this "average" is scaled by 100 like the percentage
        // stats below — confirm the intended units.
        System.err.println("Average number of matches per lookup: " + (double)hashSearch.getMatchesProcessed()/(double)hashSearch.getNumberSequencesSearched()*100.0);
        System.err.println("Average % of hashed sequences hit per lookup: " + (double)hashSearch.getNumberSequencesHit()/(double)(hashSearch.size()*hashSearch.getNumberSequencesSearched())*100.0);
        System.err.println("Average % of hashed sequences hit that are matches: " + (double)hashSearch.getMatchesProcessed()/(double)hashSearch.getNumberSequencesHit()*100.0);
        System.err.println("Average % of hashed sequences fully compared that are matches: " + (double)hashSearch.getMatchesProcessed()/(double)hashSearch.getNumberSequencesFullyCompared()*100.0);
        System.err.flush();
    }

    /**
     * Prints an optional error message plus the usage text and exits with
     * status 1. Never returns.
     *
     * @param error optional error line, or null for plain usage output
     */
    public static void printUsage(String error) {
        if (error != null) {
            System.err.println(error);
        }
        System.err.println("Usage FastAlignMain -s<fasta from/self file> [-q<fasta to file>]");
        System.err.println("Options: ");
        System.err.println("\t -k [int merSize], default: " + DEFAULT_KMER_SIZE);
        System.err.println("\t --memory [do not store kmers in memory]");
        System.err.println("\t --num-hashes [int # hashes], default: " + DEFAULT_NUM_WORDS);
        System.err.println("\t --threshold [int threshold for % matching minimums], default: " + DEFAULT_THRESHOLD);
        System.err.println("\t --num-min-matches [int # hashes that matches before performing local alignment], default: " + DEFAULT_NUM_MIN_MATCHES);
        System.err.println("\t --num-threads [int # threads to use for computation], default (2 x #cores): " + DEFAULT_NUM_THREADS);
        System.err.println("\t --subsequence-size [int size of maximum minhashed sequence], default: " + DEFAULT_SUB_SEQUENCE_SIZE);
        System.err.println("\t --no-self [do not compute results to self], default: "+DEFAULT_NO_SELF);
        System.exit(1);
    }
}
|
package me.danieleangelucci.main;
import java.util.List;
import me.danieleangelucci.commons.AppConfig;
import me.danieleangelucci.shopping.controller.ShoppingBasketHandler;
import me.danieleangelucci.shopping.controller.StoreHandler;
import me.danieleangelucci.shopping.model.EmptyShoppingBasketException;
import me.danieleangelucci.shopping.model.Item;
import me.danieleangelucci.shopping.model.ShoppingBasket;
import me.danieleangelucci.shopping.model.UnloadableStoreException;
import me.danieleangelucci.shopping.model.UnreadableInputFileException;
import me.danieleangelucci.shopping.view.ShoppingBasketViewer;
/**
 * Entry point for the "Sales taxes problem" application: loads the store
 * configuration, parses the shopping basket from the input file, applies
 * sales taxes and prints the receipt.
 */
public class Main
{
    public static void main(String[] args)
    {
        System.out.println("\nWelcome to \"Sales taxes problem\"!\n");
        readConfiguration(args);

        // Load the store (i.e. categories and products in the store).
        StoreHandler storeHandler = new StoreHandler();
        try {
            storeHandler.initializeStore();
        } catch (UnloadableStoreException e) {
            e.printStackTrace();
            System.exit(1);
        }

        // Create a controller and parse the shopping basket from the input file.
        ShoppingBasketHandler basketHandler =
                new ShoppingBasketHandler(new ShoppingBasket(), new ShoppingBasketViewer());
        try {
            basketHandler.parseShoppingBasketItemsFromInputFile();
        } catch (UnreadableInputFileException e) {
            e.printStackTrace();
            System.exit(1);
        }

        // Apply sales taxes to the shopping basket items.
        basketHandler.computeFinalPriceOnShoppingBasketItems();

        // Retrieve the shopping basket items and produce the receipt.
        List<? extends Item> receiptItems = null;
        try {
            receiptItems = basketHandler.getShoppingBasketItems();
        } catch (EmptyShoppingBasketException e) {
            e.printStackTrace();
            System.exit(1);
        }
        System.out.println("\nRECEIPT:\n");
        basketHandler.showReceipt(receiptItems);
    }

    /** Validates the command line and stores the two file paths in AppConfig. */
    private static void readConfiguration(String[] args) {
        checkCommandlineArgs(args);
        AppConfig.categoriesFilePath = args[0];
        AppConfig.inputFilePath = args[1];
        System.out.println("Loading categories file: " + AppConfig.categoriesFilePath);
        System.out.println("Using input file: " + AppConfig.inputFilePath + "\n");
    }

    /** Exits with an error message unless exactly two arguments were given. */
    private static void checkCommandlineArgs(String[] args) {
        if (args.length == 2) {
            return;
        }
        System.err.println(args.length < 2 ? "Too few arguments." : "Too many arguments.");
        System.err.println("Commandline should be:\njava -jar \"path_to_jar\" \"categories_filepath\" \"input_filepath\"");
        System.exit(1);
    }
}
|
package org.squiddev.cctweaks.integration.multipart.network;
import codechicken.lib.data.MCDataInput;
import codechicken.lib.data.MCDataOutput;
import codechicken.lib.raytracer.IndexedCuboid6;
import codechicken.lib.render.TextureUtils;
import codechicken.lib.vec.Cuboid6;
import codechicken.lib.vec.Vector3;
import codechicken.microblock.ISidedHollowConnect;
import codechicken.multipart.TMultiPart;
import codechicken.multipart.TSlottedPart;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import dan200.computercraft.ComputerCraft;
import dan200.computercraft.client.render.FixedRenderBlocks;
import dan200.computercraft.shared.peripheral.PeripheralType;
import dan200.computercraft.shared.peripheral.common.PeripheralItemFactory;
import dan200.computercraft.shared.peripheral.modem.TileCable;
import net.minecraft.block.Block;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.util.Facing;
import net.minecraft.util.IIcon;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;
import org.squiddev.cctweaks.CCTweaks;
import org.squiddev.cctweaks.api.IWorldPosition;
import org.squiddev.cctweaks.api.network.INetworkNode;
import org.squiddev.cctweaks.api.network.IWorldNetworkNode;
import org.squiddev.cctweaks.api.network.IWorldNetworkNodeHost;
import org.squiddev.cctweaks.core.network.NetworkHelpers;
import org.squiddev.cctweaks.core.network.cable.CableWithInternalSidedParts;
import org.squiddev.cctweaks.core.utils.DebugLogger;
import org.squiddev.cctweaks.integration.multipart.PartBase;
import java.lang.reflect.Field;
import java.util.*;
public class PartCable extends PartBase implements IWorldNetworkNodeHost, TSlottedPart, ISidedHollowConnect {
/** Multipart type identifier: "<modname>:networkCable". */
public static final String NAME = CCTweaks.NAME + ":networkCable";

// Cable textures; populated elsewhere (client-side rendering code).
private static IIcon[] icons;

// Extent of the cable core inside the block space (0.375 .. 0.625).
public static final double MIN = 0.375;
public static final double MAX = 1 - MIN;

/**
 * Side we are testing the connection on
 *
 * @see #canCableExtendInDirection(ForgeDirection)
 */
private ForgeDirection connectionTestSide = ForgeDirection.UNKNOWN;

// Network cable behavior delegate for this part.
private CableWithInternalSidedParts cable = new CableImpl();
// Lazily created client-side renderer; only ever touched on the client.
@SideOnly(Side.CLIENT)
private CableRenderer render;

/** Returns the client-side cable renderer, creating it on first use. */
@SideOnly(Side.CLIENT)
public CableRenderer getRender() {
    if (render == null) {
        render = new CableRenderer();
    }
    return render;
}
@Override
public String getType() {
return NAME;
}
@Override
public int getSlotMask() {
return 1 << 6;
}
@Override
public Iterable<Cuboid6> getOcclusionBoxes() {
if (connectionTestSide == ForgeDirection.UNKNOWN) {
return Collections.singletonList(new Cuboid6(MIN, MIN, MIN, MAX, MAX, MAX));
}
// In order to determine if this cable can pass in a certain direction,
// through things like covers and hollow covers,
// add an occlusion box in that direction, test if occlusion collisions occur,
// and only make the connection if no collision occurs.
// Then remove the added occlusion box.
List<Cuboid6> parts = new ArrayList<Cuboid6>();
if (tile() != null) {
if (connectionTestSide == ForgeDirection.WEST) {
parts.add(new Cuboid6(0, MIN, MIN, MIN, MAX, MAX));
}
if (connectionTestSide == ForgeDirection.EAST) {
parts.add(new Cuboid6(MAX, MIN, MIN, 1, MAX, MAX));
}
if (connectionTestSide == ForgeDirection.DOWN) {
parts.add(new Cuboid6(MIN, 0, MIN, MAX, MIN, MAX));
}
if (connectionTestSide == ForgeDirection.UP) {
parts.add(new Cuboid6(MIN, MAX, MIN, MAX, 1, MAX));
}
if (connectionTestSide == ForgeDirection.NORTH) {
parts.add(new Cuboid6(MIN, MIN, 0, MAX, MAX, MIN));
}
if (connectionTestSide == ForgeDirection.SOUTH) {
parts.add(new Cuboid6(MIN, MIN, MAX, MAX, MAX, 1));
}
}
return parts;
}
@Override
public Cuboid6 getBounds() {
double xMin = MIN;
double yMin = MIN;
double zMin = MIN;
double xMax = MAX;
double yMax = MAX;
double zMax = MAX;
if (tile() != null) {
if (cable.doesConnect(ForgeDirection.WEST)) xMin = 0.0D;
if (cable.doesConnect(ForgeDirection.EAST)) xMax = 1.0D;
if (cable.doesConnect(ForgeDirection.DOWN)) yMin = 0.0D;
if (cable.doesConnect(ForgeDirection.UP)) yMax = 1.0D;
if (cable.doesConnect(ForgeDirection.NORTH)) zMin = 0.0D;
if (cable.doesConnect(ForgeDirection.SOUTH)) zMax = 1.0D;
}
return new Cuboid6(xMin, yMin, zMin, xMax, yMax, zMax);
}
@Override
public Iterable<IndexedCuboid6> getSubParts() {
List<IndexedCuboid6> parts = new ArrayList<IndexedCuboid6>();
parts.add(new IndexedCuboid6(ForgeDirection.UNKNOWN, new Cuboid6(MIN, MIN, MIN, MAX, MAX, MAX)));
if (tile() != null) {
if (cable.doesConnect(ForgeDirection.WEST)) {
parts.add(new IndexedCuboid6(ForgeDirection.WEST, new Cuboid6(0, MIN, MIN, MIN, MAX, MAX)));
}
if (cable.doesConnect(ForgeDirection.EAST)) {
parts.add(new IndexedCuboid6(ForgeDirection.EAST, new Cuboid6(MAX, MIN, MIN, 1, MAX, MAX)));
}
if (cable.doesConnect(ForgeDirection.DOWN)) {
parts.add(new IndexedCuboid6(ForgeDirection.DOWN, new Cuboid6(MIN, 0, MIN, MAX, MIN, MAX)));
}
if (cable.doesConnect(ForgeDirection.UP)) {
parts.add(new IndexedCuboid6(ForgeDirection.UP, new Cuboid6(MIN, MAX, MIN, MAX, 1, MAX)));
}
if (cable.doesConnect(ForgeDirection.NORTH)) {
parts.add(new IndexedCuboid6(ForgeDirection.NORTH, new Cuboid6(MIN, MIN, 0, MAX, MAX, MIN)));
}
if (cable.doesConnect(ForgeDirection.SOUTH)) {
parts.add(new IndexedCuboid6(ForgeDirection.SOUTH, new Cuboid6(MIN, MIN, MAX, MAX, MAX, 1)));
}
}
return parts;
}
@Override
public void harvest(MovingObjectPosition hit, EntityPlayer player) {
World world = world();
int x = x(), y = y(), z = z();
super.harvest(hit, player);
if (!world.isRemote) {
cable.removeFromWorld();
}
}
@Override
@SideOnly(Side.CLIENT)
public IIcon getBrokenIcon(int side) {
return ComputerCraft.Blocks.cable.getIcon(0, 0);
}
@Override
@SideOnly(Side.CLIENT)
public boolean renderStatic(Vector3 pos, int pass) {
if (pass == 0) {
TextureUtils.bindAtlas(0);
getRender().drawTile(world(), x(), y(), z());
return true;
}
return false;
}
@Override
public ItemStack getItem() {
return PeripheralItemFactory.create(PeripheralType.Cable, null, 1);
}
@Override
public void onPartChanged(TMultiPart part) {
// Fire a network changed event when the entire part is modified.
// This is because it may block a connection or release a new one
if (tile() != null) {
if (!world().isRemote && cable.updateConnections()) {
sendDescUpdate();
}
}
}
@Override
public void onWorldJoin() {
cable.updateConnections();
}
@Override
public int getHollowSize(int i) {
return 4;
}
@Override
public boolean doesTick() {
return false;
}
@Override
public void writeDesc(MCDataOutput data){
// TODO: Implement
}
@Override
public void readDesc(MCDataInput data){
// TODO: Implement
}
/**
* Tests if the cable can pass through a side.
* Uses TileMultipart.canReplacePart to see if a version of this cable with
* with a certain side's occlusion extended to the full length
* can be placed in the multipart.
* If not, there's a cover or something in the way.
* Else, there's no cover, or something like a hollow cover.
*
* @param dir The direction to test in
* @return whether the cable can extend in that direction.
*/
protected boolean canCableExtendInDirection(ForgeDirection dir) {
connectionTestSide = dir;
boolean occludes = tile().canReplacePart(this, this);
connectionTestSide = ForgeDirection.UNKNOWN;
return occludes;
}
@Override
public IWorldNetworkNode getNode() {
return cable;
}
private class CableImpl extends CableWithInternalSidedParts {
@Override
public Set<INetworkNode> getConnectedNodes() {
Set<INetworkNode> nodes = new HashSet<INetworkNode>();
nodes.addAll(super.getConnectedNodes());
for (TMultiPart part : tile().jPartList()) {
if (part != PartCable.this) {
if (part instanceof INetworkNode) {
nodes.add((INetworkNode) part);
} else if (part instanceof IWorldNetworkNodeHost) {
nodes.add(((IWorldNetworkNodeHost) part).getNode());
}
}
}
return nodes;
}
@Override
public boolean canConnectInternally(ForgeDirection direction) {
TMultiPart part = tile().partMap(direction.ordinal());
INetworkNode node = part instanceof INetworkNode ? (INetworkNode) part
: part instanceof IWorldNetworkNodeHost ? ((IWorldNetworkNodeHost) part).getNode()
: null;
return node != null;
}
@Override
public IWorldPosition getPosition() {
return PartCable.this;
}
@Override
public boolean canConnect(ForgeDirection direction) {
return canCableExtendInDirection(direction) && NetworkHelpers.canConnect(PartCable.this, direction);
}
}
public class CableRenderer extends FixedRenderBlocks {
/**
* When rendering with other nodes on the multipart, rendering overlaps,
* resulting in flickering between the two nodes.
*
* If we detect a node on one side, we add some padding so they don't overlap
* as much.
*
* There are probably better ways of doing this using {@link TMultiPart#getRenderBounds()}
*/
public static final double RENDER_PADDING = 0.1;
public IIcon[] getIcons() {
IIcon[] icons;
if ((icons = PartCable.icons) == null) {
try {
Field field = TileCable.class.getDeclaredField("s_cableIcons");
field.setAccessible(true);
icons = (IIcon[]) field.get(null);
} catch (ReflectiveOperationException e) {
DebugLogger.error("Cannot find TileCable texture", e);
icons = new IIcon[2];
}
PartCable.icons = icons;
}
return icons;
}
@Override
public IIcon getBlockIcon(Block block, IBlockAccess world, int x, int y, int z, int side) {
int dir = -1;
if (canVisuallyConnect(ForgeDirection.WEST) || canVisuallyConnect(ForgeDirection.EAST)) {
dir = dir == -1 ? 4 : -2;
}
if (canVisuallyConnect(ForgeDirection.UP) || canVisuallyConnect(ForgeDirection.DOWN)) {
dir = dir == -1 ? 0 : -2;
}
if (canVisuallyConnect(ForgeDirection.NORTH) || canVisuallyConnect(ForgeDirection.SOUTH)) {
dir = dir == -1 ? 2 : -2;
}
if (dir == -1) dir = 2;
if ((dir >= 0) && ((side == dir) || (side == Facing.oppositeSide[dir]))) {
return getIcons()[1];
}
return getIcons()[0];
}
public void drawTile(IBlockAccess world, int x, int y, int z) {
setWorld(world);
Block block = ComputerCraft.Blocks.cable;
setRenderBounds(MIN, MIN, MIN, MAX, MAX, MAX);
renderStandardBlock(block, x, y, z);
if (cable.doesConnect(ForgeDirection.DOWN)) {
setRenderBounds(MIN, 0, MIN, MAX, MIN, MAX);
renderStandardBlock(block, x, y, z);
} else if (cable.doesConnectInternally(ForgeDirection.DOWN)) {
setRenderBounds(MIN, 0 + RENDER_PADDING, MIN, MAX, MIN, MAX);
renderStandardBlock(block, x, y, z);
}
if (cable.doesConnect(ForgeDirection.UP)) {
setRenderBounds(MIN, MAX, MIN, MAX, 1, MAX);
renderStandardBlock(block, x, y, z);
} else if (cable.doesConnectInternally(ForgeDirection.UP)) {
setRenderBounds(MIN, MAX, MIN, MAX, 1 - RENDER_PADDING, MAX);
renderStandardBlock(block, x, y, z);
}
if (cable.doesConnect(ForgeDirection.NORTH)) {
setRenderBounds(MIN, MIN, 0, MAX, MAX, MIN);
renderStandardBlock(block, x, y, z);
} else if (cable.doesConnectInternally(ForgeDirection.NORTH)) {
setRenderBounds(MIN, MIN, 0 + RENDER_PADDING, MAX, MAX, MIN);
renderStandardBlock(block, x, y, z);
}
if (cable.doesConnect(ForgeDirection.SOUTH)) {
setRenderBounds(MIN, MIN, MAX, MAX, MAX, 1);
renderStandardBlock(block, x, y, z);
} else if (cable.doesConnectInternally(ForgeDirection.SOUTH)) {
setRenderBounds(MIN, MIN, MAX, MAX, MAX, 1 - RENDER_PADDING);
renderStandardBlock(block, x, y, z);
}
if (cable.doesConnect(ForgeDirection.WEST)) {
setRenderBounds(0, MIN, MIN, MIN, MAX, MAX);
renderStandardBlock(block, x, y, z);
} else if (cable.doesConnectInternally(ForgeDirection.WEST)) {
setRenderBounds(0 + RENDER_PADDING, MIN, MIN, MIN, MAX, MAX);
renderStandardBlock(block, x, y, z);
}
if (cable.doesConnect(ForgeDirection.EAST)) {
setRenderBounds(MAX, MIN, MIN, 1, MAX, MAX);
renderStandardBlock(block, x, y, z);
} else if (cable.doesConnectInternally(ForgeDirection.EAST)) {
setRenderBounds(MAX, MIN, MIN, 1 - RENDER_PADDING, MAX, MAX);
renderStandardBlock(block, x, y, z);
}
}
/**
* Tests to see if there is something to connect to, either in the
* same block space or out
*
* @param side The side to check
* @return If we should appear to connect on that side.
*/
public boolean canVisuallyConnect(ForgeDirection side) {
return cable.doesConnect(side) || cable.doesConnectInternally(side);
}
}
}
|
package com.superyass.superUrlTextSearcher;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
* @author SuperYass
*/
public class Main {
@Parameter(names = {"-input", "-in"}, description = "Input folder :")
private static String inputFolder = "C:\\";
@Parameter(names = {"-search", "-s"}, description = "comma separated search keywords")
private static List<String> search;
@Parameter(names = {"-ExcludedSearch", "-xs"}, description = "comma separated keywords to be excluded")
private static List<String> ExcludedSearch;
@Parameter(names = {"-addExtensions", "-addExt", "-ae"}, description = "comma separated extensions to be added")
private static List<String> addedExtensions;
@Parameter(names = {"-excludExtensions", "-execExt", "-exce"}, description = "comma separated extensions to be excluded")
private static List<String> excludedExtensions;
@Parameter(names = {"-exclExtensions", "-exclExt", "-exle"}, description = "comma separated extensions to be exclusively looked for")
private static List<String> exclusifExtensions;
@Parameter(names = "-debug", description = "Debug mode")
private static boolean debug = false;
@Parameter(names = {"-h", "-help", "--help"}, description = "help mode")
private static boolean help = false;
public static final String[] defaultExtensions = {"txt", "md", "java", "php", "py", "c", "pl", "rb", "cs","json","smali"};
public static String urlRegex = "((https?|ftp|gopher|telnet|file):((//)|(\\\\))+[\\w\\d:
public static void main(String[] args) {
final String[] argv = {"-s", "chil3ba,chil3ba2"};
final Main options = new Main();
final JCommander jcmdr = new JCommander(options, args);
//args management
jcmdr.setProgramName("SuperURLTextSearcher");
jcmdr.setAllowAbbreviatedOptions(true);
if (help) {
jcmdr.usage();
}
if (debug) {
Logger.getGlobal().setLevel(Level.INFO);
} else {
Logger.getGlobal().setLevel(Level.SEVERE);
}
File dir = new File(inputFolder);
if (dir.isDirectory()) {
getFiles(dir);
} else {
Logger.getLogger(Main.class.getName()).log(Level.SEVERE, "Input path not a valid folder");
}
}
public static List<File> getFiles(File f) {
List<File> out = new ArrayList<>();
List<File> files = Arrays.asList(f.listFiles());
files.stream().forEach((aFile) -> {
if (aFile.isDirectory()) {
getFiles(aFile);
} else {
String extension = getFileExtension(aFile);
if (isExtensionSupported(extension)) {
try (Scanner scanner = new Scanner(aFile)) {
int lineNumber = 0;
while (scanner.hasNext()) {
lineNumber++;
List<String> urls = extractUrls(scanner.nextLine());
if (urls != null && !urls.isEmpty()) {
System.out.print("[" + aFile.getCanonicalPath().replace(inputFolder, "") + "](line:" + lineNumber + "): ");
for (String url : urls) {
System.out.print(url + " ");
}
System.out.println("");
}
}
} catch (IOException e) {
Logger.getLogger(Main.class.getName()).log(Level.SEVERE, e.getMessage());
}
}
}
});
return out;
}
public static boolean isExtensionSupported(String ext) {
//if exclusif search is empty test for the default extensions
if (exclusifExtensions==null || exclusifExtensions.isEmpty()) {
//Logger.getLogger(Main.class.getName()).log(Level.INFO, "Extensions looked for: " + defaultExtensions);
for (String extension : defaultExtensions) {
if (extension.equalsIgnoreCase(ext)) {
return true;
}
}
} else {
Logger.getLogger(Main.class.getName()).log(Level.INFO, "Extensions looked for: " + exclusifExtensions);
for (String extension : exclusifExtensions) {
if (extension.equalsIgnoreCase(ext)) {
return true;
}
}
}
return false;
}
public static List<String> extractUrls(String text) {
List<String> containedUrls = new ArrayList<>();
Pattern pattern = Pattern.compile(urlRegex, Pattern.CASE_INSENSITIVE);
Matcher urlMatcher = pattern.matcher(text);
while (urlMatcher.find()) {
containedUrls.add(text.substring(urlMatcher.start(0),
urlMatcher.end(0)));
}
return containedUrls;
}
public static String getFileExtension(File file) {
String name = file.getName();
try {
return name.substring(name.lastIndexOf(".") + 1);
} catch (Exception e) {
return "";
}
}
}
|
package net.etalia.client.utils;
import java.util.Collection;
public class Utils {

    /**
     * Throws an {@link IllegalStateException} with the given message if {@code b} is true.
     *
     * @param string message for the exception
     * @param b condition that must be false
     */
    public static void assertFalse(String string, boolean b) {
        if (b) throw new IllegalStateException(string);
    }

    /**
     * Throws an {@link IllegalStateException} with the given message if {@code b} is false.
     *
     * @param string message for the exception
     * @param b condition that must be true
     */
    public static void assertTrue(String string, boolean b) {
        if (!b) throw new IllegalStateException(string);
    }

    /**
     * Joins the given path segments together, adding slashes where appropriate.
     * Consecutive non-empty segments are separated by exactly one slash,
     * whether or not the segments themselves start or end with one.
     *
     * @param segments A list of strings representing path segments.
     * @return the segments joined with forward slash properly to form a valid path.
     */
    public static String pathConcat(String... segments) {
        StringBuilder sb = new StringBuilder();
        boolean lastEndsWithSlash = false;
        boolean first = true;
        for (String s : segments) {
            if (first) {
                sb.append(s);
                first = false;
            } else if (!lastEndsWithSlash && !s.startsWith("/")) {
                sb.append("/").append(s);
            } else if (lastEndsWithSlash && s.startsWith("/")) {
                sb.append(s.substring(1));
            } else {
                sb.append(s);
            }
            // BUG FIX: the flag was previously only ever set to true and never
            // cleared, so after any segment ending in "/" all later separators
            // were dropped (pathConcat("a/", "b", "c") yielded "a/bc").
            // Empty segments keep the previous state so they stay pass-through.
            if (!s.isEmpty()) {
                lastEndsWithSlash = s.endsWith("/");
            }
        }
        return sb.toString();
    }

    /**
     * @see #pathConcat(String...)
     */
    public static String pathConcat(Collection<String> segments) {
        return pathConcat(segments.toArray(new String[segments.size()]));
    }

    /**
     * Throws an {@link IllegalStateException} with the given message unless {@code obj} is null.
     *
     * @param string message for the exception
     * @param obj object expected to be null
     */
    public static void assertNull(String string, Object obj) {
        assertTrue(string, obj == null);
    }
}
|
package py.com.sodep.notificationserver.business;
import org.apache.log4j.Logger;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.io.File;
import java.sql.SQLException;
import java.util.Iterator;
import javapns.json.JSONException;
import javapns.notification.Payload;
import javapns.notification.PushNotificationPayload;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import org.hibernate.HibernateException;
import py.com.sodep.notificationserver.db.dao.AplicacionDao;
import py.com.sodep.notificationserver.db.dao.DeviceRegistrationDao;
import py.com.sodep.notificationserver.db.dao.EventoDao;
import py.com.sodep.notificationserver.db.entities.Aplicacion;
import py.com.sodep.notificationserver.db.entities.Evento;
import py.com.sodep.notificationserver.db.entities.AndroidNotification;
import py.com.sodep.notificationserver.db.entities.AndroidResponse;
import py.com.sodep.notificationserver.db.entities.DeviceRegistration;
import py.com.sodep.notificationserver.db.entities.IosResponse;
import py.com.sodep.notificationserver.db.entities.Result;
import py.com.sodep.notificationserver.exceptions.handlers.BusinessException;
import py.com.sodep.notificationserver.exceptions.handlers.ExceptionMapperHelper;
import py.com.sodep.notificationserver.facade.ApnsFacade;
import py.com.sodep.notificationserver.facade.GcmFacade;
import py.com.sodep.notificationserver.rest.entities.EventoResponse;
@ApplicationScoped
public class NotificationBusiness {

    @Inject
    AplicacionDao appDao;
    @Inject
    ApnsFacade facade;
    @Inject
    GcmFacade service;
    @Inject
    AndroidNotification notification;
    @Inject
    EventoDao eventoDao;
    @Inject
    DeviceRegistrationDao deviceDao;
    @Inject
    Logger logger;

    /**
     * Creates and persists a notification event for the named application.
     *
     * @param e       event to persist; its application is resolved from appName
     * @param appName name of a registered application
     * @return a response wrapping the persisted event
     * @throws BusinessException if the application does not exist or the payload exceeds its size limit
     */
    public EventoResponse crearEvento(Evento e, String appName) throws BusinessException, HibernateException, SQLException {
        Aplicacion a = appDao.getByName(appName);
        if (a != null) {
            e.setAplicacion(a);
            validate(e);
            eventoDao.create(e);
            EventoResponse er = new EventoResponse(e);
            // Blocked-application verification is intentionally disabled on creation.
            //verificarNotificacionBloqueada(e);
            return er;
        } else {
            throw new BusinessException(ExceptionMapperHelper.appError.APLICACION_NOT_FOUND.ordinal(), "La aplicacion " + appName + " no existe.");
        }
    }

    /** Persists any state changes made to the event and returns it. */
    public Evento actualizarEvento(Evento e) throws HibernateException, SQLException {
        eventoDao.update(e);
        return e;
    }

    /**
     * Dispatches the event to Android (GCM) and/or iOS (APNs), choosing
     * production or development credentials from the event's mode. A platform
     * with no configured credentials is skipped silently.
     *
     * @throws BusinessException if the event's application is not registered
     */
    public Evento notificar(Evento e) throws BusinessException, HibernateException, SQLException, Exception {
        Aplicacion app = appDao.getByName(e.getAplicacion().getNombre());
        if (app != null) {
            if (e.isProductionMode()) {
                if (app.getApiKeyProd() != null) {
                    e.setAndroidResponse(notificarAndroid(app.getApiKeyProd(), e));
                }
                if (app.getCertificadoProd() != null && app.getKeyFileProd() != null) {
                    e.setIosResponse(notificarIos(app.getCertificadoProd(), app.getKeyFileProd(), e, true));
                }
            } else {
                if (app.getApiKeyDev() != null) {
                    e.setAndroidResponse(notificarAndroid(app.getApiKeyDev(), e));
                }
                if (app.getCertificadoDev() != null && app.getKeyFileDev() != null) {
                    e.setIosResponse(notificarIos(app.getCertificadoDev(), app.getKeyFileDev(), e, false));
                }
            }
        } else {
            throw new BusinessException(ExceptionMapperHelper.appError.APLICACION_NOT_FOUND.ordinal(), "La aplicacion " + e.getAplicacion().getNombre() + " no existe.");
        }
        // NOTE(review): both states are set to ENVIADO unconditionally, even when a
        // platform was skipped for lack of credentials — confirm this is intended.
        e.setEstadoAndroid("ENVIADO");
        e.setEstadoIos("ENVIADO");
        return e;
    }

    /**
     * Builds the APNs payload for the event and sends it to its iOS devices.
     * Sync events are sent as silent pushes (content-available only); normal
     * events carry an alert, the default sound and the event's custom payload
     * fields.
     *
     * FIX: removed a dead duplicate {@code evento.isSendToSync()} check inside
     * the else branch — that branch is only reached when it is already false.
     *
     * @throws BusinessException if the custom payload cannot be parsed
     */
    @SuppressWarnings("rawtypes")
    public IosResponse notificarIos(String certifadoPath, String keyFile,
            Evento evento, Boolean productionMode) throws BusinessException, HibernateException, SQLException {
        logger.info("[Evento: " + evento.getId() + "]: Notificando iOs");
        File certificado = new File(certifadoPath);
        Payload payload = PushNotificationPayload.complex();
        ObjectNode pay = evento.getObjectNodePayLoad();
        try {
            if (evento.isSendToSync()) {
                // Silent push: wake the app for a sync without alerting the user.
                ((PushNotificationPayload) payload).addCustomDictionary("content-available", "1");
            } else {
                ((PushNotificationPayload) payload).addAlert(evento
                        .getAlert());
                ((PushNotificationPayload) payload).addSound("default");
                // Copy the event's custom payload fields into the push dictionary.
                Iterator it = pay.fieldNames();
                while (it.hasNext()) {
                    String pair = (String) it.next();
                    logger.info(pair + " = " + pay.get(pair));
                    payload.addCustomDictionary((String) pair,
                            pay.get(pair).asText());
                }
            }
        } catch (JSONException e) {
            throw new BusinessException(ExceptionMapperHelper.appError.BAD_REQUEST.ordinal(), "Error al parsear payload en notificacion iOs.");
        }
        return facade.send(payload, certificado, keyFile, productionMode, evento.getIosDevicesList());
    }

    /**
     * Sends the event through GCM and post-processes per-device results:
     * devices with recoverable registration errors are recorded for
     * re-registration, while fatal application-level errors block the
     * application for Android notifications.
     */
    public AndroidResponse notificarAndroid(String apiKey, Evento evento) throws BusinessException, HibernateException, SQLException, Exception {
        logger.info("[Evento: " + evento.getId() + "]: notificando android");
        if (evento.getAndroidDevicesList().size() == 1) {
            logger.info("[Evento: " + evento.getId() + "]: Un solo device. Notificando android");
            notification.setTo(evento.getAndroidDevicesList().get(0));
        } else {
            logger.info("[Evento: " + evento.getId() + "]: Lista. Notificando android");
            notification.setRegistration_ids(evento.getAndroidDevicesList());
        }
        notification.setData(evento.getObjectNodePayLoad().put("alert", evento.getAlert()));
        AndroidResponse ar = service.send(apiKey, notification);
        if (ar.getFailure() > 0) {
            for (int i = 0; i < ar.getResults().size(); i++) {
                Result r = ar.getResults().get(i);
                logger.info("Analizando resultado: " + r);
                if (r.getError() != null && (r.getError().equals("NotRegistered")
                        || r.getError().equals("DeviceMessageRate")
                        || r.getError().equals("InvalidRegistration")
                        || r.getError().equals("MissingRegistration"))) {
                    // Stale or malformed device token: record it for cleanup/refresh.
                    DeviceRegistration d = new DeviceRegistration(
                            evento.getAndroidDevicesList().get(i), r.getRegistration_id(),
                            "NUEVO", r.getError(), evento.getAplicacion());
                    deviceDao.create(d);
                }
                if (r.getError() != null && (r.getError().equals("InvalidPackageName")
                        || r.getError().equals("MismatchSenderId"))) {
                    logger.info("Se bloquea la aplicación: " + r.getError());
                    Aplicacion a = evento.getAplicacion();
                    a.setError(r.getError());
                    a.setEstadoAndroid("BLOQUEADA");
                    // NOTE(review): create() is used to persist the blocked state —
                    // confirm it upserts rather than inserting a duplicate row.
                    appDao.create(a);
                }
            }
        }
        return ar;
    }

    /**
     * Validates that alert + payload fit within the application's configured
     * payload size (measured in bytes of the concatenated text).
     *
     * @throws BusinessException when the payload is too large
     */
    public void validate(Evento e) throws BusinessException {
        String s = e.getAlert() + e.getPayload().asText();
        if (s.getBytes().length > e.getAplicacion().getPayloadSize()) {
            throw new BusinessException(500, "El tamaño del payload supera el "
                    + "configurado para la aplicación: "
                    + e.getAplicacion().getPayloadSize());
        }
    }

    /**
     * Rejects the event when its application is blocked for the platform(s)
     * the event actually targets.
     *
     * @throws BusinessException when the application is blocked for Android or iOS
     */
    public void verificarNotificacionBloqueada(Evento e) throws BusinessException {
        if (e.getAplicacion().getEstadoAndroid() != null
                && e.getAplicacion().getEstadoAndroid().equals("BLOQUEADA")
                && (e.getAndroidDevicesList() != null
                && e.getAndroidDevicesList().size() > 0)) {
            throw new BusinessException(
                    ExceptionMapperHelper.appError.APLICACION_BLOCKED.ordinal(),
                    "La aplicacion " + e.getAplicacion().getNombre()
                    + " esta bloqueada para notificaciones Android. Error: " + e.getAplicacion().getError());
        }
        if (e.getAplicacion().getEstadoIos() != null
                && e.getAplicacion().getEstadoIos().equals("BLOQUEADA")
                && (e.getIosDevicesList() != null
                && e.getIosDevicesList().size() > 0)) {
            throw new BusinessException(
                    ExceptionMapperHelper.appError.APLICACION_BLOCKED.ordinal(),
                    "La aplicacion " + e.getAplicacion().getNombre()
                    + " esta bloqueada para notificaciones iOs. Error: " + e.getAplicacion().getError());
        }
    }
}
|
package com.techcavern.wavetact.utils;
import org.apache.commons.lang3.StringUtils;
import org.pircbotx.Channel;
import org.pircbotx.PircBotX;
import org.pircbotx.User;
import org.pircbotx.hooks.events.WhoisEvent;
import static com.techcavern.wavetactdb.Tables.*;
public class PermUtils {

    /**
     * Resolves a user's account identity according to the network's configured
     * auth type ("nickserv" or "account"); any other setting falls back to the
     * raw hostmask.
     */
    public static String getAccount(PircBotX network, String userObject, String hostmask) {
        String authtype = DatabaseUtils.getNetwork(IRCUtils.getNetworkNameByNetwork(network)).getValue(NETWORKS.AUTHTYPE);
        switch (authtype) {
            case "nickserv":
                return getNickServAccountName(network, userObject, hostmask);
            case "account":
                return getAuthedUser(network, hostmask);
            default:
                return hostmask;
        }
    }

    /**
     * Looks up the user's hostmask and resolves their account from it.
     *
     * @return the account identity, or null when no hostmask could be determined
     */
    public static String authUser(PircBotX network, String userObject) {
        String hostmask = IRCUtils.getHostmask(network, userObject, false);
        if (hostmask != null) {
            return getAccount(network, userObject, hostmask);
        } else {
            return null;
        }
    }

    /**
     * Returns the NickServ account name: consults the authed-user cache first,
     * and on a miss performs the WHOIS-based lookup and caches the result.
     */
    public static String getNickServAccountName(PircBotX network, String userObject, String hostmask) {
        String userString = getAuthedUser(network, hostmask);
        if (userString == null) {
            userString = getAccountName(network, hostmask, userObject);
            if (userString != null) {
                Registry.authedUsers.get(network).put(hostmask, userString);
            }
        }
        return userString;
    }

    /**
     * Returns the cached authenticated account name for this hostmask, or null
     * when unknown.
     *
     * FIX: the null-hostmask guard now runs before the cache lookup instead of
     * after it, so the per-network map is never queried with a null key.
     */
    public static String getAuthedUser(PircBotX network, String hostmask) {
        if (hostmask == null) {
            return null;
        }
        return Registry.authedUsers.get(network).get(hostmask);
    }

    /**
     * Performs the WHOIS lookups that yield the actual NickServ account name.
     * A cached WHOIS answer is retried uncached when its hostmask no longer
     * matches the caller-supplied one. An empty registered name falls back to
     * the nick itself; results are lower-cased.
     *
     * @return the account name, or null when WHOIS fails or the user is unregistered
     */
    @SuppressWarnings("unchecked")
    public static String getAccountName(PircBotX network, String hostmask, String userObject) {
        WhoisEvent whois = IRCUtils.WhoisEvent(network, userObject, true);
        if (whois == null) {
            return null;
        }
        String hostmask2 = IRCUtils.getLoginmask(whois.getHostname(), whois.getLogin());
        if (!hostmask2.equals(hostmask)) {
            // Cached whois belongs to a different hostmask; redo without cache.
            whois = IRCUtils.WhoisEvent(network, userObject, false);
            if (whois == null) {
                return null;
            }
        }
        String userString = whois.getRegisteredAs();
        if (userString == null) {
            return null;
        }
        userString = userString.toLowerCase();
        if (userString.isEmpty()) {
            // Registered, but with no explicit account name: use the nick.
            userString = userObject.toLowerCase();
        }
        return userString;
    }

    /**
     * Maps channel status (owner/sop/op/halfop/voice) and IRCop status to the
     * bot's automatic permission levels.
     */
    private static int getAutomaticPermLevel(User userObject, Channel channelObject) {
        if (userObject.isIrcop() && DatabaseUtils.getNetwork(IRCUtils.getNetworkNameByNetwork(userObject.getBot())).getValue(NETWORKS.NETWORKADMINACCESS)) {
            return 20;
        } else if (channelObject.isOwner(userObject)) {
            return 15;
        } else if (channelObject.isSuperOp(userObject)) {
            return 13;
        } else if (channelObject.isOp(userObject)) {
            return 10;
        } else if (channelObject.isHalfOp(userObject)) {
            return 7;
        } else if (channelObject.hasVoice(userObject)) {
            return 5;
        } else {
            return 0;
        }
    }

    /**
     * Resolves the manually-configured permission level for an account:
     * -2 for ignored hosts, 20 for network admins, otherwise the channel- or
     * network-level "permlevel" property (capped at 18), defaulting to 1 for
     * authenticated users and 0 otherwise.
     */
    private static int getManualPermLevel(String userObject, PircBotX network, Channel channelObject, String account) {
        if (isIgnored(IRCUtils.getHostmask(network, userObject, false), IRCUtils.getNetworkNameByNetwork(network))) {
            return -2;
        } else if (account != null && (channelObject == null || IRCUtils.getUserByNick(network, userObject).getChannels().contains(channelObject))) {
            // (redundant "channelObject != null &&" inside the disjunction removed)
            String channelName = null;
            if (channelObject != null) {
                channelName = channelObject.getName();
            }
            if (isNetworkAdmin(account, IRCUtils.getNetworkNameByNetwork(network))) {
                return 20;
            } else if (DatabaseUtils.getChannelUserProperty(IRCUtils.getNetworkNameByNetwork(network), channelName, account, "permlevel") != null) {
                int permlevel = 0;
                try {
                    permlevel = Integer.parseInt(DatabaseUtils.getChannelUserProperty(IRCUtils.getNetworkNameByNetwork(network), channelName, account, "permlevel").getValue(CHANNELUSERPROPERTY.VALUE));
                } catch (Exception ignored) {
                    // Malformed/missing stored value: deliberately fall back to 0.
                }
                if (permlevel > 18) {
                    permlevel = 18; // manual levels are capped below admin (20)
                }
                return permlevel;
            } else if (DatabaseUtils.getNetworkUserProperty(IRCUtils.getNetworkNameByNetwork(network), account, "permlevel") != null) {
                int permlevel = 0;
                try {
                    permlevel = Integer.parseInt(DatabaseUtils.getNetworkUserProperty(IRCUtils.getNetworkNameByNetwork(network), account, "permlevel").getValue(NETWORKUSERPROPERTY.VALUE));
                } catch (Exception ignored) {
                    // Malformed/missing stored value: deliberately fall back to 0.
                }
                if (permlevel > 18) {
                    permlevel = 18; // manual levels are capped below admin (20)
                }
                return permlevel;
            } else {
                return 1;
            }
        } else {
            return 0;
        }
    }

    /** Resolves the account for the user, then computes their effective level. */
    public static int getPermLevel(PircBotX network, String userObject, Channel channelObject) {
        String auth = PermUtils.authUser(network, userObject);
        return getLevel(network, userObject, channelObject, auth);
    }

    /**
     * Computes the effective permission level: the maximum of the manual and
     * automatic levels, except that negative manual levels (ignored) always win.
     * Without a channel, only the manual level applies.
     */
    public static int getLevel(PircBotX network, String userObject, Channel channelObject, String account) {
        if (channelObject != null) {
            int mpermlevel = getManualPermLevel(userObject, network, channelObject, account);
            User user = IRCUtils.getUserByNick(network, userObject);
            int apermlevel = 0;
            if (user != null) {
                apermlevel = getAutomaticPermLevel(user, channelObject);
            }
            if (mpermlevel < 0) {
                return mpermlevel;
            } else if (apermlevel < mpermlevel) {
                return mpermlevel;
            } else {
                return apermlevel;
            }
        } else {
            return getManualPermLevel(userObject, network, channelObject, account);
        }
    }

    /** True when the network is configured for "account"-based authentication. */
    public static boolean isAccountEnabled(PircBotX network) {
        return DatabaseUtils.getNetwork(IRCUtils.getNetworkNameByNetwork(network)).getValue(NETWORKS.AUTHTYPE).equalsIgnoreCase("account");
    }

    /** True when the account appears in the network's admin list (case-insensitive). */
    public static boolean isNetworkAdmin(String account, String network) {
        for (String c : StringUtils.split(DatabaseUtils.getNetwork(network).getValue(NETWORKS.NETWORKADMINS), ", ")) {
            if (c.equalsIgnoreCase(account)) {
                return true;
            }
        }
        return false;
    }

    /** True when the hostmask appears in the network's "ignoredhosts" property. */
    public static boolean isIgnored(String hostmask, String network) {
        if (DatabaseUtils.getNetworkProperty(network, "ignoredhosts") == null) {
            return false;
        }
        for (String c : StringUtils.split(DatabaseUtils.getNetworkProperty(network, "ignoredhosts").getValue(NETWORKPROPERTY.VALUE), ",")) {
            if (c.equalsIgnoreCase(hostmask)) {
                return true;
            }
        }
        return false;
    }
}
|
package net.zero918nobita.Xemime;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;
/**
*
* @author Kodai Matsumoto
*/
public class Main {
// Parser shared by REPL and file modes; also consulted for line numbers in error messages.
private static Parser parser;
// Root (global) object: bindings not held by a local frame live here.
static X_Default defaultObj = new X_Default();
// Virtual heap mapping addresses to values. Addresses 0 and 1 are pre-seeded
// with Nil and T. NOTE: double-brace init creates an anonymous TreeMap subclass.
private static TreeMap<X_Address, X_Code> entities = new TreeMap<X_Address, X_Code>() {{
    put(new X_Address(0, 0), X_Bool.Nil);
    put(new X_Address(0, 1), X_Bool.T);
}};
// Stack of local frames (lexical scopes).
static Frame frame = new Frame();
// Debug-mode virtual-memory monitor and its thread (started only with -debug).
private static VirtualMemoryMonitor vmm = null;
private static Thread vmmThread = null;
/**
 * Pushes the given symbol table onto the interpreter's frame stack as a new
 * local frame.
 *
 * @param table symbol table to load as the innermost frame
 */
static void loadLocalFrame(X_Handler table) {
    frame.loadLocalFrame(table);
}
/** Pops the innermost local frame from the frame stack. */
static void unloadLocalFrame() {
    frame.unloadLocalFrame();
}
/**
 * Checks whether the symbol is currently resolvable: first in the local
 * frames, then as a member of the default (global) object.
 *
 * @param sym symbol to look up
 * @return true when bound somewhere, false otherwise
 */
static boolean hasSymbol(X_Symbol sym) {
    if (frame.hasSymbol(sym)) {
        return true;
    }
    return defaultObj.hasMember(sym);
}
/**
 * Resolves a symbol to its heap address, preferring the innermost local
 * frame that declares it and falling back to the default (global) object.
 *
 * @param sym symbol to resolve
 * @return the address bound to the symbol
 */
static X_Address getAddressOfSymbol(X_Symbol sym) throws Exception {
    if (frame.hasSymbol(sym)) {
        return frame.getAddressOfSymbol(sym);
    }
    return defaultObj.getAddressOfMember(sym);
}
/**
 * Resolves a symbol to its value: local frames first, then the default
 * (global) object via a member-access message.
 *
 * @param sym symbol to resolve
 * @return the bound value, or null when the symbol is unbound everywhere
 */
static X_Code getValueOfSymbol(X_Symbol sym) throws Exception {
    if (frame.hasSymbol(sym)) {
        return frame.getValueOfSymbol(sym);
    }
    if (defaultObj.hasMember(sym)) {
        return defaultObj.message(0, sym);
    }
    return null;
}
/**
 * Dereferences a heap address in the entity table.
 *
 * @param address address to look up
 * @return the stored value, or null when the address is unmapped
 */
static X_Code getValueOfReference(X_Address address) {
    return entities.get(address);
}
/**
 * Rebinds an already-declared symbol to the given address. The innermost
 * local frame declaring the symbol wins; otherwise the symbol must already
 * be a member of the default (global) object.
 *
 * @param sym symbol to rebind
 * @param ref address the symbol should point to
 * @throws Exception when the symbol is declared nowhere
 */
static void setAddress(X_Symbol sym, X_Address ref) throws Exception {
    if (frame.hasSymbol(sym)) {
        frame.setAddress(sym, ref);
    } else if (defaultObj.hasMember(sym)) {
        defaultObj.setMember(sym, ref);
    } else {
        throw new Exception(parser.getLocation() + ": `" + sym.getName() + "` ");
    }
}
/**
 * Rebinds an already-declared symbol to a freshly registered value. Local
 * frames take precedence; otherwise the value is allocated on the heap and
 * the symbol must already be a member of the default (global) object.
 * (Note: the heap allocation happens before the membership check, matching
 * the original side-effect order.)
 *
 * @param sym symbol to rebind
 * @param obj value to store
 * @throws Exception when the symbol is declared nowhere
 */
static void setValue(X_Symbol sym, X_Code obj) throws Exception {
    if (frame.hasSymbol(sym)) {
        frame.setValue(sym, obj);
    } else {
        X_Address address = register(obj);
        if (defaultObj.hasMember(sym)) {
            defaultObj.setMember(sym, address);
        } else {
            throw new Exception(parser.getLocation() + ": `" + sym.getName() + "` ");
        }
    }
}
/**
 * Declares a symbol bound to the given address: in the current local frame
 * when one exists, otherwise on the default (global) object.
 *
 * @param sym symbol to declare
 * @param ref address to bind it to
 */
static void defAddress(X_Symbol sym, X_Address ref) throws Exception {
    if (frame.numberOfLayers() == 0) {
        defaultObj.setMember(sym, ref);
    } else {
        frame.defAddress(sym, ref);
    }
}
/**
 * Declares a symbol bound to the given value: in the current local frame
 * when one exists, otherwise the value is heap-registered and attached to
 * the default (global) object.
 *
 * @param sym symbol to declare
 * @param obj value to bind it to
 */
static void defValue(X_Symbol sym, X_Code obj) throws Exception {
    if (frame.numberOfLayers() == 0) {
        defaultObj.setMember(sym, register(obj));
    } else {
        frame.defValue(sym, obj);
    }
}
/**
 * Allocates the value at the next free heap address (one past the current
 * highest key) and returns that address.
 *
 * @param obj value to place on the heap
 * @return the X_Address the value was stored under
 */
static X_Address register(X_Code obj) {
    // Addresses are strictly increasing, so the freshly inserted key is the
    // new lastKey(); compute it once instead of re-reading the map.
    int next = entities.lastKey().getAddress() + 1;
    entities.put(new X_Address(0, next), obj);
    return new X_Address(0, next);
}
/**
* Xemime
* ( or )
* <br>
* -debug
* @param args
*/
/**
 * Entry point for the Xemime interpreter.
 * With no source-file argument an interactive REPL is started; otherwise the
 * given file is read, parsed and evaluated. Passing {@code -debug} also starts
 * the virtual-memory monitor thread.
 *
 * @param args an optional {@code -debug} flag and an optional source file name
 */
public static void main(String[] args) {
    boolean debug = Arrays.asList(args).contains("-debug");
    // At most one non-flag argument (the source file) is accepted.
    if ((debug && args.length >= 3) || (!debug && args.length >= 2)) {
        usage();
        System.out.println(System.lineSeparator() + "Usage: java -jar Xemime.jar [source file name]");
        return;
    }
    if (debug) {
        vmm = new VirtualMemoryMonitor();
        vmmThread = new Thread(vmm);
        vmmThread.start();
    }
    // Bootstrap the default object so `this`, `THIS`, `Default`, `Core` and
    // `Object` are resolvable from user code.
    X_Address addressOfDefaultObj = Main.register(defaultObj);
    defaultObj.setMember(X_Symbol.intern(0, "this"), addressOfDefaultObj);
    defaultObj.setMember(X_Symbol.intern(0, "THIS"), addressOfDefaultObj);
    defaultObj.setMember(X_Symbol.intern(0, "Default"), addressOfDefaultObj);
    defaultObj.setMember(X_Symbol.intern(0, "Core"), register(new X_Core()));
    defaultObj.setMember(X_Symbol.intern(0, "Object"), register(new X_Object()));
    try {
        parser = new Parser();
        BufferedReader in;
        if ((debug && args.length == 1) || (!debug && args.length == 0)) {
            // REPL mode: read a line, evaluate it, print each resulting value.
            usage();
            in = new BufferedReader(new InputStreamReader(System.in));
            System.out.print(System.lineSeparator() + "[1]> ");
            String line;
            while (true) {
                line = in.readLine();
                if (line != null && !line.equals("")) {
                    ArrayList<X_Code> result = parser.parse(line);
                    for (X_Code c : result) System.out.println(c.run());
                    System.out.print("[" + (parser.getLocation() + 1) + "]> ");
                    parser.goDown(1);
                } else if (line == null) {
                    break; // EOF ends the session; blank lines just re-prompt
                }
            }
        } else {
            // Batch mode. Fix: the source file is the first argument that is
            // NOT the -debug flag — the original always read args[0], which is
            // the literal string "-debug" when invoked as `-debug <file>`.
            String sourceFile = args[0];
            for (String arg : args) {
                if (!"-debug".equals(arg)) {
                    sourceFile = arg;
                    break;
                }
            }
            in = new BufferedReader(new FileReader(sourceFile));
            StringBuilder stringBuilder = new StringBuilder();
            String line;
            while ((line = in.readLine()) != null) {
                stringBuilder.append(line);
                stringBuilder.append('\n');
            }
            ArrayList<X_Code> result = parser.parse(stringBuilder.toString());
            for (X_Code c : result) c.run();
        }
        in.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/** Prints the Xemime ASCII-art banner and version string to standard out. */
private static void usage() {
    StringBuilder banner = new StringBuilder();
    banner.append(" _ __ _ \n");
    banner.append(" | |/ /__ ____ ___ (_)___ ___ ___ \n");
    banner.append(" | / _ \\/ __ `__ \\/ / __ `__ \\/ _ \\\n");
    banner.append(" / / __/ / / / / / / / / / / / __/\n");
    banner.append("/_/|_\\___/_/ /_/ /_/_/_/ /_/ /_/\\___/ \n\n");
    banner.append("Xemime Version 1.0.0 2017-08-07");
    System.out.println(banner.toString());
}
/**
* Object <br>
* <br>
*
*/
/**
 * The built-in {@code Object} prototype: exposes the natives {@code clone}
 * and {@code new}, plus a default {@code proto} flag member.
 */
private static class X_Object extends X_Handler {
    X_Object() {
        super(0);
        setMember(X_Symbol.intern(0, "clone"), new X_Clone());
        setMember(X_Symbol.intern(0, "new"), new X_New());
        setMember(X_Symbol.intern(0, "proto"), new X_Bool(0, false));
    }

    /**
     * Native {@code Object.clone}: evaluates the receiver and registers it
     * under a fresh address in the global entity table.
     */
    private static class X_Clone extends X_Native {
        X_Clone() {
            super(0, 0);
        }
        @Override
        protected X_Address exec(ArrayList<X_Code> params, X_Address self) throws Exception {
            return Main.register(params.get(0).run());
        }
    }

    /**
     * Native {@code Object.new}: builds a fresh handler and, when the receiver
     * carries a {@code proto} member, copies every member of that prototype
     * into the new instance before registering it.
     */
    private static class X_New extends X_Native {
        X_New() {
            super(0, 0);
        }
        @Override
        protected X_Code exec(ArrayList<X_Code> params, X_Address self) throws Exception {
            X_Handler receiver = (X_Handler) params.get(0).run();
            X_Handler instance = new X_Handler(0);
            instance.setMember(X_Symbol.intern(0, "proto"), new X_Bool(0, false));
            if (receiver.hasMember(X_Symbol.intern(0, "proto"))) {
                X_Handler proto = (X_Handler) receiver.getMember(X_Symbol.intern(0, "proto"));
                for (Map.Entry<X_Symbol, X_Address> member : proto.getMembers().entrySet()) {
                    instance.setMember(member.getKey(), member.getValue());
                }
            }
            return Main.register(instance);
        }
    }
}
/**
* Core <br>
*
*/
/**
 * The built-in {@code Core} object: hosts the primitive natives
 * {@code if}, {@code print}, {@code println} and {@code exit}.
 */
private static class X_Core extends X_Handler {
    X_Core() {
        super(0);
        setMember(X_Symbol.intern(0, "if"), new X_If());
        setMember(X_Symbol.intern(0, "print"), new X_Print());
        setMember(X_Symbol.intern(0, "println"), new X_Println());
        setMember(X_Symbol.intern(0, "exit"), new X_Exit());
    }

    /** Native {@code Core.exit}: terminates the interpreter process. */
    private static class X_Exit extends X_Native {
        X_Exit() {
            super(0, 0);
        }
        @Override
        protected X_Code exec(ArrayList<X_Code> params, X_Address self) throws Exception {
            System.exit(0);
            return new X_Int(0, 0); // unreachable; satisfies the signature
        }
    }

    /** Native {@code Core.print}: evaluates its argument, prints it without a newline, returns it. */
    private static class X_Print extends X_Native {
        X_Print() {
            super(0, 1);
        }
        @Override
        protected X_Code exec(ArrayList<X_Code> params, X_Address self) throws Exception {
            X_Code value = params.get(1).run();
            System.out.print(value);
            return value;
        }
    }

    /** Native {@code Core.println}: evaluates its argument, prints it followed by a newline, returns it. */
    private static class X_Println extends X_Native {
        X_Println() {
            super(0, 1);
        }
        @Override
        protected X_Code exec(ArrayList<X_Code> params, X_Address self) throws Exception {
            X_Code value = params.get(1).run();
            System.out.println(value);
            return value;
        }
    }

    /**
     * Native {@code Core.if}: evaluates the condition (argument 1); when it is
     * Nil the third argument is evaluated, otherwise the second.
     */
    private static class X_If extends X_Native {
        X_If() {
            super(0, 3);
        }
        @Override
        protected X_Code exec(ArrayList<X_Code> params, X_Address self) throws Exception {
            if (params.get(1).run().equals(X_Bool.Nil)) {
                return params.get(3).run();
            }
            return params.get(2).run();
        }
    }
}
}
|
// Nenya library - tools for developing networked games
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.threerings.tools;
import java.io.File;
import java.io.Serializable;

import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.FileSet;

import com.google.common.collect.Lists;

import com.samskivert.util.FileUtil;

import com.threerings.util.CompiledConfig;

import com.threerings.tools.xml.CompiledConfigParser;
/**
* Used to parse configuration information from an XML file and create the
* serialized representation that is used by the client and server.
*/
/**
 * Ant task that parses configuration information from an XML file and creates
 * the serialized representation that is used by the client and server.
 * Sources are supplied either as a single {@code configdef}/{@code target}
 * pair or as nested filesets rebased into {@code dest}.
 */
public class CompiledConfigTask extends Task
{
    /** Configures the {@link CompiledConfigParser} implementation class name. */
    public void setParser (String parser)
    {
        _parser = parser;
    }

    /** Configures a single config definition file to compile. */
    public void setConfigdef (File configdef)
    {
        _configdef = configdef;
    }

    /** Configures the explicit output file for a single-file compile (optional). */
    public void setTarget (File target)
    {
        _target = target;
    }

    /** Configures the destination directory used to derive targets for fileset sources. */
    public void setDest (File dest)
    {
        _dest = dest;
    }

    /** Adds a fileset of config definitions to compile. */
    public void addFileset (FileSet set)
    {
        _filesets.add(set);
    }

    @Override
    public void execute () throws BuildException
    {
        // instantiate and sanity check the parser class
        Object pobj;
        try {
            Class<?> pclass = Class.forName(_parser);
            pobj = pclass.newInstance();
        } catch (Exception e) {
            throw new BuildException("Error instantiating config parser", e);
        }
        if (!(pobj instanceof CompiledConfigParser)) {
            throw new BuildException("Invalid parser class: " + _parser);
        }
        CompiledConfigParser parser = (CompiledConfigParser)pobj;

        // if we have a single file and target specified, do those
        if (_configdef != null) {
            parse(parser, _configdef, _target == null ? getTarget(_configdef) : _target);
        }

        // deal with the filesets
        for (FileSet fs : _filesets) {
            DirectoryScanner ds = fs.getDirectoryScanner(getProject());
            File fromDir = fs.getDir(getProject());
            for (String file : ds.getIncludedFiles()) {
                File source = new File(fromDir, file);
                parse(parser, source, getTarget(source));
            }
        }
    }

    /**
     * Derives the output file for a source file by rebasing it from the project
     * base directory into {@code _dest} and swapping .xml for .dat; returns
     * null when no destination directory was configured.
     */
    protected File getTarget (File source)
    {
        if (_dest == null) {
            return null;
        }
        String baseDir = getProject().getBaseDir().getPath();
        // quote the replacement as well as the pattern: a destination path
        // containing '\' or '$' would otherwise be misinterpreted by
        // replaceAll() as regex group references and corrupt the target path
        File target = new File(source.getPath().replaceAll(
            Pattern.quote(baseDir), Matcher.quoteReplacement(_dest.getPath())));
        target = new File(FileUtil.resuffix(target, ".xml", ".dat"));
        return target;
    }

    /**
     * Parses a single config definition and writes the serialized config.
     *
     * @param parser the parser used to read the definition.
     * @param confdef the XML config definition file; must exist.
     * @param target the output file, or null to resuffix the source to .dat.
     * @throws BuildException if the source is missing, parsing fails, the
     * target directory cannot be created, or the serialized form cannot be written.
     */
    protected void parse (CompiledConfigParser parser, File confdef, File target)
        throws BuildException
    {
        // make sure the source file exists
        if (!confdef.exists()) {
            String errmsg = "Config definition file not found: " + confdef;
            throw new BuildException(errmsg);
        }

        // if no target was specified, resuffix the source file as to .dat
        if (target == null) {
            target = new File(FileUtil.resuffix(confdef, ".xml", ".dat"));
        }
        System.out.println("Compiling " + target + "...");

        Serializable config;
        try {
            // parse it on up
            config = parser.parseConfig(confdef);
        } catch (Exception e) {
            throw new BuildException("Failure parsing config definition", e);
        }

        // create the target directory if necessary
        File parent = target.getParentFile();
        if (!parent.isDirectory() && !parent.mkdirs()) {
            throw new BuildException("Failed to create parent directory '" + parent + "'.");
        }

        try {
            // and write it on out
            CompiledConfig.saveConfig(target, config);
        } catch (Exception e) {
            throw new BuildException("Failure writing serialized config", e);
        }
    }

    /** A single config definition file to compile. */
    protected File _configdef;

    /** The explicit output file for a single-file compile. */
    protected File _target;

    /** The destination directory for fileset compiles. */
    protected File _dest;

    /** The name of the parser class to instantiate. */
    protected String _parser;

    /** Filesets of config definitions to compile. */
    protected ArrayList<FileSet> _filesets = Lists.newArrayList();
}
|
package nl.mpi.kinnate.svg;
import nl.mpi.kinnate.kindata.GraphSorter;
import nl.mpi.kinnate.kindata.EntityData;
import nl.mpi.kinnate.ui.GraphPanelContextMenu;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Rectangle;
import java.awt.event.MouseWheelEvent;
import java.awt.event.MouseWheelListener;
import java.awt.geom.AffineTransform;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import javax.swing.JPanel;
import javax.xml.parsers.DocumentBuilderFactory;
import nl.mpi.arbil.data.ArbilComponentBuilder;
import nl.mpi.arbil.ui.ArbilTableModel;
import nl.mpi.arbil.ui.GuiHelper;
import nl.mpi.kinnate.KinTermSavePanel;
import nl.mpi.kinnate.entityindexer.IndexerParameters;
import nl.mpi.kinnate.SavePanel;
import nl.mpi.kinnate.kindata.DataTypes;
import nl.mpi.kinnate.kindata.EntityRelation;
import nl.mpi.kinnate.kintypestrings.KinTermGroup;
import nl.mpi.kinnate.ui.KinTypeEgoSelectionTestPanel;
import org.apache.batik.dom.svg.SAXSVGDocumentFactory;
import org.apache.batik.dom.svg.SVGDOMImplementation;
import org.apache.batik.swing.JSVGCanvas;
import org.apache.batik.swing.JSVGScrollPane;
import org.apache.batik.swing.svg.LinkActivationEvent;
import org.apache.batik.swing.svg.LinkActivationListener;
import org.apache.batik.util.XMLResourceDescriptor;
import org.w3c.dom.DOMException;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.events.EventTarget;
import org.w3c.dom.svg.SVGDocument;
import org.w3c.dom.svg.SVGLocatable;
import org.w3c.dom.svg.SVGRect;
public class GraphPanel extends JPanel implements SavePanel {
private JSVGScrollPane jSVGScrollPane;
protected JSVGCanvas svgCanvas;
protected SVGDocument doc;
protected ArbilTableModel arbilTableModel;
private boolean requiresSave = false;
private File svgFile = null;
protected GraphPanelSize graphPanelSize;
protected ArrayList<String> selectedGroupId;
protected String svgNameSpace = SVGDOMImplementation.SVG_NAMESPACE_URI;
public DataStoreSvg dataStoreSvg;
protected EntitySvg entitySvg;
// private URI[] egoPathsTemp = null;
public SvgUpdateHandler svgUpdateHandler;
private int currentZoom = 0;
private int currentWidth = 0;
private int currentHeight = 0;
private AffineTransform zoomAffineTransform = null;
public GraphPanel(KinTermSavePanel egoSelectionPanel) {
dataStoreSvg = new DataStoreSvg();
entitySvg = new EntitySvg();
dataStoreSvg.setDefaults();
svgUpdateHandler = new SvgUpdateHandler(this, egoSelectionPanel);
selectedGroupId = new ArrayList<String>();
graphPanelSize = new GraphPanelSize();
this.setLayout(new BorderLayout());
svgCanvas = new JSVGCanvas();
// svgCanvas.setMySize(new Dimension(600, 400));
svgCanvas.setDocumentState(JSVGCanvas.ALWAYS_DYNAMIC);
// drawNodes();
svgCanvas.setEnableImageZoomInteractor(false);
svgCanvas.setEnablePanInteractor(false);
svgCanvas.setEnableRotateInteractor(false);
svgCanvas.setEnableZoomInteractor(false);
svgCanvas.addMouseWheelListener(new MouseWheelListener() {
public void mouseWheelMoved(MouseWheelEvent e) {
currentZoom = currentZoom + e.getUnitsToScroll();
if (currentZoom > 8) {
currentZoom = 8;
}
if (currentZoom < -6) {
currentZoom = -6;
}
double scale = 1 - e.getUnitsToScroll() / 10.0;
double tx = -e.getX() * (scale - 1);
double ty = -e.getY() * (scale - 1);
AffineTransform at = new AffineTransform();
at.translate(tx, ty);
at.scale(scale, scale);
at.concatenate(svgCanvas.getRenderingTransform());
svgCanvas.setRenderingTransform(at);
// zoomDrawing();
}
});
// svgCanvas.setEnableResetTransformInteractor(true);
// svgCanvas.setDoubleBufferedRendering(true); // todo: look into reducing the noticable aliasing on the canvas
MouseListenerSvg mouseListenerSvg = new MouseListenerSvg(this);
svgCanvas.addMouseListener(mouseListenerSvg);
svgCanvas.addMouseMotionListener(mouseListenerSvg);
jSVGScrollPane = new JSVGScrollPane(svgCanvas);
// svgCanvas.setBackground(Color.LIGHT_GRAY);
this.add(BorderLayout.CENTER, jSVGScrollPane);
if (egoSelectionPanel instanceof KinTypeEgoSelectionTestPanel) {
svgCanvas.setComponentPopupMenu(new GraphPanelContextMenu((KinTypeEgoSelectionTestPanel) egoSelectionPanel, this, graphPanelSize));
} else {
svgCanvas.setComponentPopupMenu(new GraphPanelContextMenu(null, this, graphPanelSize));
}
}
private void zoomDrawing() {
AffineTransform scaleTransform = new AffineTransform();
scaleTransform.scale(1 - currentZoom / 10.0, 1 - currentZoom / 10.0);
System.out.println("currentZoom: " + currentZoom);
// svgCanvas.setRenderingTransform(scaleTransform);
Rectangle canvasBounds = this.getBounds();
SVGRect bbox = ((SVGLocatable) doc.getRootElement()).getBBox();
if (bbox != null) {
System.out.println("previousZoomedWith: " + bbox.getWidth());
}
// SVGElement rootElement = doc.getRootElement();
// if (currentWidth < canvasBounds.width) {
float drawingCenter = (currentWidth / 2);
// float drawingCenter = (bbox.getX() + (bbox.getWidth() / 2));
float canvasCenter = (canvasBounds.width / 2);
zoomAffineTransform = new AffineTransform();
zoomAffineTransform.translate((canvasCenter - drawingCenter), 1);
zoomAffineTransform.concatenate(scaleTransform);
svgCanvas.setRenderingTransform(zoomAffineTransform);
}
public void setArbilTableModel(ArbilTableModel arbilTableModelLocal) {
arbilTableModel = arbilTableModelLocal;
}
public EntityData[] readSvg(File svgFilePath) {
svgFile = svgFilePath;
String parser = XMLResourceDescriptor.getXMLParserClassName();
SAXSVGDocumentFactory documentFactory = new SAXSVGDocumentFactory(parser);
try {
doc = (SVGDocument) documentFactory.createDocument(svgFilePath.toURI().toString());
svgCanvas.setDocument(doc);
dataStoreSvg = DataStoreSvg.loadDataFromSvg(doc);
requiresSave = false;
entitySvg.readEntityPositions(doc.getElementById("EntityGroup"));
} catch (IOException ioe) {
GuiHelper.linorgBugCatcher.logError(ioe);
}
// set up the mouse listeners that were lost in the save/re-open process
for (String groupForMouseListener : new String[]{"EntityGroup", "LabelsGroup"}) {
Element parentElement = doc.getElementById(groupForMouseListener);
if (parentElement == null) {
Element requiredGroup = doc.createElementNS(svgNameSpace, "g");
requiredGroup.setAttribute("id", groupForMouseListener);
Element svgRoot = doc.getDocumentElement();
svgRoot.appendChild(requiredGroup);
} else {
Node currentNode = parentElement.getFirstChild();
while (currentNode != null) {
((EventTarget) currentNode).addEventListener("mousedown", new MouseListenerSvg(this), false);
currentNode = currentNode.getNextSibling();
}
}
}
dataStoreSvg.indexParameters.symbolFieldsFields.setAvailableValues(entitySvg.listSymbolNames(doc));
if (dataStoreSvg.graphData == null) {
return null;
}
return dataStoreSvg.graphData.getDataNodes();
}
public void generateDefaultSvg() {
try {
Element svgRoot;
Element relationGroupNode;
Element entityGroupNode;
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
documentBuilderFactory.setNamespaceAware(true);
// set up a kinnate namespace so that the ego list and kin type strings can have more permanent storage places
// in order to add the extra namespaces to the svg document we use a string and parse it
// other methods have been tried but this is the most readable and the only one that actually works
// I think this is mainly due to the way the svg dom would otherwise be constructed
// others include:
// doc.getDomConfig()
// doc.getDocumentElement().setAttributeNS(DataStoreSvg.kinDataNameSpaceLocation, "kin:version", "");
// doc.getDocumentElement().setAttribute("xmlns:" + DataStoreSvg.kinDataNameSpace, DataStoreSvg.kinDataNameSpaceLocation); // this method of declaring multiple namespaces looks to me to be wrong but it is the only method that does not get stripped out by the transformer on save
// Document doc = impl.createDocument(svgNS, "svg", null);
// SVGDocument doc = svgCanvas.getSVGDocument();
String templateXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ "<svg xmlns:xlink=\"http:
+ "xmlns=\"http:
+ " zoomAndPan=\"magnify\" contentStyleType=\"text/css\" "
+ "preserveAspectRatio=\"xMidYMid meet\" version=\"1.0\"/>";
// DOMImplementation impl = SVGDOMImplementation.getDOMImplementation();
// doc = (SVGDocument) impl.createDocument(svgNameSpace, "svg", null);
String parser = XMLResourceDescriptor.getXMLParserClassName();
SAXSVGDocumentFactory documentFactory = new SAXSVGDocumentFactory(parser);
doc = (SVGDocument) documentFactory.createDocument(svgNameSpace, new StringReader(templateXml));
entitySvg.insertSymbols(doc, svgNameSpace);
// Get the root element (the 'svg' element)
svgRoot = doc.getDocumentElement();
// add the relation symbols in a group below the relation lines
relationGroupNode = doc.createElementNS(svgNameSpace, "g");
relationGroupNode.setAttribute("id", "RelationGroup");
svgRoot.appendChild(relationGroupNode);
// add the entity symbols in a group on top of the relation lines
entityGroupNode = doc.createElementNS(svgNameSpace, "g");
entityGroupNode.setAttribute("id", "EntityGroup");
svgRoot.appendChild(entityGroupNode);
// add the labels group on top, also added on svg load if missing
Element labelsGroup = doc.createElementNS(svgNameSpace, "g");
labelsGroup.setAttribute("id", "LabelsGroup");
svgRoot.appendChild(labelsGroup);
dataStoreSvg.indexParameters.symbolFieldsFields.setAvailableValues(entitySvg.listSymbolNames(doc));
} catch (IOException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
}
private void saveSvg(File svgFilePath) {
svgFile = svgFilePath;
// todo: make sure the file path ends in .svg lowercase
drawNodes(); // re draw the nodes so that any data changes such as the title/description in the kin term groups get updated into the file
ArbilComponentBuilder.savePrettyFormatting(doc, svgFilePath);
requiresSave = false;
}
private void printNodeNames(Node nodeElement) {
System.out.println(nodeElement.getLocalName());
System.out.println(nodeElement.getNamespaceURI());
Node childNode = nodeElement.getFirstChild();
while (childNode != null) {
printNodeNames(childNode);
childNode = childNode.getNextSibling();
}
}
public String[] getKinTypeStrigs() {
return dataStoreSvg.kinTypeStrings;
}
public void setKinTypeStrigs(String[] kinTypeStringArray) {
// strip out any white space, blank lines and remove duplicates
// this has set has been removed because it creates a discrepancy between what the user types and what is processed
// HashSet<String> kinTypeStringSet = new HashSet<String>();
// for (String kinTypeString : kinTypeStringArray) {
// if (kinTypeString != null && kinTypeString.trim().length() > 0) {
// kinTypeStringSet.add(kinTypeString.trim());
// dataStoreSvg.kinTypeStrings = kinTypeStringSet.toArray(new String[]{});
dataStoreSvg.kinTypeStrings = kinTypeStringArray;
}
public IndexerParameters getIndexParameters() {
return dataStoreSvg.indexParameters;
}
public KinTermGroup[] getkinTermGroups() {
return dataStoreSvg.kinTermGroups;
}
public void addKinTermGroup() {
ArrayList<KinTermGroup> kinTermsList = new ArrayList<KinTermGroup>(Arrays.asList(dataStoreSvg.kinTermGroups));
kinTermsList.add(new KinTermGroup());
dataStoreSvg.kinTermGroups = kinTermsList.toArray(new KinTermGroup[]{});
}
// public String[] getEgoUniquiIdentifiersList() {
// return dataStoreSvg.egoIdentifierSet.toArray(new String[]{});
// public String[] getEgoIdList() {
// return dataStoreSvg.egoIdentifierSet.toArray(new String[]{});
// public URI[] getEgoPaths() {
// if (egoPathsTemp != null) {
// return egoPathsTemp;
// ArrayList<URI> returnPaths = new ArrayList<URI>();
// for (String egoId : dataStoreSvg.egoIdentifierSet) {
// try {
// String entityPath = getPathForElementId(egoId);
//// if (entityPath != null) {
// returnPaths.add(new URI(entityPath));
// } catch (URISyntaxException ex) {
// GuiHelper.linorgBugCatcher.logError(ex);
// // todo: warn user with a dialog
// return returnPaths.toArray(new URI[]{});
// public void setRequiredEntities(URI[] egoPathArray, String[] egoIdentifierArray) {
//// egoPathsTemp = egoPathArray; // egoPathsTemp is only required if the ego nodes are not already on the graph (otherwise the path can be obtained from the graph elements)
// dataStoreSvg.requiredEntities = new HashSet<String>(Arrays.asList(egoIdentifierArray));
// public void addRequiredEntity(URI[] egoPathArray, String[] egoIdentifierArray) {
//// egoPathsTemp = egoPathArray; // egoPathsTemp is only required if the ego nodes are not already on the graph (otherwise the path can be obtained from the graph elements)
// dataStoreSvg.requiredEntities.addAll(Arrays.asList(egoIdentifierArray));
// public void removeEgo(String[] egoIdentifierArray) {
// dataStoreSvg.egoIdentifierSet.removeAll(Arrays.asList(egoIdentifierArray));
public String[] getSelectedIds() {
return selectedGroupId.toArray(new String[]{});
}
// public boolean selectionContainsEgo() {
// for (String selectedId : selectedGroupId) {
// if (dataStoreSvg.egoIdentifierSet.contains(selectedId)) {
// return true;
// return false;
public String getPathForElementId(String elementId) {
// NamedNodeMap namedNodeMap = doc.getElementById(elementId).getAttributes();
// for (int attributeCounter = 0; attributeCounter < namedNodeMap.getLength(); attributeCounter++) {
// System.out.println(namedNodeMap.item(attributeCounter).getNodeName());
// System.out.println(namedNodeMap.item(attributeCounter).getNamespaceURI());
// System.out.println(namedNodeMap.item(attributeCounter).getNodeValue());
Element entityElement = doc.getElementById(elementId);
if (entityElement == null) {
return null;
} else {
return entityElement.getAttributeNS(DataStoreSvg.kinDataNameSpaceLocation, "path");
}
}
public String getKinTypeForElementId(String elementId) {
Element entityElement = doc.getElementById(elementId);
return entityElement.getAttributeNS(DataStoreSvg.kinDataNameSpaceLocation, "kintype");
}
public void resetZoom() {
AffineTransform at = new AffineTransform();
at.scale(1, 1);
at.setToTranslation(1, 1);
svgCanvas.setRenderingTransform(at);
}
public void drawNodes() {
drawNodes(dataStoreSvg.graphData);
}
public void drawNodes(GraphSorter graphDataLocal) {
// todo: resolve threading issue and update issue so that imdi nodes that update can update the diagram
requiresSave = true;
dataStoreSvg.graphData = graphDataLocal;
int vSpacing = graphPanelSize.getVerticalSpacing(dataStoreSvg.graphData.gridHeight);
int hSpacing = graphPanelSize.getHorizontalSpacing(dataStoreSvg.graphData.gridWidth);
currentWidth = graphPanelSize.getWidth(dataStoreSvg.graphData.gridWidth, hSpacing);
currentHeight = graphPanelSize.getHeight(dataStoreSvg.graphData.gridHeight, vSpacing);
try {
Element svgRoot;
// Get the root element (the 'svg' element)
svgRoot = doc.getDocumentElement();
Element labelsGroup = doc.getElementById("LabelsGroup");
if (labelsGroup == null) {
labelsGroup = doc.createElementNS(svgNameSpace, "g");
labelsGroup.setAttribute("id", "LabelsGroup");
svgRoot.appendChild(labelsGroup);
}
Element relationGroupNode;
Element entityGroupNode;
// if (doc == null) {
// } else {
Node relationGroupNodeOld = doc.getElementById("RelationGroup");
Node entityGroupNodeOld = doc.getElementById("EntityGroup");
// remove the old relation lines
relationGroupNode = doc.createElementNS(svgNameSpace, "g");
relationGroupNode.setAttribute("id", "RelationGroup");
svgRoot.insertBefore(relationGroupNode, labelsGroup);
if (relationGroupNodeOld != null) {
svgRoot.removeChild(relationGroupNodeOld);
}
// remove the old entity symbols making sure the entities sit above the relations but below the labels
entityGroupNode = doc.createElementNS(svgNameSpace, "g");
entityGroupNode.setAttribute("id", "EntityGroup");
svgRoot.insertBefore(entityGroupNode, labelsGroup);
if (entityGroupNodeOld != null) {
svgRoot.removeChild(entityGroupNodeOld);
}
// remove old kin diagram data
NodeList dataNodes = doc.getElementsByTagNameNS("http://mpi.nl/tla/kin", "KinDiagramData");
for (int nodeCounter = 0; nodeCounter < dataNodes.getLength(); nodeCounter++) {
dataNodes.item(nodeCounter).getParentNode().removeChild(dataNodes.item(nodeCounter));
}
// Set the width and height attributes on the root 'svg' element.
svgRoot.setAttribute("width", Integer.toString(currentWidth)); // todo: calculate the correct size / width getting it from the GraphPlacementHandler
svgRoot.setAttribute("height", Integer.toString(currentHeight));
// svgRoot.setAttribute("width", "100%");
// svgRoot.setAttribute("height", "100%");
// svgRoot.removeAttribute("width");
// svgRoot.removeAttribute("height");
this.setPreferredSize(new Dimension(graphPanelSize.getHeight(dataStoreSvg.graphData.gridHeight, vSpacing), graphPanelSize.getWidth(dataStoreSvg.graphData.gridWidth, hSpacing)));// entitySvg.removeOldEntities(entityGroupNode);
// entitySvg.removeOldEntities(relationGroupNode);
// todo: find the real text size from batik
// store the selected kin type strings and other data in the dom
dataStoreSvg.storeAllData(doc);
new GraphPlacementHandler().placeAllNodes(this, dataStoreSvg.graphData.getDataNodes(), entityGroupNode, hSpacing, vSpacing);
for (EntityData currentNode : dataStoreSvg.graphData.getDataNodes()) {
if (currentNode.isVisible) {
for (EntityRelation graphLinkNode : currentNode.getVisiblyRelateNodes()) {
if ((dataStoreSvg.showKinTermLines || graphLinkNode.relationLineType != DataTypes.RelationLineType.kinTermLine)
&& (dataStoreSvg.showSanguineLines || graphLinkNode.relationLineType != DataTypes.RelationLineType.sanguineLine)) {
new RelationSvg().insertRelation(this, svgNameSpace, relationGroupNode, currentNode, graphLinkNode, hSpacing, vSpacing);
}
}
}
}
// todo: allow the user to set an entity as the provider of new dat being entered, this selected user can then be added to each field that is updated as the providence for that data. this would be best done in a cascading fashon so that there is a default informant for the entity and if required for sub nodes and fields
svgCanvas.setSVGDocument(doc);
//ArbilComponentBuilder.savePrettyFormatting(doc, new File("/Users/petwit/Documents/SharedInVirtualBox/mpi-co-svn-mpi-nl/LAT/Kinnate/trunk/src/main/resources/output.svg"));
// svgCanvas.revalidate();
// svgUpdateHandler.updateSvgSelectionHighlights(); // todo: does this rsolve the issue after an update that the selection highlight is lost but the selection is still made?
selectedGroupId.clear();
// zoomDrawing();
if (zoomAffineTransform != null) {
// re apply the last zoom
// todo: asses why this does not work
svgCanvas.setRenderingTransform(zoomAffineTransform);
}
svgCanvas.addLinkActivationListener(new LinkActivationListener() {
public void linkActivated(LinkActivationEvent lae) {
// todo: find a better way to block the built in hyper link handler that tries to load the url into the canvas
throw new UnsupportedOperationException("Not supported yet.");
}
});
} catch (DOMException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
}
public boolean hasSaveFileName() {
return svgFile != null;
}
public File getFileName() {
return svgFile;
}
public boolean requiresSave() {
return requiresSave;
}
public void setRequiresSave() {
requiresSave = true;
}
public void saveToFile() {
saveSvg(svgFile);
}
public void saveToFile(File saveAsFile) {
saveSvg(saveAsFile);
}
public void updateGraph() {
throw new UnsupportedOperationException("Not supported yet.");
}
}
|
package nl.orangeflamingo;
import nl.orangeflamingo.domain.Song;
import nl.orangeflamingo.domain.SongRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
@SpringBootApplication
public class Application {
private static final Logger log = LoggerFactory.getLogger(Application.class);
public static void main(String[] args) {
SpringApplication.run(Application.class);
}
@Bean
public CommandLineRunner demo(SongRepository repository) {
return args -> {
// fetch all customers
log.info("Songs found with findAll():");
log.info("
for (Song song : repository.findAll()) {
log.info(song.toString());
}
log.info("");
// fetch an individual customer by ID
Song song = repository.findById(1L).orElseThrow(() -> new RuntimeException("Song not found"));
log.info("Song found with findOne(1L):");
log.info("
log.info(song.toString());
log.info("");
// fetch customers by artist
log.info("Song found with findByArtist('Nirvana'):");
log.info("
for (Song nirvana : repository.findByArtistLikeIgnoreCase("Nirvana")) {
log.info(nirvana.toString());
}
log.info("");
};
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.