answer
stringlengths
17
10.2M
package com.noprestige.kanaquiz;

import android.content.Context;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.widget.LinearLayout;
import android.widget.TextView;

import java.text.DecimalFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;

import static java.util.Calendar.DAY_OF_MONTH;
import static java.util.Calendar.DAY_OF_WEEK;
import static java.util.Calendar.LONG;
import static java.util.Calendar.MONTH;
import static java.util.Calendar.YEAR;

/**
 * List row showing one day's quiz results: a three-line date (weekday,
 * "Month day", year), the "correct/total" answer ratio, and the score as a
 * percentage whose text colour encodes the score band (below 60% up to
 * above 90%).
 */
public class DailyLogItem extends LinearLayout
{
    private TextView lblDate;
    private TextView lblRatio;
    private TextView lblPercentage;

    // Both counters must be positive before anything is rendered;
    // see updateAnswers().
    private int correctAnswers = 0;
    private int totalAnswers = 0;

    // NOTE(review): the pattern literal was corrupted in this copy of the
    // file (content after the opening quote was stripped). "#0%" renders a
    // whole-number percentage (e.g. "87%"), which matches how the value is
    // used below — confirm against the original source before shipping.
    private static final DecimalFormat PERCENT_FORMATTER = new DecimalFormat("#0%");

    public DailyLogItem(Context context)
    {
        super(context);
        init(null, 0);
    }

    public DailyLogItem(Context context, AttributeSet attrs)
    {
        super(context, attrs);
        init(attrs, 0);
    }

    public DailyLogItem(Context context, AttributeSet attrs, int defStyle)
    {
        super(context, attrs, defStyle);
        init(attrs, defStyle);
    }

    /** Inflates the row layout into this view and caches the child labels. */
    private void init(AttributeSet attrs, int defStyle)
    {
        LayoutInflater.from(getContext()).inflate(R.layout.daily_log_item, this);

        lblDate = findViewById(R.id.lblDate);
        lblRatio = findViewById(R.id.lblRatio);
        lblPercentage = findViewById(R.id.lblPercentage);
    }

    /**
     * Populates the row from a daily log record: its date, correct-answer
     * count, and total (correct + incorrect) answer count.
     */
    public void setFromRecord(LogDailyRecord record)
    {
        setDate(record.date);
        setCorrectAnswers(record.correct_answers);
        setTotalAnswers(record.correct_answers + record.incorrect_answers);
    }

    /**
     * Renders {@code date} as three lines — weekday, "Month day", year —
     * using the device's default locale for the names.
     */
    public void setDate(Date date)
    {
        GregorianCalendar calendar = new GregorianCalendar();
        calendar.setTime(date);

        String newline = System.getProperty("line.separator");

        lblDate.setText(calendar.getDisplayName(DAY_OF_WEEK, LONG, Locale.getDefault()));
        lblDate.append(newline);
        lblDate.append(calendar.getDisplayName(MONTH, LONG, Locale.getDefault()));
        lblDate.append(" ");
        lblDate.append(Integer.toString(calendar.get(DAY_OF_MONTH)));
        lblDate.append(newline);
        lblDate.append(Integer.toString(calendar.get(YEAR)));
    }

    public void setCorrectAnswers(int correctAnswers)
    {
        this.correctAnswers = correctAnswers;
        updateAnswers();
    }

    public void setTotalAnswers(int totalAnswers)
    {
        this.totalAnswers = totalAnswers;
        updateAnswers();
    }

    /**
     * Refreshes the ratio and percentage labels once both counters are set
     * (both must be > 0 — callers set them one at a time, so the first call
     * is a deliberate no-op). The percentage colour reflects the score band.
     */
    private void updateAnswers()
    {
        if (correctAnswers > 0 && totalAnswers > 0)
        {
            lblRatio.setText(correctAnswers + "/" + totalAnswers);

            float percentage = (float) correctAnswers / (float) totalAnswers;
            lblPercentage.setText(PERCENT_FORMATTER.format(percentage));

            if (percentage < 0.6)
                lblPercentage.setTextColor(getResources().getColor(R.color.below_sixty));
            else if (percentage < 0.7)
                lblPercentage.setTextColor(getResources().getColor(R.color.sixty_to_seventy));
            else if (percentage < 0.8)
                lblPercentage.setTextColor(getResources().getColor(R.color.seventy_to_eighty));
            else if (percentage < 0.9)
                lblPercentage.setTextColor(getResources().getColor(R.color.eighty_to_ninty));
            else
                lblPercentage.setTextColor(getResources().getColor(R.color.above_ninty));
        }
    }
}
package com.veyndan.hermes.home;

import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.Menu;
import android.view.MenuItem;
import com.ryanharter.auto.value.moshi.AutoValueMoshiAdapterFactory;
import com.squareup.moshi.Moshi;
import com.veyndan.hermes.BaseActivity;
import com.veyndan.hermes.Comic;
import com.veyndan.hermes.R;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava.RxJavaCallAdapterFactory;
import retrofit2.converter.moshi.MoshiConverterFactory;
import retrofit2.http.GET;
import retrofit2.http.Path;
import rx.Observable;
import rx.Subscriber;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;

/**
 * Home screen: fetches a comic from the xkcd JSON API over Retrofit/RxJava
 * and displays it in a RecyclerView backed by {@link HomeAdapter}.
 */
public class HomeActivity extends BaseActivity {

    @BindView(R.id.recycler_view) RecyclerView recyclerView;

    // Backing list for the adapter; appended to from the Rx callback below.
    private final List<Comic> comics = new ArrayList<>();

    private HomeAdapter adapter;

    // Moshi wired with the AutoValue adapter factory so Comic (an AutoValue
    // type) can be deserialized from JSON.
    private Moshi moshi = new Moshi.Builder().add(new AutoValueMoshiAdapterFactory()).build();

    private Retrofit retrofit = new Retrofit.Builder()
            .addCallAdapterFactory(RxJavaCallAdapterFactory.create())
            .addConverterFactory(MoshiConverterFactory.create(moshi))
            .baseUrl("https://xkcd.com/")
            .build();

    /** Retrofit service for xkcd's JSON endpoints. */
    private interface XKCDService {
        /** Latest published comic. */
        @GET("info.0.json") Observable<Comic> latest();

        /** Comic with the given number. */
        @GET("{num}/info.0.json") Observable<Comic> num(@Path("num") int num);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.home_activity);
        ButterKnife.bind(this);

        adapter = new HomeAdapter(comics);
        recyclerView.setLayoutManager(new LinearLayoutManager(this));
        recyclerView.setAdapter(adapter);

        XKCDService xkcdService = retrofit.create(XKCDService.class);

        // NOTE(review): the variable is named "latest" but actually requests
        // comic #1 via num(1); latest() is declared but never called —
        // confirm which endpoint is intended.
        Observable<Comic> latest = xkcdService.num(1);

        // NOTE(review): this Subscriber is never unsubscribed, so a response
        // arriving after the activity is destroyed will still touch the
        // adapter/views — consider retaining the Subscription and
        // unsubscribing in onDestroy().
        latest.subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Subscriber<Comic>() {
                    @Override
                    public void onCompleted() {
                    }

                    @Override
                    public void onError(Throwable e) {
                        // NOTE(review): errors are silently swallowed; the
                        // user gets no feedback on a failed fetch.
                    }

                    @Override
                    public void onNext(Comic comic) {
                        comics.add(comic);
                        adapter.notifyDataSetChanged();
                    }
                });
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_home, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.action_settings:
                return true;
            default:
                return false;
        }
    }
}
package com.zfdang.zsmth_android.models;

import android.text.Html;
import com.zfdang.zsmth_android.Settings;
import com.zfdang.zsmth_android.helpers.StringUtils;
import com.zfdang.zsmth_android.newsmth.SMTHHelper;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 * One post in a forum thread. Parses the post's HTML (content, "likes",
 * attachments) into a list of ContentSegment objects alternating between
 * text and image segments, ready for display.
 *
 * NOTE(review): this copy of the file is corrupted — several string
 * literals (the ATTACHMENT_MARK constant, URL fragments, and what appear
 * to have been non-ASCII prefixes such as the Chinese "发信人:" headers)
 * and at least one "--" operator have been stripped by whatever tool
 * extracted it. Corrupted spans are flagged inline below; restore them
 * from the upstream repository before building.
 */
public class Post {
  // NOTE(review): CORRUPTED — the literal's content and its closing quote
  // were stripped, leaving an unterminated string that swallows the next
  // declaration. Upstream this is a distinctive marker (likely containing
  // '#' characters) used to split content around attachments.
  private static final String ATTACHMENT_MARK = " public static int ACTION_DEFAULT = 0;
  public static int ACTION_FIRST_POST_IN_SUBJECT = 1;
  public static int ACTION_PREVIOUS_POST_IN_SUBJECT = 2;
  public static int ACTION_NEXT_POST_IN_SUBJECT = 3;

  private String postID;
  private String title;
  private String author;
  private String nickName;
  private Date date;
  private String position;
  private List<String> likes;
  private List<Attachment> attachFiles;
  private String htmlContent; // post body only; likes are not included
  private String htmlContentAndLikes; // body with the likes list appended
  private List<ContentSegment> mSegments; // parsed from htmlContentAndLikes

  public Post() {
    // default the timestamp to "now"; overwritten when a date header is parsed
    date = new Date();
  }

  public String getPostID() {
    return postID;
  }

  public void setPostID(String postID) {
    this.postID = postID;
  }

  public String getTitle() {
    return this.title;
  }

  public void setTitle(String title) {
    this.title = title;
  }

  /** Returns "author(nickname)" when a nickname is known, else just the author. */
  public String getAuthor() {
    if (nickName == null || nickName.length() == 0) {
      return this.author;
    } else {
      return String.format("%s(%s)", this.author, this.nickName);
    }
  }

  public void setAuthor(String author) {
    this.author = author;
  }

  /** Author id without the nickname decoration. */
  public String getRawAuthor() {
    return this.author;
  }

  /** Stores the nickname, truncated to 12 chars (with "..") for display. */
  public void setNickName(String nickName) {
    final int MAX_NICKNAME_LENGTH = 12;
    if (nickName.length() > MAX_NICKNAME_LENGTH) {
      nickName = nickName.substring(0, MAX_NICKNAME_LENGTH) + "..";
    }
    this.nickName = nickName;
  }

  public void setDate(Date date) {
    this.date = date;
  }

  /** Human-readable form of the post date (delegates to StringUtils). */
  public String getFormatedDate() {
    return StringUtils.getFormattedString(this.date);
  }

  public String getPosition() {
    return position;
  }

  public void setPosition(String position) {
    this.position = position;
  }

  /**
   * Entry point: parses a post's DOM node into segments.
   * For web posts (isPost == true) the likes block and UI chrome are
   * stripped first; mail content is used as-is.
   */
  public void parsePostContent(Element content, boolean isPost) {
    Element pureContent = null;
    if (isPost) {
      // 1. parse the likes node first: <div class="likes">
      Elements likeNodes = content.select("div.likes");
      if (likeNodes.size() == 1) {
        Element likeNode = likeNodes.first();
        ParseLikeElementInPostContent(likeNode);
        likeNode.remove();
      }
      // 2. remove the "add like" button: <button class="button add_like">
      Elements likeButtons = content.select("button.add_like");
      if(likeButtons.size() == 1) {
        Element likeButton = likeButtons.get(0);
        likeButton.remove();
      }
      // SMTH injects a hidden topic id in front of the author
      // (<font style="display: none">...</font>) — drop it.
      for (Element font : content.select("font[style='display: none']")) {
        font.remove();
      }
      // 4. re-parse the remaining html as the pure post content
      pureContent = Jsoup.parse(content.html());
    } else {
      // from email, content is not enclosed by a <p> tag
      pureContent = content;
    }
    // 2. parse the pure content -> htmlContent
    parsePostPureContent(pureContent);
    // 3. merge content and likes -> htmlContentAndLikes
    mergePureContentAndLikes();
    // 4. split into display segments
    parseContentToSegments();
  }

  /**
   * Extracts the likes summary (count + one line per liker) from the
   * <div class="likes"> node into the 'likes' list.
   */
  public void ParseLikeElementInPostContent(Element likeNode) {
    if(likes == null) {
      likes = new ArrayList<>();
    }
    likes.clear();
    // e.g. <div class="like_name">36</div> — the like count
    Elements nodes = likeNode.select("div.like_name");
    if (nodes.size() == 1) {
      Element node = nodes.first();
      likes.add(node.text());
    }
    // one <li> per liker: user, message, timestamp
    nodes = likeNode.select("li");
    for (Element n : nodes) {
      likes.add(n.text());
    }
  }

  /**
   * Replaces image attachments with ATTACHMENT_MARK placeholders (recording
   * them in attachFiles), appends download links for other attachments, then
   * converts the remaining html to formatted text via Html.fromHtml and
   * parsePostPureContentFormat. Result goes to htmlContent.
   */
  public void parsePostPureContent(Element content) {
    // attachments appear as <a href=...> nodes, images with a nested <img>
    Elements as = content.select("a[href]");
    for (Element a : as) {
      Elements imgs = a.select("img[src]");
      if (imgs.size() == 1) {
        // image attachment: href is the original, img src the resized copy
        String origImageSrc = a.attr("href");
        Element img = imgs.first();
        String resizedImageSrc = img.attr("src");
        Attachment attach = new Attachment(origImageSrc, resizedImageSrc);
        this.addAttachFile(attach);
        // replace the anchor with the marker; trailing space keeps two
        // adjacent markers from fusing when the string is split later
        a.html(ATTACHMENT_MARK + " ");
      } else if (imgs.size() == 0) {
        // no <img>: either a .webp (shown as an image) or a plain download
        String attachName = a.text();
        if (attachName != null && attachName.endsWith(".webp")) {
          String origImageSrc = a.attr("href");
          Attachment attach = new Attachment(origImageSrc, origImageSrc);
          this.addAttachFile(attach);
          a.html(ATTACHMENT_MARK + " ");
        } else {
          // other attachment: append a download link
          String downloadURL = a.attr("href");
          downloadURL = SMTHHelper.preprocessSMTHImageURL(downloadURL);
          if (downloadURL.contains("/nForum/att/")){
            a.append("<br>" + downloadURL);
          }
        }
      }
    }
    // Html.fromHtml supports only a subset of HTML tags; the output is
    // plain text with \n line breaks
    String formattedPlainText = Html.fromHtml(content.html()).toString();
    this.htmlContent = this.parsePostPureContentFormat(formattedPlainText);
  }

  /** Appends the likes list (as small <cite> html) to the post body. */
  public void mergePureContentAndLikes(){
    htmlContentAndLikes = this.htmlContent;
    if (likes != null && likes.size() > 0) {
      StringBuilder wordList = new StringBuilder();
      wordList.append("<br/><small><cite>");
      for (String word : likes) {
        wordList.append(word).append("<br/>");
      }
      wordList.append("</cite></small>");
      htmlContentAndLikes += new String(wordList);
    }
  }

  /**
   * Splits htmlContentAndLikes on ATTACHMENT_MARK into alternating text and
   * image segments, pairing each gap with the next recorded attachment.
   */
  private void parseContentToSegments() {
    if (mSegments == null) {
      mSegments = new ArrayList<>();
    }
    mSegments.clear();
    if (attachFiles == null || attachFiles.size() == 0) {
      // no attachment: the whole content is one text segment
      mSegments.add(new ContentSegment(ContentSegment.SEGMENT_TEXT, htmlContentAndLikes));
    } else {
      // split around the attachment markers
      String[] segments = htmlContentAndLikes.split(ATTACHMENT_MARK);
      int attachIndex = 0;
      for (String segment : segments) {
        // skip empty filler between adjacent markers, except the first
        // segment — a text view is always expected before an image view
        if (!StringUtils.isEmptyString(segment) || attachIndex == 0) {
          mSegments.add(new ContentSegment(ContentSegment.SEGMENT_TEXT, segment));
        }
        // then the next image attachment, if any remain
        if (attachFiles != null && attachIndex < attachFiles.size()) {
          Attachment attach = attachFiles.get(attachIndex);
          String imageURL = null;
          if (Settings.getInstance().isLoadOriginalImage()) {
            imageURL = attach.getOriginalImageSource();
          } else {
            imageURL = attach.getResizedImageSource();
          }
          ContentSegment img = new ContentSegment(ContentSegment.SEGMENT_IMAGE, imageURL);
          img.setImgIndex(attachIndex);
          mSegments.add(img);
        }
        attachIndex++;
      }
    }
    // (debug logging of the resulting segments was commented out upstream)
  }

  /*
   * Input is formatted plain text (no html tags), lines separated by \n.
   * Re-adds lightweight html: colours for quotes and signatures, <br />
   * line breaks, and strips header/footer boilerplate.
   */
  private String parsePostPureContentFormat(String content) {
    // &nbsp; comes through as char 160, not an ASCII space — normalize it
    content = content.replace(String.valueOf((char) 160), " ");
    String[] lines = content.split("\n");
    // find the signature delimiter ("--" on its own line), searching from
    // the end so a "--" inside the body is not mistaken for it
    int signatureStartLine = -1;
    // NOTE(review): CORRUPTED — "--) {" was stripped from this loop header
    // (it should read: for (int i = lines.length - 1; i >= 0; i--) { ... )
    for (int i = lines.length - 1; i >= 0; i String line = lines[i]; if (line.startsWith("--") && line.length() <= 3) { signatureStartLine = i; break; } }
    // process the content line by line
    StringBuilder sb = new StringBuilder();
    int linebreak = 0;
    int signatureMode = 0;
    for (int i = 0; i < lines.length; i++) {
      String line = lines[i];
      // NOTE(review): the prefixes in the startsWith(...) literals below
      // originally contained non-ASCII (Chinese) header text that was
      // stripped, leaving bare ":" strings — restore from upstream.
      if ((line.startsWith(":") || line.startsWith(":")) && i <= 3) {
        // author header: extract the nickname between parentheses, skip line
        String nickName = StringUtils.subStringBetween(line, "(", ")");
        if (nickName != null && nickName.length() > 0) {
          this.setNickName(nickName);
        }
        continue;
      } else if (line.startsWith(" :") && i <= 3) {
        // board/title header: skip
        continue;
      } else if (line.startsWith(":") && i <= 3) {
        // date header, e.g. "(Fri Mar 25 11:52:04 2016)" — parse and skip
        line = StringUtils.subStringBetween(line, "(", ")");
        SimpleDateFormat simpleFormat = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy", Locale.US);
        try {
          Date localdate = simpleFormat.parse(line);
          this.setDate(localdate);
          continue;
        } catch (ParseException e1) {
          e1.printStackTrace();
        }
      }
      // attachment marker lines pass through unchanged (no <br /> added)
      if (line.contains(ATTACHMENT_MARK)) {
        sb.append(line);
        continue;
      }
      // quoted content: tint it
      if (line.startsWith(":")) {
        line = "<font color=#00b4ae>" + line + "</font>";
        sb.append(line).append("<br />");
        continue;
      }
      // collapse runs of blank lines to a single break
      if (line.trim().length() == 0) {
        linebreak++;
        if (linebreak >= 2) {
          continue;
        } else {
          sb.append(line).append("<br />");
          continue;
        }
      } else {
        linebreak = 0;
      }
      // signature delimiter: switch into signature styling from here on
      if (i == signatureStartLine) {
        signatureMode = 1;
        sb.append(line).append("<br />");
        continue;
      }
      // site footer line ("※ 来源:·..." upstream): strip URLs, annotate IP
      // location, leave signature mode
      if (line.contains("※ :·")) {
        signatureMode = 0;
        // NOTE(review): CORRUPTED — the first replace's arguments were
        // stripped (likely .replace("http://www.newsmth.net", "")), which
        // swallows the next call into an unterminated string literal.
        line = line.replace("·", "") .replace("http: .replace("http://m.newsmth.net", "") .replace("http://newsmth.net", "") .replace("newsmth.net", "") .replace("m.newsmth.net", "") .replace("", "") .replace("", "");
        line = "<font color=#727272>" + StringUtils.lookupIPLocation(line) + "</font>";
        sb.append(line).append("<br />");
        continue;
      } else if (line.contains("※ :·")) {
        // alternate footer form; same treatment
        signatureMode = 0;
        line = line.replace("·", "").replace("", "");
        line = "<font color=#727272>" + StringUtils.lookupIPLocation(line) + "</font>";
        sb.append(line).append("<br />");
        continue;
      }
      // inside the signature: render small and grey
      if (signatureMode == 1) {
        line = "<small><font color=#727272>" + line + "</font></small>";
        sb.append(line).append("<br />");
        continue;
      }
      // normal content line
      sb.append(line).append("<br />");
    }
    return sb.toString().trim();
  }

  public List<ContentSegment> getContentSegments() {
    return mSegments;
  }

  /** Plain-text body (markers removed); used for copy/quote-reply. */
  public String getRawContent() {
    return Html.fromHtml(this.htmlContent.replace(ATTACHMENT_MARK, "")).toString();
  }

  /**
   * Used when the post content cannot be fetched: the error message is
   * installed as the whole content.
   */
  public void setRawContent(String rawContent) {
    this.htmlContent = rawContent;
    this.htmlContentAndLikes = rawContent;
    parseContentToSegments();
  }

  /** Lazily creates the attachment list; ignores null attachments. */
  public void addAttachFile(Attachment attach) {
    if (attachFiles == null) {
      attachFiles = new ArrayList<>();
    }
    if (attach != null) {
      attachFiles.add(attach);
    }
  }

  public List<Attachment> getAttachFiles() {
    return attachFiles;
  }

  @Override public String toString() {
    return "Post{" + "postID='" + postID + '\'' + ", title='" + title + '\'' + ", author='" + author + '\'' + ", nickName='" + nickName + '\'' + ", date=" + date + ", position='" + position + '\'' + '}';
  }
}
package me.devsaki.hentoid;

import android.annotation.SuppressLint;
import android.app.SearchManager;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.widget.SearchView;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ListView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
import me.devsaki.hentoid.adapters.ContentAdapter;
import me.devsaki.hentoid.components.HentoidActivity;
import me.devsaki.hentoid.components.HentoidFragment;
import me.devsaki.hentoid.database.domains.Content;
import me.devsaki.hentoid.util.Constants;
import me.devsaki.hentoid.util.ConstantsPreferences;

/**
 * Lists downloaded content with search, paging and two sort orders
 * (alphabetic / by date). The actual list lives in the nested
 * {@link DownloadsFragment}; this activity hosts the options menu and the
 * SearchView wiring.
 */
public class DownloadsActivity extends HentoidActivity<DownloadsActivity.DownloadsFragment> {

    private static final String TAG = DownloadsActivity.class.getName();

    private SearchView searchView;

    @Override
    protected DownloadsFragment buildFragment() {
        return new DownloadsFragment();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_content_list, menu);

        // Associate the searchable configuration with the SearchView
        SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE);
        searchView = (SearchView) MenuItemCompat.getActionView(menu.findItem(R.id.action_search));
        searchView.setSearchableInfo(searchManager.getSearchableInfo(getComponentName()));
        searchView.setIconifiedByDefault(true);
        searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String s) {
                // Dismiss the soft keyboard before running the search
                InputMethodManager imm =
                        (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
                imm.hideSoftInputFromWindow(searchView.getWindowToken(), 0);
                getFragment().setQuery(s.trim());
                getFragment().searchContent();
                return true;
            }

            @Override
            public boolean onQueryTextChange(String s) {
                // Live search as the user types
                getFragment().setQuery(s.trim());
                getFragment().searchContent();
                return true;
            }
        });

        // Only show the sort action that is NOT currently active
        if (getFragment().order == 0) {
            menu.getItem(1).setVisible(false);
            menu.getItem(2).setVisible(true);
        } else {
            menu.getItem(1).setVisible(true);
            menu.getItem(2).setVisible(false);
        }
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Switching sort order re-runs the search and rebuilds the menu so
        // the visible sort action flips (see onCreateOptionsMenu).
        int id = item.getItemId();
        if (id == R.id.action_order_alphabetic) {
            getFragment().order = ConstantsPreferences.PREF_ORDER_CONTENT_ALPHABETIC;
            getFragment().searchContent();
            invalidateOptionsMenu();
            return true;
        } else if (id == R.id.action_order_by_date) {
            getFragment().order = ConstantsPreferences.PREF_ORDER_CONTENT_BY_DATE;
            getFragment().searchContent();
            invalidateOptionsMenu();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /** List fragment: holds the query, paging state and DB lookups. */
    public static class DownloadsFragment extends HentoidFragment {

        // NOTE(review): static, so the query survives fragment re-creation
        // and is shared across instances — confirm this is intentional.
        private static String query = "";

        private Toast mToast;
        private int currentPage = 1;
        private int prevPage = 0;
        private int qtyPages; // items per page; <= 0 means "no limit"
        private int order;
        private Button btnPage;
        private List<Content> contents;
        private int index = -1; // saved scroll position (first visible row)

        /** Sets the search query; a new query always restarts at page 1. */
        public void setQuery(String query) {
            DownloadsFragment.query = query;
            currentPage = 1;
        }

        @Override
        public void onResume() {
            super.onResume();
            // searchContent() resets 'index', so remember it first and
            // restore the scroll position after the reload.
            int tempIndex = index;
            searchContent();
            ListView list = getListView();
            if (tempIndex > -1) list.setSelectionFromTop(tempIndex, 0);
        }

        @SuppressLint("ShowToast")
        @Override
        public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container,
                Bundle savedInstanceState) {
            View rootView = inflater.inflate(R.layout.fragment_downloads, container, false);

            qtyPages = Integer.parseInt(getSharedPreferences()
                    .getString(ConstantsPreferences.PREF_QUANTITY_PER_PAGE_LISTS,
                            ConstantsPreferences.PREF_QUANTITY_PER_PAGE_DEFAULT + ""));
            order = getSharedPreferences()
                    .getInt(ConstantsPreferences.PREF_ORDER_CONTENT_LISTS,
                            ConstantsPreferences.PREF_ORDER_CONTENT_BY_DATE);

            // Single reusable toast so messages replace each other
            if (mToast == null) {
                mToast = Toast.makeText(getActivity(), "", Toast.LENGTH_SHORT);
            }

            btnPage = (Button) rootView.findViewById(R.id.btnPage);
            ImageButton btnRefresh = (ImageButton) rootView.findViewById(R.id.btnRefresh);
            ImageButton btnNext = (ImageButton) rootView.findViewById(R.id.btnNext);
            ImageButton btnPrevious = (ImageButton) rootView.findViewById(R.id.btnPrevious);

            btnRefresh.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    searchContent();
                }
            });

            btnNext.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (qtyPages <= 0) {
                        // Paging disabled: everything is on one page
                        mToast.setText(R.string.not_limit_per_page);
                        mToast.show();
                    } else {
                        currentPage++;
                        if (!searchContent()) {
                            // No next page: roll the counter back
                            btnPage.setText("" + --currentPage);
                            mToast.setText(R.string.not_next_page);
                            mToast.show();
                        }
                    }
                }
            });

            btnPrevious.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (currentPage > 1) {
                        // FIX: the decrement was missing ("currentPage
                        // searchContent();"), leaving the handler broken —
                        // step back one page, then reload.
                        currentPage--;
                        searchContent();
                    } else if (qtyPages > 0) {
                        mToast.setText(R.string.not_previous_page);
                        mToast.show();
                    } else {
                        mToast.setText(R.string.not_limit_per_page);
                        mToast.show();
                    }
                }
            });

            // First run: no download folder configured yet — force selection
            String settingDir = getSharedPreferences().getString(Constants.SETTINGS_FOLDER, "");
            if (settingDir.isEmpty()) {
                Intent intent = new Intent(getActivity(), SelectFolderActivity.class);
                startActivity(intent);
                getActivity().finish();
            } else
                searchContent();

            return rootView;
        }

        @Override
        public void onPause() {
            // Persist the sort order and remember the scroll position
            SharedPreferences.Editor editor = getSharedPreferences().edit();
            editor.putInt(ConstantsPreferences.PREF_ORDER_CONTENT_LISTS, order).apply();
            ListView list = getListView();
            index = list.getFirstVisiblePosition();
            super.onPause();
        }

        /**
         * Runs the current query against the DB for the current page and
         * sort order, updates the title and list, and returns whether any
         * results were found (used by the next-page button to roll back).
         */
        private boolean searchContent() {
            index = -1;
            List<Content> result = getDB()
                    .selectContentByQuery(query, currentPage, qtyPages,
                            order == ConstantsPreferences.PREF_ORDER_CONTENT_ALPHABETIC);

            // Keep showing the previous page's contents on an empty result
            if (result != null && !result.isEmpty())
                contents = result;
            else if (contents == null) contents = new ArrayList<>(0);

            if (query.isEmpty()) {
                getActivity().setTitle(R.string.title_activity_downloads);
            } else {
                getActivity().setTitle(getResources()
                        .getString(R.string.title_activity_search)
                        .replace("@search", query));
            }

            // Rebind the adapter only when the list actually changed
            if (contents == result || contents.isEmpty()) {
                ContentAdapter adapter = new ContentAdapter(getActivity(), contents);
                setListAdapter(adapter);
            }

            if (prevPage != currentPage) {
                btnPage.setText("" + currentPage);
            }
            prevPage = currentPage;

            return result != null && !result.isEmpty();
        }
    }
}
package nl.davinci.davinciquest;

import android.Manifest;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.location.Location;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentActivity;
import android.support.v4.content.ContextCompat;
import android.text.InputFilter;
import android.text.InputType;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.TextView;
import com.google.android.gms.appindexing.Action;
import com.google.android.gms.appindexing.AppIndex;
import com.google.android.gms.appindexing.Thing;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.CameraPosition;
import com.google.android.gms.maps.model.Circle;
import com.google.android.gms.maps.model.CircleOptions;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.android.gms.maps.model.Polygon;
import com.google.android.gms.maps.model.PolygonOptions;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import nl.davinci.davinciquest.Controllers.LocationUserController;
import nl.davinci.davinciquest.Controllers.QuestController;
import nl.davinci.davinciquest.Controllers.QuestUserController;
import nl.davinci.davinciquest.Entity.LocationUser;
import nl.davinci.davinciquest.Entity.Marker;
import nl.davinci.davinciquest.Entity.Quest;

/**
 * Map screen for a single quest ("speurtocht"): loads the quest's locations from
 * the server, renders them as map markers, shows a multiple-choice question
 * dialog when a marker is tapped, and posts the user's answer back to the server.
 *
 * NOTE(review): several server URL string literals in this file appear truncated
 * in the source ("http: with no closing quote) — the full endpoints must be
 * restored for this class to compile.
 */
public class MapsActivity extends FragmentActivity implements OnMapReadyCallback
        ,GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener {

    private GoogleMap mMap;
    GoogleApiClient mGoogleApiClient;
    Button markerButton, speurtochtButton, answerButton;
    FloatingActionButton qrButton, startButton;
    // Sequence number used only to title ad-hoc markers in SetMarkerAtCurrentLocation().
    int markerCount = 1;
    // Quest locations downloaded by GetSpeurtochtJsonData; read by PlaceMarkers().
    ArrayList<Marker> markerLocations;
    int speurtochtId, user_id;
    Quest quest = new Quest();
    ArrayList<Quest> userQuestList = new ArrayList<>();
    QuestUserController questUserController = new QuestUserController();
    // Views of the currently open question dialog, populated by GetQuestion.onPostExecute().
    TextView questionText;
    RadioGroup answerRadioGroup;
    RadioButton answerRadio1, answerRadio2, answerRadio3, answerRadio4;
    // Correct answer text of the currently displayed question (set asynchronously).
    String correctAnswer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Request location permission up front; onMapReady re-checks before enabling my-location.
        ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_FINE_LOCATION}, 1);
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_maps);
        // NOTE(review): getExtras() may return null if this activity is launched
        // without extras — would NPE here. Callers are assumed to always pass "id"
        // and "user_id"; confirm against launching activities.
        Bundle extras = getIntent().getExtras();
        speurtochtId = extras.getInt("id");
        GetQuestData(speurtochtId);
        user_id = extras.getInt("user_id");
        // Obtain the SupportMapFragment and get notified when the map is ready to be used.
        SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager()
                .findFragmentById(R.id.map);
        // ATTENTION: This "addApi(AppIndex.API)"was auto-generated to implement the App Indexing API.
        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .addApi(LocationServices.API).addApi(AppIndex.API).build();
        mapFragment.getMapAsync(this);
        AddButtonOnClickListeners();
        if (speurtochtId > 0) {
            GetSpeurtochtJsonData gs = new GetSpeurtochtJsonData();
            // NOTE(review): URL literal truncated in source — restore full endpoint.
            gs.execute("http:
        }
    }

    /**
     * Manipulates the map once available.
     * This callback is triggered when the map is ready to be used.
     * This is where we can add markers or lines, add listeners or move the camera. In this case,
     * we just add a marker near Sydney, Australia.
     * If Google Play services is not installed on the device, the user will be prompted to install
     * it inside the SupportMapFragment. This method will only be triggered once the user has
     * installed Google Play services and returned to the app.
     */
    @Override
    public void onMapReady(GoogleMap googleMap) {
        mMap = googleMap;
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION)
                == PackageManager.PERMISSION_GRANTED) {
            mMap.setMyLocationEnabled(true);
        }
        // Tapping a marker opens a custom dialog showing the location info plus a
        // multiple-choice question fetched asynchronously by GetQuestion.
        mMap.setOnMarkerClickListener(new GoogleMap.OnMarkerClickListener() {
            @Override
            public boolean onMarkerClick(final com.google.android.gms.maps.model.Marker marker) {
                final Dialog dialog = new Dialog(MapsActivity.this);
                dialog.setContentView(R.layout.custom_marker_dialog);
                dialog.setTitle(marker.getTitle());
                ImageView img = (ImageView) dialog.findViewById(R.id.custom_dialog_image);
                img.setImageResource(R.drawable.paardenbloem);
                TextView infoTextView = (TextView) dialog.findViewById(R.id.custom_dialog_info);
                infoTextView.setText(marker.getSnippet());
                // Stash the dialog's question views in activity fields so the async
                // GetQuestion task can populate them when the download finishes.
                questionText = (TextView) dialog.findViewById(R.id.QuestionText);
                answerRadio1 = (RadioButton) dialog.findViewById(R.id.answerRadio1);
                answerRadio2 = (RadioButton) dialog.findViewById(R.id.answerRadio2);
                answerRadio3 = (RadioButton) dialog.findViewById(R.id.answerRadio3);
                answerRadio4 = (RadioButton) dialog.findViewById(R.id.answerRadio4);
                answerButton = (Button) dialog.findViewById(R.id.answerButton);
                answerButton.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        // The entity for this location was attached in PlaceMarkers() via setTag().
                        Marker currentLocation = (Marker) marker.getTag();
                        answerRadioGroup = (RadioGroup) dialog.findViewById(R.id.answerRadioGroup);
                        // NOTE(review): if no radio button is checked, getCheckedRadioButtonId()
                        // returns -1 and selectedRadioButton will be null → NPE below.
                        int selectedRadiobuttonId = answerRadioGroup.getCheckedRadioButtonId();
                        RadioButton selectedRadioButton = (RadioButton) answerRadioGroup.findViewById(selectedRadiobuttonId);
                        String answer = selectedRadioButton.getText().toString();
                        LocationUser locationUser = new LocationUser();
                        locationUser.setUser_id(PreferenceManager.getDefaultSharedPreferences(getApplicationContext()).getInt("user_id", 0));
                        locationUser.setLocation_id(currentLocation.getId());
                        LocationUserController locationUserController = new LocationUserController();
                        // Recolor the marker green/grey to reflect the answer's correctness.
                        if (correctAnswer.equals(answer)) {
                            locationUser.setAnswered_correct(1);
                            marker.setIcon(BitmapDescriptorFactory.fromResource(R.drawable.greenmarkersmall));
                        } else {
                            locationUser.setAnswered_correct(0);
                            marker.setIcon(BitmapDescriptorFactory.fromResource(R.drawable.greymarkersmall));
                        }
                        locationUserController.postLocationUser(locationUser);
                        dialog.cancel();
                    }
                });
                Marker m =(Marker) marker.getTag();
                if(m != null) {
                    int vraagId = m.getVraag_id();
                    GetQuestion getq = new GetQuestion();
                    // NOTE(review): URL literal truncated in source — restore full endpoint
                    // (presumably includes vraagId as a query parameter).
                    getq.execute("http:
                }
                dialog.show();
                // Returning true suppresses the default info-window behavior.
                return true;
            }
        });
    }

    @Override
    public void onConnected(Bundle connectionHint) {
        //draw the circle around the current location
        // LatLng currentLocation = GetCurrentLocation();
        // Circle circle = mMap.addCircle(new CircleOptions()
        // .center(new LatLng(currentLocation.latitude, currentLocation.longitude))
        // .radius(500)
        // .strokeColor(Color.RED)
        // .fillColor(Color.TRANSPARENT));
        // Instantiates a new Polygon object and adds points to define a rectangle
        // (the quest's playing field, hard-coded coordinates).
        PolygonOptions rectOptions = new PolygonOptions()
                .add(new LatLng(51.80185467344209, 4.680642485618591),
                        new LatLng(51.799180878825474, 4.678325057029724),
                        new LatLng(51.79726334535511, 4.677445292472839),
                        new LatLng(51.79665953765794, 4.679537415504456),
                        new LatLng(51.797814064006644, 4.685030579566956),
                        new LatLng(51.80013629759001,4.685245156288147));
        rectOptions.strokeColor(Color.RED);
        // Get back the mutable Polygon
        Polygon polygon = mMap.addPolygon(rectOptions);
        // use a Handler to start the zoom after 1.5 seconds, giving the fused
        // location provider time to deliver a last-known location.
        final Handler handler = new Handler();
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                ZoomCameraToCurrentPosition();
            }
        }, 1500);
    }

    /** Wires up the QR-scan and start floating action buttons. */
    public void AddButtonOnClickListeners() {
        // markerButton = (Button) findViewById(R.id.currentLocMarkerButton);
        // markerButton.setOnClickListener(new View.OnClickListener() {
        // @Override
        // public void onClick(View view)
        // SetMarkerAtCurrentLocation();
        qrButton = (FloatingActionButton) findViewById(R.id.floatingQRbutton);
        qrButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Intent i = new Intent(getApplicationContext(),QRScanActivity.class);
                startActivity(i);
            }
        });
        startButton = (FloatingActionButton) findViewById(R.id.floatingStartButton);
        startButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Link this user to the quest on the server, then redraw markers
                // (they turn red once the quest is started — see PlaceMarkers()).
                PostKoppelTochtUser pktu = new PostKoppelTochtUser();
                // NOTE(review): URL literal truncated in source — restore full endpoint.
                pktu.execute("http:
                mMap.clear();
                PlaceMarkers();
            }
        });
    }

    /** Animates the camera to the user's last known location at a fixed zoom/tilt. */
    public void ZoomCameraToCurrentPosition() {
        // NOTE(review): GetCurrentLocation() may return null (no permission or no
        // fix yet); CameraPosition.Builder would then throw. Confirm call timing.
        LatLng currentPos = GetCurrentLocation();
        CameraPosition cameraPosition = new CameraPosition.Builder()
                .target(currentPos) // Sets the center of the map to location user
                .zoom(16) // Sets the zoom
                .bearing(0) // Sets the orientation of the camera to north
                .tilt(30) // Sets the tilt of the camera to 30 degrees
                .build(); // Creates a CameraPosition from the builder
        mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition));
    }

    //Gets all data from the quest trough the quest controller and puts it in a global variable
    public void GetQuestData(int questId) {
        QuestController questController = new QuestController();
        quest = questController.getQuest(questId);
    }

    /**
     * Places one map marker per downloaded quest location. Markers are red when
     * the current user has already started this quest, grey otherwise. Each map
     * marker carries its Marker entity as a tag for the click handler.
     */
    public void PlaceMarkers() {
        // NOTE(review): boxed Boolean used where primitive boolean would do.
        Boolean started = false;
        userQuestList = questUserController.getQuestByUserId(user_id);
        for (int i = 0; i < userQuestList.size(); i++) {
            if (userQuestList.get(i).getId() == quest.getId()) {
                started = true;
            }
        }
        for (int i = 0; i < markerLocations.size(); i++) {
            MarkerOptions options = new MarkerOptions();
            LatLng markerPos = new LatLng(markerLocations.get(i).getLatitude(),markerLocations.get(i).getLongitude());
            options.position(markerPos);
            options.title(markerLocations.get(i).getName());
            options.snippet(markerLocations.get(i).getInfo());
            if (started) {
                options.icon(BitmapDescriptorFactory.fromResource(R.drawable.redmarkersmall));
            } else {
                options.icon(BitmapDescriptorFactory.fromResource(R.drawable.greymarkersmall));
            }
            // Copy the entity so the click handler can read ids without re-fetching.
            Marker markerEntity = new Marker();
            markerEntity.setVraag_id(markerLocations.get(i).getVraag_id());
            markerEntity.setId(markerLocations.get(i).getId());
            markerEntity.setInfo(markerLocations.get(i).getInfo());
            markerEntity.setLatitude(markerLocations.get(i).getLatitude());
            markerEntity.setLongitude(markerLocations.get(i).getLongitude());
            markerEntity.setName(markerLocations.get(i).getName());
            com.google.android.gms.maps.model.Marker m = mMap.addMarker(options);
            m.setTag(markerEntity);
        }
    }

    //Sets marker at the users current location
    public void SetMarkerAtCurrentLocation() {
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION)
                == PackageManager.PERMISSION_GRANTED) {
            //store last known location
            Location currentLocation = LocationServices.FusedLocationApi.getLastLocation(mGoogleApiClient);
            //create a new marker
            MarkerOptions options = new MarkerOptions();
            LatLng pos = new LatLng(currentLocation.getLatitude(),currentLocation.getLongitude());
            options.position(pos);
            options.title("marker #" + markerCount);
            options.draggable(true);
            options.snippet("dit is een snippet");
            options.icon(BitmapDescriptorFactory.fromResource(R.drawable.redmarkersmall));
            mMap.addMarker(options);
            markerCount++;
        }
    }

    /**
     * Returns the last known device location as a LatLng, or null when location
     * permission is missing (and possibly NPEs when no fix exists yet —
     * getLastLocation() may return null; TODO confirm and guard).
     */
    public LatLng GetCurrentLocation() {
        LatLng pos = null;
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION)
                == PackageManager.PERMISSION_GRANTED) {
            Location currentLocation = LocationServices.FusedLocationApi.getLastLocation(mGoogleApiClient);
            pos = new LatLng(currentLocation.getLatitude(),currentLocation.getLongitude());
        }
        return pos;
    }

    // NOTE(review): missing @Override annotation on this Activity callback.
    protected void onStart() {
        mGoogleApiClient.connect();
        super.onStart();
        // ATTENTION: This was auto-generated to implement the App Indexing API.
        AppIndex.AppIndexApi.start(mGoogleApiClient, getIndexApiAction());
    }

    // NOTE(review): missing @Override annotation on this Activity callback.
    protected void onStop() {
        mGoogleApiClient.disconnect();
        super.onStop();// ATTENTION: This was auto-generated to implement the App Indexing API.
        AppIndex.AppIndexApi.end(mGoogleApiClient, getIndexApiAction());
    }

    @Override
    public void onConnectionSuspended(int cause) {
    }

    @Override
    public void onConnectionFailed(ConnectionResult result) {
    }

    /** Builds the auto-generated App Indexing action for this screen. */
    public Action getIndexApiAction() {
        Thing object = new Thing.Builder()
                .setName("Maps Page") // TODO: Define a title for the content shown.
                // TODO: Make sure this auto-generated URL is correct.
                .setUrl(Uri.parse("http://[ENTER-YOUR-URL-HERE]"))
                .build();
        return new Action.Builder(Action.TYPE_VIEW)
                .setObject(object)
                .setActionStatus(Action.STATUS_TYPE_COMPLETED)
                .build();
    }

    /**
     * Downloads the quest's location list as a JSON array and, on completion,
     * stores it in markerLocations and redraws the markers.
     * NOTE(review): raw ArrayList type parameters — should be ArrayList&lt;Marker&gt;.
     */
    public class GetSpeurtochtJsonData extends AsyncTask<String, String, ArrayList> {
        @Override
        protected ArrayList doInBackground(String... urlString) {
            ArrayList locations = new ArrayList<Marker>();
            try {
                URL url = new URL(urlString[0]);
                HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection();
                urlConnection.setRequestMethod("GET");
                urlConnection.connect();
                BufferedReader bufferedReader = new BufferedReader(
                        new InputStreamReader(urlConnection.getInputStream()));
                String next;
                // Each response line is expected to be a JSON array of location objects.
                while ((next = bufferedReader.readLine()) != null) {
                    JSONArray ja = new JSONArray(next);
                    for (int i = 0; i < ja.length(); i++) {
                        JSONObject jo = (JSONObject) ja.get(i);
                        Marker marker = new Marker();
                        marker.setId(Integer.parseInt(jo.getString("id")));
                        marker.setLatitude(Double.parseDouble(jo.getString("latitude")));
                        marker.setLongitude(Double.parseDouble(jo.getString("longitude")));
                        marker.setName(jo.getString("name"));
                        marker.setInfo(jo.getString("info"));
                        marker.setVraag_id(Integer.parseInt(jo.getString("question_id")));
                        locations.add(marker);
                    }
                }
            }catch(MalformedURLException e) {
                e.printStackTrace();
            } catch(IOException e) {
                e.printStackTrace();
            } catch(JSONException e) {
                e.printStackTrace();
            }
            return locations;
        }
        @Override
        protected void onPreExecute() {
            super.onPreExecute();
        }
        @Override
        protected void onPostExecute(ArrayList locations) {
            markerLocations = locations;
            PlaceMarkers();
        }
        @Override
        protected void onProgressUpdate(String... values) {
            super.onProgressUpdate(values);
        }
    }

    /**
     * POSTs a {tocht_id, user_id} JSON body to link the current user to this
     * quest ("koppel tocht-user"). The server response is only logged.
     */
    public class PostKoppelTochtUser extends AsyncTask<String , Void ,String> {
        String server_response;
        @Override
        protected String doInBackground(String... strings) {
            URL url;
            HttpURLConnection urlConnection = null;
            try {
                url = new URL(strings[0]);
                urlConnection = (HttpURLConnection) url.openConnection();
                urlConnection.setDoOutput(true);
                urlConnection.setDoInput(true);
                urlConnection.setRequestMethod("POST");
                urlConnection.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
                DataOutputStream wr = new DataOutputStream(urlConnection.getOutputStream ());
                try {
                    JSONObject obj = new JSONObject();
                    obj.put("tocht_id" , Integer.toString(speurtochtId));
                    obj.put("user_id" , Integer.toString(user_id));
                    //obj.put("started_bool", "1");
                    //obj.put("finished_bool", "0");
                    wr.writeBytes(obj.toString());
                    Log.e("JSON Input", obj.toString());
                    wr.flush();
                    wr.close();
                } catch (JSONException ex) {
                    ex.printStackTrace();
                }
                urlConnection.connect();
                int responseCode = urlConnection.getResponseCode();
                if(responseCode == HttpURLConnection.HTTP_OK){
                    server_response = readStream(urlConnection.getInputStream());
                }
            } catch (MalformedURLException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
            // Always returns null; the result is kept in server_response instead.
            return null;
        }
        @Override
        protected void onPostExecute(String s) {
            super.onPostExecute(s);
            Log.e("Response", "" + server_response);
        }
    }

    /**
     * Downloads one question (text, four answers, correct answer) as JSON and
     * fills the currently open marker dialog's views.
     * NOTE(review): onPostExecute indexes questionData without a size check —
     * if the download failed the list is empty and get(0) throws.
     */
    public class GetQuestion extends AsyncTask<String, String, ArrayList<String>> {
        @Override
        protected ArrayList<String> doInBackground(String... urlString) {
            ArrayList<String> questionData = new ArrayList<>();
            try {
                URL url = new URL(urlString[0]);
                HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection();
                urlConnection.setRequestMethod("GET");
                urlConnection.connect();
                BufferedReader bufferedReader = new BufferedReader(
                        new InputStreamReader(urlConnection.getInputStream()));
                String next;
                while ((next = bufferedReader.readLine()) != null) {
                    JSONObject jo = new JSONObject(next);
                    // Fixed positions: 0=question, 1..4=answers, 5=correct answer.
                    questionData.add(jo.getString("vraag"));
                    questionData.add(jo.getString("answer_1"));
                    questionData.add(jo.getString("answer_2"));
                    questionData.add(jo.getString("answer_3"));
                    questionData.add(jo.getString("answer_4"));
                    questionData.add(jo.getString("correct_answer"));
                }
            }catch(MalformedURLException e) {
                e.printStackTrace();
            } catch(IOException e) {
                e.printStackTrace();
            } catch(JSONException e) {
                e.printStackTrace();
            }
            return questionData;
        }
        @Override
        protected void onPreExecute() {
            super.onPreExecute();
        }
        @Override
        protected void onPostExecute( ArrayList<String> questionData) {
            questionText.setText((String)questionData.get(0));
            answerRadio1.setText((String) questionData.get(1));
            answerRadio2.setText((String) questionData.get(2));
            answerRadio3.setText((String) questionData.get(3));
            answerRadio4.setText((String) questionData.get(4));
            correctAnswer = questionData.get(5);
        }
        @Override
        protected void onProgressUpdate(String... values) {
            super.onProgressUpdate(values);
        }
    }

    //turn the response from the server into a readable string
    public static String readStream(InputStream in) {
        BufferedReader reader = null;
        StringBuffer response = new StringBuffer();
        try {
            reader = new BufferedReader(new InputStreamReader(in));
            String line = "";
            while ((line = reader.readLine()) != null) {
                response.append(line);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Always release the reader, even after a read failure.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return response.toString();
    }
}
package tw.com.akdg.thsrreceipt;

import android.accounts.NetworkErrorException;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.TextView;
import android.widget.Toast;
import com.google.zxing.Result;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import me.dm7.barcodescanner.zxing.ZXingScannerView;

/**
 * Scans THSR ticket QR codes, downloads the matching receipt PDF for each scan,
 * and lets the user mail the zipped receipts via Gmail. After the mail composer
 * returns, the locally stored PDFs are deleted.
 */
public class MainActivity extends Activity implements ZXingScannerView.ResultHandler {
    private static final String TAG = MainActivity.class.getName();
    /** Request code used when launching the Gmail compose activity. */
    public static final int MAIL_RESULT = 0;
    /** Prevents handling another QR code while a receipt download is in flight. */
    private boolean qrcodeLock = false;
    private ZXingScannerView mZXingScannerView;
    private Handler mHandler = new Handler();
    private Receipt mReceipt;
    // NOTE: SimpleDateFormat is not thread-safe; this instance is only used from
    // the UI thread (sendMail), which is safe here.
    private final static SimpleDateFormat mDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mReceipt = new Receipt(this);
        mZXingScannerView = (ZXingScannerView) findViewById(R.id.view);
    }

    @Override
    protected void onResume() {
        super.onResume();
        mZXingScannerView.setResultHandler(this);
        mZXingScannerView.startCamera();
    }

    @Override
    protected void onPause() {
        super.onPause();
        mZXingScannerView.stopCamera();
    }

    /** Counter badge in the action bar showing how many PDFs are stored locally. */
    private TextView mTextView;

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        MenuItem menuItem = menu.findItem(R.id.action_pdf_count);
        menuItem.setActionView(R.layout.action_num_message);
        mTextView = (TextView) menuItem.getActionView().findViewById(R.id.textView);
        updatePDFCount();
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_send_mail) {
            try {
                sendMail(new File(mReceipt.getZipFilePath()));
            } catch (IOException e) {
                e.printStackTrace();
            }
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Opens the Gmail compose screen with the given zip attached, addressed to
     * the receiver stored in preferences under "RECEIVEMAIL".
     *
     * @param file zip archive of the downloaded receipt PDFs
     */
    private void sendMail(File file) {
        Intent intent = new Intent(Intent.ACTION_SEND);
        intent.putExtra(Intent.EXTRA_EMAIL, new String[]{
                getPreferences(Context.MODE_PRIVATE).getString("RECEIVEMAIL", "")});
        // Target Gmail's compose activity directly.
        intent.setClassName("com.google.android.gm", "com.google.android.gm.ComposeActivityGmail");
        intent.putExtra(Intent.EXTRA_SUBJECT, String.format("%s_%s",
                getString(R.string.mail_title), mDateFormat.format(System.currentTimeMillis())));
        intent.putExtra(Intent.EXTRA_STREAM, Uri.parse("file://" + file.getAbsolutePath()));
        startActivityForResult(intent, MAIL_RESULT);
    }

    /**
     * ZXing scan callback: validates the QR payload, extracts the PNR and ticket
     * id, and downloads the receipt PDF on a background task while showing a
     * progress dialog. Re-entrant scans are suppressed via {@link #qrcodeLock}.
     */
    @Override
    public void handleResult(final Result result) {
        if (!qrcodeLock) {
            final String qrcode = result.getText();
            // A valid THSR ticket QR payload is exactly 124 characters long.
            if (qrcode.length() != 124) {
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        Toast.makeText(MainActivity.this, "Please re-scan.", Toast.LENGTH_LONG).show();
                    }
                });
                return;
            }
            // Fixed payload layout: chars [0,13) = ticket id, [13,21) = PNR.
            final String pnr = qrcode.substring(13, 21);
            final String tid = qrcode.substring(0, 13);
            new AsyncTask<Void, Void, Void>() {
                ProgressDialog mDialog = new ProgressDialog(MainActivity.this);

                @Override
                protected void onPreExecute() {
                    super.onPreExecute();
                    qrcodeLock = true;
                    mHandler.post(new Runnable() {
                        @Override
                        public void run() {
                            mDialog.setMessage(getString(R.string.download));
                            mDialog.show();
                        }
                    });
                }

                @Override
                protected Void doInBackground(Void... voids) {
                    try {
                        mReceipt.downloadReceipt(pnr, tid);
                    } catch (IOException e) {
                        e.printStackTrace();
                    } catch (NetworkErrorException e) {
                        e.printStackTrace();
                    }
                    return null;
                }

                @Override
                protected void onPostExecute(Void aVoid) {
                    super.onPostExecute(aVoid);
                    qrcodeLock = false;
                    updatePDFCount();
                    mZXingScannerView.startCamera();
                    mDialog.dismiss();
                }
            }.execute();
        }
    }

    /**
     * Deletes the stored receipt PDFs once the mail composer returns, then
     * refreshes the counter badge.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // NOTE(review): this compares resultCode (not requestCode) against
        // MAIL_RESULT, and MAIL_RESULT == 0 == RESULT_CANCELED. The Gmail
        // composer reports no meaningful result code, so in practice this runs
        // whenever the composer returns — confirm before changing.
        if (resultCode == MAIL_RESULT) {
            File filePDFDir = getDir(Receipt.PDF_DIR_NAME, Context.MODE_PRIVATE);
            File[] pdfFiles = filePDFDir.listFiles();
            // listFiles() returns null on I/O error — guard against NPE.
            if (pdfFiles != null) {
                for (File file : pdfFiles) {
                    // Best-effort delete; a leftover file is re-counted below.
                    file.delete();
                }
            }
            updatePDFCount();
        }
    }

    /** Refreshes the action-bar badge with the number of locally stored PDFs. */
    private void updatePDFCount() {
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                // The badge view only exists after onCreateOptionsMenu has run.
                if (mTextView == null) {
                    return;
                }
                File filePDFDir = getDir(Receipt.PDF_DIR_NAME, Context.MODE_PRIVATE);
                File[] pdfFiles = filePDFDir.listFiles();
                // listFiles() returns null on I/O error — show 0 instead of crashing.
                int count = (pdfFiles == null) ? 0 : pdfFiles.length;
                mTextView.setText(String.format("%d", count));
            }
        });
    }
}
package org.commcare.android.view.c3;

import org.commcare.android.util.InvalidStateException;
import org.commcare.suite.model.graph.Graph;
import org.commcare.suite.model.graph.GraphData;
import org.commcare.suite.model.graph.SeriesData;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.Iterator;

/**
 * Builds the C3 "axis" configuration object (x, y, and secondary y axes) from
 * the CommCare graph model: bounds, tick placement/labels, and axis titles.
 */
public class AxisConfiguration extends Configuration {
    public AxisConfiguration(GraphData data) throws JSONException, InvalidStateException {
        super(data);

        JSONObject x = getAxis("x");
        JSONObject y = getAxis("y");
        JSONObject y2 = getAxis("secondary-y");

        // Time-series graphs need C3's timeseries x-axis type so tick values
        // (epoch-based doubles from parseTime) render as dates.
        if (mData.getType().equals(Graph.TYPE_TIME)) {
            x.put("type", "timeseries");
        }

        // Display secondary y axis, regardless of whether it has data; this makes the
        // whitespace around the graph look more reasonable. X and primary Y axis show by default.
        y2.put("show", true);

        mConfiguration.put("x", x);
        mConfiguration.put("y", y);
        mConfiguration.put("y2", y2);

        // Bar graphs may be rotated. C3 defaults to vertical bars.
        if (mData.getType().equals(Graph.TYPE_BAR)
                && !mData.getConfiguration("bar-orientation", "horizontal").equalsIgnoreCase("vertical")) {
            mConfiguration.put("rotated", true);
        }
    }

    /**
     * Add min and max bounds to given axis.
     * @param axis Current axis configuration. Will be modified.
     * @param prefix Prefix for commcare model's configuration: "x", "y", or "secondary-y"
     */
    private void addBounds(JSONObject axis, String prefix) throws InvalidStateException, JSONException {
        addBound(axis, prefix, "min");
        addBound(axis, prefix, "max");
    }

    /**
     * Add min or max bound to given axis, if configured. Time-series x values
     * are parsed as times; everything else as doubles.
     * @param axis Current axis configuration. Will be modified.
     * @param prefix Prefix for commcare model's configuration: "x", "y", or "secondary-y"
     * @param suffix "min" or "max"
     */
    private void addBound(JSONObject axis, String prefix, String suffix)
            throws JSONException, InvalidStateException {
        String key = prefix + "-" + suffix;
        String value = mData.getConfiguration(key);
        if (value != null) {
            if (prefix.equals("x") && mData.getType().equals(Graph.TYPE_TIME)) {
                axis.put(suffix, parseTime(value, key));
            } else {
                axis.put(suffix, parseDouble(value, key));
            }
        }
    }

    /**
     * Configure tick count, placement, and labels.
     * @param axis Current axis configuration. Will be modified.
     * @param key One of "x-labels", "y-labels", "secondary-y-labels"
     * @param varName If the axis uses a hash of labels (position => label), a variable
     *                will be created with this name to store those labels.
     */
    private void addTickConfig(JSONObject axis, String key, String varName)
            throws InvalidStateException, JSONException {
        // The labels configuration might be a JSON array of numbers,
        // a JSON object of number => string, or a single number
        String labelString = mData.getConfiguration(key);
        JSONObject tick = new JSONObject();
        boolean usingCustomText = false;
        mVariables.put(varName, "{}");
        if (labelString != null) {
            try {
                // Array: label each given value
                JSONArray labels = new JSONArray(labelString);
                JSONArray values = new JSONArray();
                for (int i = 0; i < labels.length(); i++) {
                    String xValue = labels.getString(i);
                    if (mData.getType().equals(Graph.TYPE_TIME)) {
                        values.put(parseTime(xValue, key));
                    } else {
                        values.put(parseDouble(xValue, key));
                    }
                }
                tick.put("values", values);
            } catch (JSONException je) {
                // Assume try block failed because labelString isn't an array.
                // Try parsing it as an object.
                try {
                    // Object: each key is a location on the axis,
                    // and the value is text with which to label it
                    JSONObject labels = new JSONObject(labelString);
                    JSONArray values = new JSONArray();
                    Iterator i = labels.keys();
                    while (i.hasNext()) {
                        String location = (String)i.next();
                        if (mData.getType().equals(Graph.TYPE_TIME)) {
                            values.put(parseTime(location, key));
                        } else {
                            values.put(parseDouble(location, key));
                        }
                    }
                    tick.put("values", values);
                    mVariables.put(varName, labels.toString());
                    usingCustomText = true;
                } catch (JSONException e) {
                    // Assume labelString is just a scalar, which
                    // represents the number of labels the user wants.
                    // (Throws NumberFormatException if it isn't numeric.)
                    tick.put("count", Integer.valueOf(labelString));
                }
            }
        }

        // Custom label text overrides time formatting, so only apply the time
        // format when positions weren't given explicit text.
        if (key.startsWith("x") && !usingCustomText && mData.getType().equals(Graph.TYPE_TIME)) {
            tick.put("format", mData.getConfiguration("x-labels-time-format", "%Y-%m-%d"));
        }

        if (key.startsWith("secondary-y")) {
            // If there aren't any series for the secondary y axis, don't label it
            boolean hasSecondaryAxis = false;
            for (SeriesData s : mData.getSeries()) {
                hasSecondaryAxis = hasSecondaryAxis
                        || Boolean.valueOf(s.getConfiguration("secondary-y", "false"));
                if (hasSecondaryAxis) {
                    break;
                }
            }
            if (!hasSecondaryAxis) {
                tick.put("values", new JSONArray());
            }
        }

        if (tick.length() > 0) {
            axis.put("tick", tick);
        }
    }

    /**
     * Add title to axis.
     * @param axis Current axis configuration. Will be modified.
     * @param key One of "x-title", "y-title", "secondary-y-title"
     * @param position For horizontal axis, (inner|outer)-(right|center|left)
     *                 For vertical axis, (inner|outer)-(top|middle|bottom)
     */
    private void addTitle(JSONObject axis, String key, String position) throws JSONException {
        String title = mData.getConfiguration(key, "");
        // String.trim doesn't cover characters like unicode's non-breaking space
        title = title.replaceAll("^\\s*", "");
        title = title.replaceAll("\\s*$", "");
        // Show title regardless of whether or not it exists, to give all graphs consistent padding
        JSONObject label = new JSONObject();
        label.put("text", title);
        label.put("position", position);
        axis.put("label", label);
    }

    /**
     * Generate axis configuration.
     * @param prefix Prefix for commcare model's configuration: "x", "y", or "secondary-y"
     * @return JSONObject representing the axis's configuration
     */
    private JSONObject getAxis(String prefix) throws JSONException, InvalidStateException {
        final boolean showAxes = Boolean.valueOf(mData.getConfiguration("show-axes", "true"));
        if (!showAxes) {
            return new JSONObject("{ show: false }");
        }

        JSONObject config = new JSONObject();
        boolean isX = prefix.equals("x");

        // Undo C3's automatic axis padding
        config.put("padding", new JSONObject("{top: 0, right: 0, bottom: 0, left: 0}"));

        addTitle(config, prefix + "-title", isX ? "outer-center" : "outer-middle");
        addBounds(config, prefix);
        // C3 names the secondary axis "y2", so variables use that prefix too.
        String jsPrefix = prefix.equals("secondary-y") ? "y2" : prefix;
        addTickConfig(config, prefix + "-labels", jsPrefix + "Labels");

        return config;
    }
}
package org.basex.query.func;

import static org.basex.query.util.Err.*;
import static org.basex.util.Token.*;

import org.basex.query.*;
import org.basex.query.expr.*;
import org.basex.query.util.format.*;
import org.basex.query.value.item.*;
import org.basex.query.value.type.*;
import org.basex.util.*;
import org.basex.util.hash.*;

/**
 * Implements the XQuery formatting functions: format-integer, format-number,
 * and the format-date/time/dateTime family.
 */
public final class FNFormat extends StandardFunc {
  /** Pattern cache: maps picture strings to their parsed representation so a
   * picture is only parsed once per function instance. */
  private final TokenObjMap<FormatParser> formats = new TokenObjMap<FormatParser>();

  /**
   * Constructor.
   * @param ii input info
   * @param f function definition
   * @param e arguments
   */
  public FNFormat(final InputInfo ii, final Function f, final Expr... e) {
    super(ii, f, e);
  }

  @Override
  public Item item(final QueryContext ctx, final InputInfo ii) throws QueryException {
    // Dispatch on the concrete formatting function; each date variant only
    // differs in the expected input type.
    switch(sig) {
      case FORMAT_INTEGER:  return formatInteger(ctx);
      case FORMAT_NUMBER:   return formatNumber(ctx);
      case FORMAT_DATETIME: return formatDate(AtomType.DTM, ctx);
      case FORMAT_DATE:     return formatDate(AtomType.DAT, ctx);
      case FORMAT_TIME:     return formatDate(AtomType.TIM, ctx);
      default:              return super.item(ctx, ii);
    }
  }

  /**
   * Returns a formatted integer.
   * @param ctx query context
   * @return string
   * @throws QueryException query exception
   */
  private Str formatInteger(final QueryContext ctx) throws QueryException {
    final byte[] pic = checkStr(expr[1], ctx);
    // Optional third argument: language token.
    final byte[] lng = expr.length == 2 ? EMPTY : checkStr(expr[2], ctx);

    // An empty sequence formats to the empty string.
    if(expr[0].isEmpty()) return Str.ZERO;
    final long num = checkItr(expr[0], ctx);

    // Look up (or parse and cache) the integer picture.
    FormatParser fp = formats.get(pic);
    if(fp == null) {
      fp = new IntFormat(pic, info);
      formats.put(pic, fp);
    }
    return Str.get(Formatter.get(lng).formatInt(num, fp));
  }

  /**
   * Returns a formatted number.
   * @param ctx query context
   * @return string
   * @throws QueryException query exception
   */
  private Str formatNumber(final QueryContext ctx) throws QueryException {
    // evaluate arguments; an empty sequence is formatted as NaN
    Item it = expr[0].item(ctx, info);
    if(it == null) it = Dbl.NAN;
    else if(!it.type.isNumberOrUntyped()) number(this, it);

    // retrieve picture
    final byte[] pic = checkStr(expr[1], ctx);
    // retrieve format declaration (optional third argument names a
    // decimal-format declared in the static context)
    final QNm frm = expr.length == 3 ?
        new QNm(trim(checkEStr(expr[2], ctx)), ctx) : new QNm(EMPTY);
    final DecFormatter df = ctx.sc.decFormats.get(frm.id());
    if(df == null) throw FORMNUM.thrw(info, frm);

    return Str.get(df.format(info, it, pic));
  }

  /**
   * Returns a formatted date, time, or dateTime.
   * @param ctx query context
   * @param tp input type (date, time, or dateTime)
   * @return string, or {@code null} if the first argument is an empty sequence
   * @throws QueryException query exception
   */
  private Item formatDate(final Type tp, final QueryContext ctx) throws QueryException {
    final Item it = expr[0].item(ctx, info);
    final byte[] pic = checkEStr(expr[1], ctx);
    // The five-argument variant additionally supplies language, calendar and place.
    final byte[] lng = expr.length == 5 ? checkEStr(expr[2], ctx) : EMPTY;
    final byte[] cal = expr.length == 5 ? checkEStr(expr[3], ctx) : EMPTY;
    final byte[] plc = expr.length == 5 ? checkEStr(expr[4], ctx) : EMPTY;
    if(it == null) return null;
    final ADate date = (ADate) checkType(it, tp);
    final Formatter form = Formatter.get(lng);
    return Str.get(form.formatDate(date, lng, pic, cal, plc, info));
  }
}
package com.board.gd.config; import com.board.gd.auth.EmailAuthenticationProvider; import com.board.gd.domain.user.UserService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpMethod; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; @EnableWebSecurity public class SecurityConfig extends WebSecurityConfigurerAdapter { @Autowired private UserService userService; @Autowired private EmailAuthenticationProvider userAuthenticationProvider; @Autowired public void configAuthentication(AuthenticationManagerBuilder auth) throws Exception { auth .authenticationProvider(userAuthenticationProvider) .userDetailsService(userService).passwordEncoder(new BCryptPasswordEncoder()); } @Override public void configure(HttpSecurity http) throws Exception { http.csrf().disable() .authorizeRequests() .antMatchers("/users/login", "/users/signup", "/users/email", "/users/*/auth", "/users/data").access("permitAll")
package slimpleslickgame; import org.newdawn.slick.geom.Shape; import util.ColorSwitch; import client.GameEvent; import client.GameStatsEvents; public class OpponentPlayer extends Player { public OpponentPlayer(byte id, GameStatsEvents gse) { super(id, gse); } @Override public void update(int delta, Shape containerShape) { if(dead) return; GameEvent e; int score = 0; while ((e = gse.pop(id)) != null) { switch (e.getRole()) { case CREEP: { if (e.isAlive()) { super.creeps.put(e.getId(), new Creep(e.getPosition(), ColorSwitch.getColorFromId(e.getSendId()))); } else { super.creeps.remove(e.getId()); } break; } case PLAYER: { if (!e.isAlive()){ dead = true; } if(e.getPlayerHp() != -1){ stats.setHP(e.getPlayerHp()); } if (e.getPosition() != null) { super.position = e.getPosition(); } if (e.getDirection() != null) { // super.direction = e.getDirection(); } if (e.getScore() > 0) { score = e.getScore(); } super.updatePosition(containerShape); break; } case BULLET: { if (e.isAlive()) { super.gun.shoot(e.getPosition()); } else { super.gun.delete(e.getId()); } break; } } } for (Creep c : super.creeps.values()) { c.update(delta); } if(score > 0){ stats.update(delta, score); } super.gun.update(delta); } }
package io.sniffy;

import com.codahale.metrics.Timer;
import org.junit.Before;
import org.junit.Test;
import ru.yandex.qatools.allure.annotations.Features;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
// NOTE(review): IntStream appears unused in this file.
import java.util.stream.IntStream;

import static org.junit.Assert.*;

/**
 * Tests for spy registration visibility across threads and for the global
 * SQL statistics maintained by {@code Sniffy}.
 */
public class SniffyTest extends BaseTest {

    // Reset both spy registries before each test so spies created by one test
    // (or leaked from elsewhere) cannot influence another.
    @Before
    public void clearSpies() {
        Sniffy.registeredSpies.clear();
        Sniffy.currentThreadSpies.clear();
    }

    // A spy created via Sniffy.spy() is registered globally, so a different
    // thread must still observe that spies exist.
    @Test
    public void hasSpiesFromOtherThreads() throws Exception {
        try (@SuppressWarnings("unused") Spy spy = Sniffy.spy()) {
            AtomicBoolean hasSpies = new AtomicBoolean();
            Thread thread = new Thread(() -> hasSpies.set(Sniffy.hasSpies()));
            thread.start();
            thread.join();
            assertTrue(hasSpies.get());
        }
    }

    // A current-thread spy is scoped to the creating thread, so a different
    // thread must NOT observe it.
    @Test
    public void hasNotSpiesFromOtherThreads() throws Exception {
        try (@SuppressWarnings("unused") CurrentThreadSpy spy = Sniffy.spyCurrentThread()) {
            AtomicBoolean hasSpies = new AtomicBoolean();
            Thread thread = new Thread(() -> hasSpies.set(Sniffy.hasSpies()));
            thread.start();
            thread.join();
            assertFalse(hasSpies.get());
        }
    }

    // A current-thread spy counts only statements executed on the owning thread:
    // the 2 executed here are recorded, the 3 executed in another thread are not.
    @Test
    public void testCurrentThreadSpy() throws Exception {
        CurrentThreadSpy spy = Sniffy.spyCurrentThread();
        executeStatements(2);
        executeStatementsInOtherThread(3);
        assertEquals(2, spy.executedStatements());
        assertEquals(1, spy.getExecutedStatements().size());
        assertEquals(2, spy.getExecutedStatements().values().iterator().next().queries.get());
    }

    // Executing the same query three times must yield exactly one global stats
    // entry, keyed by the SQL text, with a timer count of 3.
    @Test
    @Features("issues/292")
    public void testGetGlobalSqlStats() throws Exception {
        Sniffy.getGlobalSqlStats().clear();
        executeStatements(3);
        ConcurrentMap<String, Timer> globalSqlStats = Sniffy.getGlobalSqlStats();
        assertEquals(1, globalSqlStats.size());
        Map.Entry<String, Timer> entry = globalSqlStats.entrySet().iterator().next();
        assertEquals("SELECT 1 FROM DUAL", entry.getKey());
        assertEquals(3, entry.getValue().getCount());
    }

    // Fills the stats map exactly to TOP_SQL_CAPACITY with distinct queries
    // (query i executed 2 + i % 100 times), then runs one more distinct query
    // and checks the eviction behaviour: the map stays at capacity, the new
    // query and query 0 are present, and query 1 has been evicted.
    @Test
    @Features("issues/292")
    public void testLruGlobalSqlStats() throws Exception {
        Sniffy.getGlobalSqlStats().clear();
        for (int i = 0; i < Sniffy.TOP_SQL_CAPACITY; i++) {
            executeSelectStatements(i, 2 + i % 100);
        }
        assertEquals(Sniffy.TOP_SQL_CAPACITY, Sniffy.getGlobalSqlStats().size());
        assertNotNull(Sniffy.getGlobalSqlStats().get(String.format("SELECT %d FROM DUAL", 0)));
        executeSelectStatements(Sniffy.TOP_SQL_CAPACITY + 1000, 1);
        assertEquals(Sniffy.TOP_SQL_CAPACITY, Sniffy.getGlobalSqlStats().size());
        assertTrue(Sniffy.getGlobalSqlStats().containsKey(String.format("SELECT %d FROM DUAL", Sniffy.TOP_SQL_CAPACITY + 1000)));
        assertTrue(Sniffy.getGlobalSqlStats().containsKey(String.format("SELECT %d FROM DUAL", 0)));
        assertFalse(Sniffy.getGlobalSqlStats().containsKey(String.format("SELECT %d FROM DUAL", 1)));
    }

    // Runs "SELECT <index> FROM DUAL" `count` times on a fresh connection;
    // distinct `index` values produce distinct stats-map keys.
    private void executeSelectStatements(int index, int count) throws SQLException {
        try (Connection connection = openConnection();
             Statement statement = connection.createStatement()) {
            for (int i = 0; i < count; i++) {
                statement.execute(String.format("SELECT %d FROM DUAL", index));
            }
        }
    }
}
package net.fortuna.ical4j.data;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.text.ParseException;

import net.fortuna.ical4j.model.Calendar;
import net.fortuna.ical4j.model.Component;
import net.fortuna.ical4j.model.ComponentFactory;
import net.fortuna.ical4j.model.Parameter;
import net.fortuna.ical4j.model.ParameterFactoryImpl;
import net.fortuna.ical4j.model.Property;
import net.fortuna.ical4j.model.PropertyFactoryImpl;
import net.fortuna.ical4j.model.TimeZone;
import net.fortuna.ical4j.model.TimeZoneRegistryFactory;
import net.fortuna.ical4j.model.component.VEvent;
import net.fortuna.ical4j.model.component.VTimeZone;
import net.fortuna.ical4j.model.component.VToDo;
import net.fortuna.ical4j.model.parameter.TzId;
import net.fortuna.ical4j.model.property.DateListProperty;
import net.fortuna.ical4j.model.property.DateProperty;
import net.fortuna.ical4j.util.Constants;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Parses and builds an iCalendar model from an input stream.
 * Note that this class is not thread-safe.
 *
 * <p>The builder acts as a SAX-style {@link ContentHandler}: the parser
 * invokes the start/end callbacks below, and the builder assembles the
 * resulting {@link Calendar} in the mutable fields {@code calendar},
 * {@code component}, {@code subComponent} and {@code property}.</p>
 *
 * @version 2.0
 * @author Ben Fortuna
 */
public class CalendarBuilder implements ContentHandler {

    // Charset applied when building from a raw InputStream.
    private static final Charset DEFAULT_CHARSET = Charset.forName("UTF-8");

    private static Log log = LogFactory.getLog(CalendarBuilder.class);

    private CalendarParser parser;

    // Parser state: the calendar under construction, the current top-level
    // component, the current nested component (observance/alarm), and the
    // property currently being populated. Reset on each build().
    private Calendar calendar;

    private Component component;

    private Component subComponent;

    private Property property;

    /**
     * Default constructor.
     */
    public CalendarBuilder() {
        this(new CalendarParserImpl());
    }

    /**
     * Constructs a new calendar builder using the specified
     * calendar parser.
     * @param parser a calendar parser used to parse calendar files
     */
    public CalendarBuilder(final CalendarParser parser) {
        this.parser = parser;
    }

    /**
     * Builds an iCalendar model from the specified input stream.
     *
     * @param in
     * @return a calendar
     * @throws IOException
     * @throws ParserException
     */
    public final Calendar build(final InputStream in)
            throws IOException, ParserException {
        return build(new InputStreamReader(in, DEFAULT_CHARSET));
    }

    /**
     * Builds an iCalendar model from the specified reader.
     * An <code>UnfoldingReader</code> is applied to the specified
     * reader to ensure the data stream is correctly unfolded where
     * appropriate.
     *
     * @param in
     * @return a calendar
     * @throws IOException
     * @throws ParserException
     */
    public final Calendar build(final Reader in)
            throws IOException, ParserException {
        // RFC 2445 allows long lines to be "folded"; the UnfoldingReader
        // reverses that before parsing.
        UnfoldingReader uin = new UnfoldingReader(in);
        // re-initialise..
        calendar = null;
        component = null;
        subComponent = null;
        property = null;
        // the parser drives this builder via the ContentHandler callbacks below
        parser.parse(uin, this);
        return calendar;
    }

    /* (non-Javadoc)
     * @see net.fortuna.ical4j.data.ContentHandler#endCalendar()
     */
    public void endCalendar() {
        // do nothing..
    }

    /* (non-Javadoc)
     * @see net.fortuna.ical4j.data.ContentHandler#endComponent(java.lang.String)
     */
    public void endComponent(final String name) {
        if (component != null) {
            if (subComponent != null) {
                // a nested component just ended: attach it to its parent.
                // Only VTIMEZONE (observances) and VEVENT/VTODO (alarms)
                // support nesting here.
                if (component instanceof VTimeZone) {
                    ((VTimeZone) component).getObservances().add(subComponent);
                }
                else if (component instanceof VEvent) {
                    ((VEvent) component).getAlarms().add(subComponent);
                }
                else if (component instanceof VToDo) {
                    ((VToDo) component).getAlarms().add(subComponent);
                }
                subComponent = null;
            }
            else {
                // a top-level component just ended: add it to the calendar.
                calendar.getComponents().add(component);
                if (component instanceof VTimeZone) {
                    // register the timezone for use with iCalendar objects..
                    TimeZoneRegistryFactory.getInstance().getRegistry().register(new TimeZone((VTimeZone) component));
                }
                component = null;
            }
        }
    }

    /* (non-Javadoc)
     * @see net.fortuna.ical4j.data.ContentHandler#endProperty(java.lang.String)
     */
    public void endProperty(final String name) {
        if (property != null) {
            // replace with a constant instance if applicable..
            property = Constants.forProperty(property);
            // attach the finished property to the innermost open scope:
            // sub-component, then component, then the calendar itself.
            if (component != null) {
                if (subComponent != null) {
                    subComponent.getProperties().add(property);
                }
                else {
                    component.getProperties().add(property);
                }
            }
            else if (calendar != null) {
                calendar.getProperties().add(property);
            }
            property = null;
        }
    }

    /* (non-Javadoc)
     * @see net.fortuna.ical4j.data.ContentHandler#parameter(java.lang.String, java.lang.String)
     */
    public void parameter(final String name, final String value)
            throws URISyntaxException {
        if (property != null) {
            // parameter names are case-insensitive, but convert to upper case to simplify further processing
            Parameter param = ParameterFactoryImpl.getInstance().createParameter(name.toUpperCase(), value);
            property.getParameters().add(param);
            if (param instanceof TzId) {
                // a TZID parameter binds the property's date value(s) to a
                // previously registered timezone.
                TimeZone timezone = TimeZoneRegistryFactory.getInstance().getRegistry().getTimeZone(param.getValue());
                try {
                    ((DateProperty) property).setTimeZone(timezone);
                }
                catch (Exception e) {
                    // the cast failed: fall back to the list-valued date
                    // property type before giving up with a warning.
                    try {
                        ((DateListProperty) property).setTimeZone(timezone);
                    }
                    catch (Exception e2) {
                        log.warn("Error setting timezone [" + param + "] on property [" + property.getName() + "]", e);
                    }
                }
            }
        }
    }

    /* (non-Javadoc)
     * @see net.fortuna.ical4j.data.ContentHandler#propertyValue(java.lang.String)
     */
    public void propertyValue(final String value)
            throws URISyntaxException, ParseException, IOException {
        if (property != null) {
            property.setValue(value);
        }
    }

    /* (non-Javadoc)
     * @see net.fortuna.ical4j.data.ContentHandler#startCalendar()
     */
    public void startCalendar() {
        calendar = new Calendar();
    }

    /* (non-Javadoc)
     * @see net.fortuna.ical4j.data.ContentHandler#startComponent(java.lang.String)
     */
    public void startComponent(final String name) {
        // a component opening while another is already open is treated as a
        // nested sub-component (observance or alarm); only one level of
        // nesting is tracked.
        if (component != null) {
            subComponent = ComponentFactory.getInstance().createComponent(name);
        }
        else {
            component = ComponentFactory.getInstance().createComponent(name);
        }
    }

    /* (non-Javadoc)
     * @see net.fortuna.ical4j.data.ContentHandler#startProperty(java.lang.String)
     */
    public void startProperty(final String name) {
        // property names are case-insensitive, but convert to upper case to simplify further processing
        property = PropertyFactoryImpl.getInstance().createProperty(name.toUpperCase());
    }
}
package org.jasig.portal.services; import java.util.Vector; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.Properties; import org.jasig.portal.GenericPortalBean; import org.jasig.portal.security.IRole; import org.jasig.portal.security.IPerson; import org.jasig.portal.security.IAuthorization; import org.jasig.portal.security.IAuthorizationFactory; import org.jasig.portal.security.PortalSecurityException; /** * @author Bernie Durfee, bdurfee@interactivebusiness.com * @version $Revision$ */ public class Authorization { protected IAuthorization m_authorization = null; protected static String s_factoryName = null; protected static IAuthorizationFactory m_Factory = null; static { // Get the security properties file File secprops = new File(GenericPortalBean.getPortalBaseDir() + "properties" + File.separator + "security.properties"); // Get the properties from the security properties file Properties pr = new Properties(); try { pr.load(new FileInputStream(secprops)); // Look for our authorization factory and instantiate an instance of it or die trying. 
if ((s_factoryName = pr.getProperty("authorizationProvider")) == null) { LogService.instance().log(LogService.ERROR, new PortalSecurityException("AuthorizationProvider not specified or incorrect in security.properties")); } else { try { m_Factory = (IAuthorizationFactory)Class.forName(s_factoryName).newInstance(); } catch (Exception e) { LogService.instance().log(LogService.ERROR, new PortalSecurityException("Failed to instantiate " + s_factoryName)); } } } catch (IOException e) { LogService.instance().log(LogService.ERROR, new PortalSecurityException(e.getMessage())); } } /** * put your documentation comment here */ public Authorization () { // From our factory get an actual authorization instance m_authorization = m_Factory.getAuthorization(); } /** * put your documentation comment here * @param person * @param role * @return */ public boolean isUserInRole (IPerson person, IRole role) { return (m_authorization.isUserInRole(person, role)); } /** * put your documentation comment here * @return */ public Vector getAllRoles () { return (m_authorization.getAllRoles()); } /** * put your documentation comment here * @param channelID * @param roles * @return */ public int setChannelRoles (int channelID, Vector roles) { return (m_authorization.setChannelRoles(channelID, roles)); } /** * put your documentation comment here * @param person * @return */ public boolean canUserPublish (IPerson person) { return (m_authorization.canUserPublish(person)); } // For the subscribe mechanism to use public Vector getAuthorizedChannels (IPerson person) { return (m_authorization.getAuthorizedChannels(person)); } /** * put your documentation comment here * @param person * @param channelID * @return */ public boolean canUserSubscribe (IPerson person, int channelID) { return (m_authorization.canUserSubscribe(person, channelID)); } /** * put your documentation comment here * @param person * @param channelID * @return */ public boolean canUserRender (IPerson person, int channelID) { return 
(m_authorization.canUserRender(person, channelID)); } /** * put your documentation comment here * @param person * @param channelID * @return */ public Vector getChannelRoles (int channelID) { return (m_authorization.getChannelRoles(channelID)); } /** * put your documentation comment here * @param person * @return */ public Vector getUserRoles (IPerson person) { return (m_authorization.getUserRoles(person)); } /** * put your documentation comment here * @param person * @param roles */ public void addUserRoles (IPerson person, Vector roles) { m_authorization.addUserRoles(person, roles); } /** * put your documentation comment here * @param person * @param roles */ public void removeUserRoles (IPerson person, Vector roles) { m_authorization.removeUserRoles(person, roles); } }
package org.jfree.data.xy; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import org.jfree.chart.util.ObjectUtilities; import org.jfree.chart.util.PublicCloneable; import org.jfree.data.DomainInfo; import org.jfree.data.Range; import org.jfree.data.general.DatasetChangeEvent; import org.jfree.data.general.DatasetUtilities; import org.jfree.data.general.SeriesChangeEvent; /** * An {@link XYDataset} where every series shares the same x-values (required * for generating stacked area charts). */ public class DefaultTableXYDataset extends AbstractIntervalXYDataset implements TableXYDataset, IntervalXYDataset, DomainInfo, PublicCloneable { /** * Storage for the data - this list will contain zero, one or many * XYSeries objects. */ private List data = null; /** Storage for the x values. */ private HashSet xPoints = null; /** A flag that controls whether or not events are propogated. */ private boolean propagateEvents = true; /** A flag that controls auto pruning. */ private boolean autoPrune = false; /** The delegate used to control the interval width. */ private IntervalXYDelegate intervalDelegate; /** * Creates a new empty dataset. */ public DefaultTableXYDataset() { this(false); } /** * Creates a new empty dataset. * * @param autoPrune a flag that controls whether or not x-values are * removed whenever the corresponding y-values are all * <code>null</code>. */ public DefaultTableXYDataset(boolean autoPrune) { this.autoPrune = autoPrune; this.data = new ArrayList(); this.xPoints = new HashSet(); this.intervalDelegate = new IntervalXYDelegate(this, false); addChangeListener(this.intervalDelegate); } /** * Returns the flag that controls whether or not x-values are removed from * the dataset when the corresponding y-values are all <code>null</code>. * * @return A boolean. 
*/ public boolean isAutoPrune() { return this.autoPrune; } /** * Adds a series to the collection and sends a {@link DatasetChangeEvent} * to all registered listeners. The series should be configured to NOT * allow duplicate x-values. * * @param series the series (<code>null</code> not permitted). */ public void addSeries(XYSeries series) { if (series == null) { throw new IllegalArgumentException("Null 'series' argument."); } if (series.getAllowDuplicateXValues()) { throw new IllegalArgumentException( "Cannot accept XYSeries that allow duplicate values. " + "Use XYSeries(seriesName, <sort>, false) constructor." ); } updateXPoints(series); this.data.add(series); series.addChangeListener(this); fireDatasetChanged(); } /** * Adds any unique x-values from 'series' to the dataset, and also adds any * x-values that are in the dataset but not in 'series' to the series. * * @param series the series (<code>null</code> not permitted). */ private void updateXPoints(XYSeries series) { if (series == null) { throw new IllegalArgumentException("Null 'series' not permitted."); } HashSet seriesXPoints = new HashSet(); boolean savedState = this.propagateEvents; this.propagateEvents = false; for (int itemNo = 0; itemNo < series.getItemCount(); itemNo++) { Number xValue = series.getX(itemNo); seriesXPoints.add(xValue); if (!this.xPoints.contains(xValue)) { this.xPoints.add(xValue); int seriesCount = this.data.size(); for (int seriesNo = 0; seriesNo < seriesCount; seriesNo++) { XYSeries dataSeries = (XYSeries) this.data.get(seriesNo); if (!dataSeries.equals(series)) { dataSeries.add(xValue, null); } } } } Iterator iterator = this.xPoints.iterator(); while (iterator.hasNext()) { Number xPoint = (Number) iterator.next(); if (!seriesXPoints.contains(xPoint)) { series.add(xPoint, null); } } this.propagateEvents = savedState; } /** * Updates the x-values for all the series in the dataset. 
*/ public void updateXPoints() { this.propagateEvents = false; for (int s = 0; s < this.data.size(); s++) { updateXPoints((XYSeries) this.data.get(s)); } if (this.autoPrune) { prune(); } this.propagateEvents = true; } /** * Returns the number of series in the collection. * * @return The series count. */ public int getSeriesCount() { return this.data.size(); } /** * Returns the number of x values in the dataset. * * @return The number of x values in the dataset. */ public int getItemCount() { if (this.xPoints == null) { return 0; } else { return this.xPoints.size(); } } /** * Returns a series. * * @param series the series (zero-based index). * * @return The series (never <code>null</code>). */ public XYSeries getSeries(int series) { if ((series < 0) || (series >= getSeriesCount())) { throw new IllegalArgumentException("Index outside valid range."); } return (XYSeries) this.data.get(series); } /** * Returns the key for a series. * * @param series the series (zero-based index). * * @return The key for a series. */ public Comparable getSeriesKey(int series) { // check arguments...delegated return getSeries(series).getKey(); } /** * Returns the number of items in the specified series. * * @param series the series (zero-based index). * * @return The number of items in the specified series. */ public int getItemCount(int series) { // check arguments...delegated return getSeries(series).getItemCount(); } /** * Returns the x-value for the specified series and item. * * @param series the series (zero-based index). * @param item the item (zero-based index). * * @return The x-value for the specified series and item. */ public Number getX(int series, int item) { XYSeries s = (XYSeries) this.data.get(series); return s.getX(item); } /** * Returns the starting X value for the specified series and item. * * @param series the series (zero-based index). * @param item the item (zero-based index). * * @return The starting X value. 
*/ public Number getStartX(int series, int item) { return this.intervalDelegate.getStartX(series, item); } /** * Returns the ending X value for the specified series and item. * * @param series the series (zero-based index). * @param item the item (zero-based index). * * @return The ending X value. */ public Number getEndX(int series, int item) { return this.intervalDelegate.getEndX(series, item); } /** * Returns the y-value for the specified series and item. * * @param series the series (zero-based index). * @param index the index of the item of interest (zero-based). * * @return The y-value for the specified series and item (possibly * <code>null</code>). */ public Number getY(int series, int index) { XYSeries s = (XYSeries) this.data.get(series); return s.getY(index); } /** * Returns the starting Y value for the specified series and item. * * @param series the series (zero-based index). * @param item the item (zero-based index). * * @return The starting Y value. */ public Number getStartY(int series, int item) { return getY(series, item); } /** * Returns the ending Y value for the specified series and item. * * @param series the series (zero-based index). * @param item the item (zero-based index). * * @return The ending Y value. */ public Number getEndY(int series, int item) { return getY(series, item); } /** * Removes all the series from the collection and sends a * {@link DatasetChangeEvent} to all registered listeners. */ public void removeAllSeries() { // Unregister the collection as a change listener to each series in // the collection. for (int i = 0; i < this.data.size(); i++) { XYSeries series = (XYSeries) this.data.get(i); series.removeChangeListener(this); } // Remove all the series from the collection and notify listeners. this.data.clear(); this.xPoints.clear(); fireDatasetChanged(); } /** * Removes a series from the collection and sends a * {@link DatasetChangeEvent} to all registered listeners. 
* * @param series the series (<code>null</code> not permitted). */ public void removeSeries(XYSeries series) { // check arguments... if (series == null) { throw new IllegalArgumentException("Null 'series' argument."); } // remove the series... if (this.data.contains(series)) { series.removeChangeListener(this); this.data.remove(series); if (this.data.size() == 0) { this.xPoints.clear(); } fireDatasetChanged(); } } /** * Removes a series from the collection and sends a * {@link DatasetChangeEvent} to all registered listeners. * * @param series the series (zero based index). */ public void removeSeries(int series) { // check arguments... if ((series < 0) || (series > getSeriesCount())) { throw new IllegalArgumentException("Index outside valid range."); } // fetch the series, remove the change listener, then remove the series. XYSeries s = (XYSeries) this.data.get(series); s.removeChangeListener(this); this.data.remove(series); if (this.data.size() == 0) { this.xPoints.clear(); } else if (this.autoPrune) { prune(); } fireDatasetChanged(); } /** * Removes the items from all series for a given x value. * * @param x the x-value. */ public void removeAllValuesForX(Number x) { if (x == null) { throw new IllegalArgumentException("Null 'x' argument."); } boolean savedState = this.propagateEvents; this.propagateEvents = false; for (int s = 0; s < this.data.size(); s++) { XYSeries series = (XYSeries) this.data.get(s); series.remove(x); } this.propagateEvents = savedState; this.xPoints.remove(x); fireDatasetChanged(); } /** * Returns <code>true</code> if all the y-values for the specified x-value * are <code>null</code> and <code>false</code> otherwise. * * @param x the x-value. * * @return A boolean. 
*/ protected boolean canPrune(Number x) { for (int s = 0; s < this.data.size(); s++) { XYSeries series = (XYSeries) this.data.get(s); if (series.getY(series.indexOf(x)) != null) { return false; } } return true; } /** * Removes all x-values for which all the y-values are <code>null</code>. */ public void prune() { HashSet hs = (HashSet) this.xPoints.clone(); Iterator iterator = hs.iterator(); while (iterator.hasNext()) { Number x = (Number) iterator.next(); if (canPrune(x)) { removeAllValuesForX(x); } } } /** * This method receives notification when a series belonging to the dataset * changes. It responds by updating the x-points for the entire dataset * and sending a {@link DatasetChangeEvent} to all registered listeners. * * @param event information about the change. */ public void seriesChanged(SeriesChangeEvent event) { if (this.propagateEvents) { updateXPoints(); fireDatasetChanged(); } } /** * Tests this collection for equality with an arbitrary object. * * @param obj the object (<code>null</code> permitted). * * @return A boolean. */ public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof DefaultTableXYDataset)) { return false; } DefaultTableXYDataset that = (DefaultTableXYDataset) obj; if (this.autoPrune != that.autoPrune) { return false; } if (this.propagateEvents != that.propagateEvents) { return false; } if (!this.intervalDelegate.equals(that.intervalDelegate)) { return false; } if (!ObjectUtilities.equal(this.data, that.data)) { return false; } return true; } /** * Returns a hash code. * * @return A hash code. */ public int hashCode() { int result; result = (this.data != null ? this.data.hashCode() : 0); result = 29 * result + (this.xPoints != null ? this.xPoints.hashCode() : 0); result = 29 * result + (this.propagateEvents ? 1 : 0); result = 29 * result + (this.autoPrune ? 1 : 0); return result; } /** * Returns an independent copy of this dataset. * * @return A clone. 
* * @throws CloneNotSupportedException if there is some reason that cloning * cannot be performed. */ public Object clone() throws CloneNotSupportedException { DefaultTableXYDataset clone = (DefaultTableXYDataset) super.clone(); int seriesCount = this.data.size(); clone.data = new java.util.ArrayList(seriesCount); for (int i = 0; i < seriesCount; i++) { XYSeries series = (XYSeries) this.data.get(i); clone.data.add(series.clone()); } clone.intervalDelegate = new IntervalXYDelegate(clone); // need to configure the intervalDelegate to match the original clone.intervalDelegate.setFixedIntervalWidth(getIntervalWidth()); clone.intervalDelegate.setAutoWidth(isAutoWidth()); clone.intervalDelegate.setIntervalPositionFactor( getIntervalPositionFactor()); clone.updateXPoints(); return clone; } /** * Returns the minimum x-value in the dataset. * * @param includeInterval a flag that determines whether or not the * x-interval is taken into account. * * @return The minimum value. */ public double getDomainLowerBound(boolean includeInterval) { return this.intervalDelegate.getDomainLowerBound(includeInterval); } /** * Returns the maximum x-value in the dataset. * * @param includeInterval a flag that determines whether or not the * x-interval is taken into account. * * @return The maximum value. */ public double getDomainUpperBound(boolean includeInterval) { return this.intervalDelegate.getDomainUpperBound(includeInterval); } /** * Returns the range of the values in this dataset's domain. * * @param includeInterval a flag that determines whether or not the * x-interval is taken into account. * * @return The range. */ public Range getDomainBounds(boolean includeInterval) { if (includeInterval) { return this.intervalDelegate.getDomainBounds(includeInterval); } else { return DatasetUtilities.iterateDomainBounds(this, includeInterval); } } /** * Returns the interval position factor. * * @return The interval position factor. 
*/ public double getIntervalPositionFactor() { return this.intervalDelegate.getIntervalPositionFactor(); } /** * Sets the interval position factor. Must be between 0.0 and 1.0 inclusive. * If the factor is 0.5, the gap is in the middle of the x values. If it * is lesser than 0.5, the gap is farther to the left and if greater than * 0.5 it gets farther to the right. * * @param d the new interval position factor. */ public void setIntervalPositionFactor(double d) { this.intervalDelegate.setIntervalPositionFactor(d); fireDatasetChanged(); } /** * returns the full interval width. * * @return The interval width to use. */ public double getIntervalWidth() { return this.intervalDelegate.getIntervalWidth(); } /** * Sets the interval width to a fixed value, and sends a * {@link DatasetChangeEvent} to all registered listeners. * * @param d the new interval width (must be > 0). */ public void setIntervalWidth(double d) { this.intervalDelegate.setFixedIntervalWidth(d); fireDatasetChanged(); } /** * Returns whether the interval width is automatically calculated or not. * * @return A flag that determines whether or not the interval width is * automatically calculated. */ public boolean isAutoWidth() { return this.intervalDelegate.isAutoWidth(); } /** * Sets the flag that indicates whether the interval width is automatically * calculated or not. * * @param b a boolean. */ public void setAutoWidth(boolean b) { this.intervalDelegate.setAutoWidth(b); fireDatasetChanged(); } }
package javax.enterprise.inject;
package ucar.unidata.test.util;

import org.junit.Test;
import ucar.nc2.util.DiskCache2;

import java.io.File;

/**
 * Test DiskCache2
 *
 * @author caron
 * @since 7/21/2014
 */
public class TestDiskCache {

    // Asks the default cache for a file that does not exist and verifies it is
    // not writable. NOTE(review): uses the JVM 'assert' keyword, so the check
    // only runs when assertions are enabled (-ea).
    @Test
    public void testNotExist() throws Exception {
        DiskCache2 cache = DiskCache2.getDefault();
        File file = cache.getFile("gfs.t00z.master.grbf00.10m.uv.grib2");  // not exist
        System.out.printf("canWrite= %s%n", file.canWrite());
        assert !file.canWrite();
    }

    // NOTE(review): no @Test annotation - presumably disabled on purpose, since
    // it depends on a local test-data directory (TestDir.cdmUnitTestDir).
    // The method name contains a typo ("Reletive"); left as-is because renaming
    // is out of scope for a comment-only pass.
    public void testReletivePath() throws Exception {
        // save the original working directory so it can be restored afterwards
        String org = System.getProperty("user.dir");
        try {
            System.setProperty("user.dir", TestDir.cdmUnitTestDir);
            System.out.printf("user.dir = %s%n", System.getProperty("user.dir"));
            File pwd = new File(System.getProperty("user.dir"));
            String filename = "transforms/albers.nc";
            // resolve the relative filename against the (temporarily changed) cwd
            File rel2 = new File(pwd, filename);
            System.out.printf("abs = %s%n", rel2.getCanonicalFile());
            assert rel2.exists();
            assert rel2.canWrite();
        }
        finally {
            // always restore user.dir, even if an assertion above fails
            System.setProperty("user.dir", org);
        }
    }
}
package org.jetel.component; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.StringReader; import java.lang.ref.WeakReference; import java.net.URL; import java.nio.channels.Channels; import java.nio.channels.ReadableByteChannel; import java.nio.charset.Charset; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.dom4j.io.SAXContentHandler; import org.jetel.data.DataField; import org.jetel.data.DataRecord; import org.jetel.data.Defaults; import org.jetel.data.StringDataField; import org.jetel.data.sequence.Sequence; import org.jetel.exception.AttributeNotFoundException; import org.jetel.exception.BadDataFormatException; import org.jetel.exception.ComponentNotReadyException; import org.jetel.exception.ConfigurationProblem; import org.jetel.exception.ConfigurationStatus; import org.jetel.exception.ConfigurationStatus.Priority; import org.jetel.exception.ConfigurationStatus.Severity; import org.jetel.exception.JetelException; import org.jetel.exception.XMLConfigurationException; import org.jetel.graph.Node; import org.jetel.graph.OutputPort; import org.jetel.graph.Result; import org.jetel.graph.TransformationGraph; import org.jetel.metadata.DataFieldMetadata; import org.jetel.metadata.DataRecordMetadata; import org.jetel.sequence.PrimitiveSequence; import org.jetel.util.AutoFilling; import 
org.jetel.util.ReadableChannelIterator; import org.jetel.util.file.FileURLParser; import org.jetel.util.file.FileUtils; import org.jetel.util.property.ComponentXMLAttributes; import org.jetel.util.property.PropertyRefResolver; import org.jetel.util.property.RefResFlag; import org.jetel.util.string.StringUtils; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.Attributes; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; /** * <h3>XMLExtract Component</h3> * * <!-- Provides the logic to parse a xml file and filter to different ports based on * a matching element. The element and all children will be turned into a * Data record --> * * <table border="1"> * <th>Component:</th> * <tr><td><h4><i>Name:</i></h4></td> * <td>XMLExtract</td></tr> * <tr><td><h4><i>Category:</i></h4></td> * <td></td></tr> * <tr><td><h4><i>Description:</i></h4></td> * <td>Provides the logic to parse a xml file and filter to different ports based on * a matching element. The element and all children will be turned into a * Data record.</td></tr> * <tr><td><h4><i>Inputs:</i></h4></td> * <td>0</td></tr> * <tr><td><h4><i>Outputs:</i></h4></td> * <td>Output port[0] defined/connected. Depends on mapping definition.</td></tr> * <tr><td><h4><i>Comment:</i></h4></td> * <td></td></tr> * </table> * <br> * <table border="1"> * <th>XML attributes:</th> * <tr><td><b>type</b></td><td>"XML_EXTRACT"</td></tr> * <tr><td><b>id</b></td><td>component identification</td> * <tr><td><b>sourceUri</b></td><td>location of source XML data to process</td> * <tr><td><b>useNestedNodes</b></td><td><b>true</b> if nested unmapped XML elements will be used as data source; <b>false</b> if will be ignored</td> * <tr><td><b>mapping</b></td><td>&lt;mapping&gt;</td> * </tr> * </table> * * Provides the logic to parse a xml file and filter to different ports based on * a matching element. 
The element and all children will be turned into a
* Data record.<br>
* Mapping attribute contains the mapping hierarchy in XML form. DTD of mapping:<br>
* <code>
* &lt;!ELEMENT Mappings (Mapping*)&gt;<br>
*
* &lt;!ELEMENT Mapping (Mapping*)&gt;<br>
* &lt;!ATTLIST Mapping<br>
* &nbsp;element NMTOKEN #REQUIRED<br>
* &nbsp;&nbsp;//name of the bound XML element<br>
* &nbsp;outPort NMTOKEN #IMPLIED<br>
* &nbsp;&nbsp;//name of the output port for this mapped XML element<br>
* &nbsp;parentKey NMTOKEN #IMPLIED<br>
* &nbsp;&nbsp;//field name of the parent record, which is copied into the field of the current record<br>
* &nbsp;&nbsp;//specified by the generatedKey attribute<br>
* &nbsp;generatedKey NMTOKEN #IMPLIED<br>
* &nbsp;&nbsp;//see parentKey comment<br>
* &nbsp;sequenceField NMTOKEN #IMPLIED<br>
* &nbsp;&nbsp;//field name which will be filled with a value from a sequence<br>
* &nbsp;&nbsp;//(can be used to generate a new key field for relative records)<br>
* &nbsp;sequenceId NMTOKEN #IMPLIED<br>
* &nbsp;&nbsp;//a sequence id may be supplied to fill the field defined in the sequenceField attribute<br>
* &nbsp;&nbsp;//(if this attribute is omitted, a non-persistent PrimitiveSequence will be used)<br>
* &nbsp;xmlFields NMTOKEN #IMPLIED<br>
* &nbsp;&nbsp;//comma-separated XML element names, which will be mapped onto the corresponding record fields<br>
* &nbsp;&nbsp;//defined in the cloverFields attribute<br>
* &nbsp;cloverFields NMTOKEN #IMPLIED<br>
* &nbsp;&nbsp;//see xmlFields comment<br>
* &gt;<br>
* </code>
* All nested XML elements will be recognized as record fields and mapped by name
* (except elements serviced by other nested Mapping elements). If you prefer a mapping
* between XML fields and clover fields other than 'by name', use the xmlFields and
* cloverFields attributes to set up a custom field mapping. The 'useNestedNodes' component
* attribute defines whether children of nested XML elements will also be mapped onto the current clover record.
* Record from nested Mapping element could be connected via key fields with parent record produced * by parent Mapping element (see parentKey and generatedKey attribute notes). * In case that fields are unsuitable for key composing, extractor could fill * one or more fields with values comming from sequence (see sequenceField and sequenceId attribute). * * For example: given an xml file:<br> * <code> * &lt;myXML&gt; <br> * &nbsp;&lt;phrase&gt; <br> * &nbsp;&nbsp;&lt;text&gt;hello&lt;/text&gt; <br> * &nbsp;&nbsp;&lt;localization&gt; <br> * &nbsp;&nbsp;&nbsp;&lt;chinese&gt;how allo yee dew ying&lt;/chinese&gt; <br> * &nbsp;&nbsp;&nbsp;&lt;german&gt;wie gehts&lt;/german&gt; <br> * &nbsp;&nbsp;&lt;/localization&gt; <br> * &nbsp;&lt;/phrase&gt; <br> * &nbsp;&lt;locations&gt; <br> * &nbsp;&nbsp;&lt;location&gt; <br> * &nbsp;&nbsp;&nbsp;&lt;name&gt;Stormwind&lt;/name&gt; <br> * &nbsp;&nbsp;&nbsp;&lt;description&gt;Beautiful European architecture with a scenic canal system.&lt;/description&gt; <br> * &nbsp;&nbsp;&lt;/location&gt; <br> * &nbsp;&nbsp;&lt;location&gt; <br> * &nbsp;&nbsp;&nbsp;&lt;name&gt;Ironforge&lt;/name&gt; <br> * &nbsp;&nbsp;&nbsp;&lt;description&gt;Economic capital of the region with a high population density.&lt;/description&gt; <br> * &nbsp;&nbsp;&lt;/location&gt; <br> * &nbsp;&lt;/locations&gt; <br> * &nbsp;&lt;someUselessElement&gt;...&lt;/someUselessElement&gt; <br> * &nbsp;&lt;someOtherUselessElement/&gt; <br> * &nbsp;&lt;phrase&gt; <br> * &nbsp;&nbsp;&lt;text&gt;bye&lt;/text&gt; <br> * &nbsp;&nbsp;&lt;localization&gt; <br> * &nbsp;&nbsp;&nbsp;&lt;chinese&gt;she yee lai ta&lt;/chinese&gt; <br> * &nbsp;&nbsp;&nbsp;&lt;german&gt;aufweidersehen&lt;/german&gt; <br> * &nbsp;&nbsp;&lt;/localization&gt; <br> * &nbsp;&lt;/phrase&gt; <br> * &lt;/myXML&gt; <br> * </code> Suppose we want to pull out "phrase" as one datarecord, * "localization" as another datarecord, and "location" as the final datarecord * and ignore the useless elements. 
First we define the metadata for the * records. Then create the following mapping in the graph: <br> * <code> * &lt;node id="myId" type="com.lrn.etl.job.component.XMLExtract"&gt; <br> * &nbsp;&lt;attr name="mapping"&gt;<br> * &nbsp;&nbsp;&lt;Mapping element="phrase" outPort="0" sequenceField="id"&gt;<br> * &nbsp;&nbsp;&nbsp;&lt;Mapping element="localization" outPort="1" parentKey="id" generatedKey="parent_id"/&gt;<br> * &nbsp;&nbsp;&lt;/Mapping&gt; <br> * &nbsp;&nbsp;&lt;Mapping element="location" outPort="2"/&gt;<br> * &nbsp;&lt;/attr&gt;<br> * &lt;/node&gt;<br> * </code> Port 0 will get the DataRecords:<br> * 1) id=1, text=hello<br> * 2) id=2, text=bye<br> * Port 1 will get:<br> * 1) parent_id=1, chinese=how allo yee dew ying, german=wie gehts<br> * 2) parent_id=2, chinese=she yee lai ta, german=aufwiedersehen<br> * Port 2 will get:<br> * 1) name=Stormwind, description=Beautiful European architecture with a scenic * canal system.<br> * 2) name=Ironforge, description=Economic capital of the region with a high * population density.<br> * <hr> * Issue: Enclosing elements having values are not supported.<br> * i.e. <br> * <code> * &lt;x&gt; <br> * &lt;y&gt;z&lt;/y&gt;<br> * xValue<br> * &lt;/x&gt;<br> * </code> there will be no column x with value xValue.<br> * Issue: Namespaces are not considered.<br> * i.e. <br> * <code> * &lt;ns1:x&gt;xValue&lt;/ns1:x&gt;<br> * &lt;ns2:x&gt;xValue2&lt;/ns2:x&gt;<br> * </code> will be considered the same x. 
*
* @author KKou
*/
public class XMLExtract extends Node {

    // Logger
    private static final Log LOG = LogFactory.getLog(XMLExtract.class);

    // xml attributes — names of the component's own configuration attributes
    public static final String XML_SOURCEURI_ATTRIBUTE = "sourceUri";
    private static final String XML_USENESTEDNODES_ATTRIBUTE = "useNestedNodes";
    private static final String XML_MAPPING_ATTRIBUTE = "mapping";
    private static final String XML_CHARSET_ATTRIBUTE = "charset";

    // mapping attributes — element/attribute names used inside the <Mapping> definition
    private static final String XML_MAPPING = "Mapping";
    private final static String XML_MAPPING_URL_ATTRIBUTE = "mappingURL";
    private static final String XML_ELEMENT = "element";
    private static final String XML_OUTPORT = "outPort";
    private static final String XML_PARENTKEY = "parentKey";
    private static final String XML_GENERATEDKEY = "generatedKey";
    private static final String XML_XMLFIELDS = "xmlFields";
    private static final String XML_CLOVERFIELDS = "cloverFields";
    private static final String XML_SEQUENCEFIELD = "sequenceField";
    private static final String XML_SEQUENCEID = "sequenceId";
    private static final String XML_SKIP_ROWS_ATTRIBUTE = "skipRows";
    private static final String XML_NUMRECORDS_ATTRIBUTE = "numRecords";
    private static final String XML_TRIM_ATTRIBUTE = "trim";
    private static final String XML_VALIDATE_ATTRIBUTE = "validate";
    private static final String XML_XML_FEATURES_ATTRIBUTE = "xmlFeatures";
    private static final String XML_NAMESPACE_BINDINGS_ATTRIBUTE = "namespaceBindings";

    /** MiSho Experimental Templates */
    private static final String XML_TEMPLATE_ID = "templateId";
    private static final String XML_TEMPLATE_REF = "templateRef";
    private static final String XML_TEMPLATE_DEPTH = "nestedDepth";

    // separators for the xmlFeatures attribute ("name:=value;name:=value");
    // note: 'DELIMETER' misspelling kept — the identifier is established in this file
    private static final String FEATURES_DELIMETER = ";";
    private static final String FEATURES_ASSIGN = ":=";

    // component name
    public final static String COMPONENT_TYPE = "XML_EXTRACT";

    // from which input port to read
    private final static int INPUT_PORT = 0;

    // tokens for referencing a parent mapping's fields from a child mapping
    public static final String PARENT_MAPPING_REFERENCE_PREFIX = "..";
    public static final String PARENT_MAPPING_REFERENCE_SEPARATOR = "/";
    public static final String PARENT_MAPPING_REFERENCE_PREFIX_WITHSEPARATOR = PARENT_MAPPING_REFERENCE_PREFIX + PARENT_MAPPING_REFERENCE_SEPARATOR;
    public static final String ELEMENT_VALUE_REFERENCE = "{}.";

    // Map of elementName => output port
    private Map<String, Mapping> m_elementPortMap = new HashMap<String, Mapping>();

    // Where the XML comes from
    private InputSource m_inputSource;

    // input file
    private String inputFile;
    private ReadableChannelIterator readableChannelIterator;

    // can I use nested nodes for mapping processing?
    private boolean useNestedNodes = true;

    // global skip and numRecords
    private int skipRows = 0; // do not skip rows by default
    private int numRecords = -1; // -1 means no limit on the number of records

    // autofilling support
    private AutoFilling autoFilling = new AutoFilling();

    private String xmlFeatures;     // raw xmlFeatures attribute value, parsed with FEATURES_DELIMETER/FEATURES_ASSIGN
    private boolean validate;       // whether the XML parser should validate the input
    private String charset = Defaults.DataParser.DEFAULT_CHARSET_DECODER;
    private boolean trim = true;    // trim whitespace around extracted values by default
    private String mappingURL;      // external mapping definition location (alternative to inline 'mapping')
    private String mapping;         // inline mapping definition
    private NodeList mappingNodes;  // parsed <Mapping> elements

    // templateId -> Mapping prototypes declared via the experimental template
    // attributes — presumably looked up by templateRef; TODO confirm against mapping processing
    private TreeMap<String, Mapping> declaredTemplates = new TreeMap<String, Mapping>();

    /**
     * Namespace bindings relate namespace prefix used in Mapping specification
     * and the namespace URI used by the namespace declaration in processed XML document
     */
    private HashMap<String,String> namespaceBindings = new HashMap<String,String>();

    /**
     * SAX Handler that will dispatch the elements to the different ports.
*/ private class SAXHandler extends SAXContentHandler { // depth of the element, used to determine when we hit the matching // close element private int m_level = 0; // flag set if we saw characters, otherwise don't save the column (used // to set null values) private boolean m_hasCharacters = false; //flag to skip text value immediately after end xml tag, for instance //<root> // <subtag>text</subtag> // another text //</root> //"another text" will be ignored private boolean m_grabCharacters = true; // buffer for node value private StringBuilder m_characters = new StringBuilder(); // the active mapping private Mapping m_activeMapping = null; private Set<String> cloverAttributes; /** * @param cloverAttributes */ public SAXHandler(Set<String> cloverAttributes) { super(); this.cloverAttributes = cloverAttributes; } /** * @see org.xml.sax.ContentHandler#startElement(java.lang.String, java.lang.String, java.lang.String, org.xml.sax.Attributes) */ public void startElement(String namespaceURI, String localName, String qualifiedName, Attributes attributes) throws SAXException { m_level++; m_grabCharacters = true; // store value of parent of currently starting element (if appropriate) if (m_activeMapping != null && m_hasCharacters && m_level == m_activeMapping.getLevel() + 1) { if (m_activeMapping.descendantReferences.containsKey(ELEMENT_VALUE_REFERENCE)) { m_activeMapping.descendantReferences.put(ELEMENT_VALUE_REFERENCE, trim ? 
m_characters.toString().trim() : m_characters.toString()); } processCharacters(null,null, true); } // Regardless of starting element type, reset the length of the buffer and flag m_characters.setLength(0); m_hasCharacters = false; final String universalName = augmentURI(namespaceURI) + localName; Mapping mapping = null; if (m_activeMapping == null) { mapping = (Mapping) m_elementPortMap.get(universalName); } else if (useNestedNodes || m_activeMapping.getLevel() == m_level - 1) { mapping = (Mapping) m_activeMapping.getChildMapping(universalName); } if (mapping != null) { // We have a match, start converting all child nodes into // the DataRecord structure m_activeMapping = mapping; m_activeMapping.setLevel(m_level); // clear cached values of xml fields referenced by descendants (there may be values from previously read element of this m_activemapping) for (Entry<String, String> e : m_activeMapping.descendantReferences.entrySet()) { e.setValue(null); } if (mapping.getOutRecord() != null) { //sequence fields initialization String sequenceFieldName = m_activeMapping.getSequenceField(); if(sequenceFieldName != null && m_activeMapping.getOutRecord().hasField(sequenceFieldName)) { Sequence sequence = m_activeMapping.getSequence(); DataField sequenceField = m_activeMapping.getOutRecord().getField(sequenceFieldName); if(sequenceField.getType() == DataFieldMetadata.INTEGER_FIELD) { sequenceField.setValue(sequence.nextValueInt()); } else if(sequenceField.getType() == DataFieldMetadata.LONG_FIELD || sequenceField.getType() == DataFieldMetadata.DECIMAL_FIELD || sequenceField.getType() == DataFieldMetadata.NUMERIC_FIELD) { sequenceField.setValue(sequence.nextValueLong()); } else { sequenceField.fromString(sequence.nextValueString()); } } m_activeMapping.prepareDoMap(); m_activeMapping.incCurrentRecord4Mapping(); // This is the closing element of the matched element that // triggered the processing // That should be the end of this record so send it off to the // next Node if 
(runIt) { try { DataRecord outRecord = m_activeMapping.getOutRecord(); String[] generatedKey = m_activeMapping.getGeneratedKey(); String[] parentKey = m_activeMapping.getParentKey(); if (parentKey != null) { //if generatedKey is a single array, all parent keys are concatenated into generatedKey field //I know it is ugly code... if(generatedKey.length != parentKey.length && generatedKey.length != 1) { LOG.warn(getId() + ": XML Extract Mapping's generatedKey and parentKey attribute has different number of field."); m_activeMapping.setGeneratedKey(null); m_activeMapping.setParentKey(null); } else { for(int i = 0; i < parentKey.length; i++) { boolean existGeneratedKeyField = (outRecord != null) && (generatedKey.length == 1 ? outRecord.hasField(generatedKey[0]) : outRecord.hasField(generatedKey[i])); boolean existParentKeyField = m_activeMapping.getParent().getOutRecord() != null && m_activeMapping.getParent().getOutRecord().hasField(parentKey[i]); if (!existGeneratedKeyField) { LOG.warn(getId() + ": XML Extract Mapping's generatedKey field was not found. generatedKey: " + (generatedKey.length == 1 ? generatedKey[0] : generatedKey[i]) + " of element " + m_activeMapping.m_element + ", outPort: " + m_activeMapping.m_outPort); m_activeMapping.setGeneratedKey(null); m_activeMapping.setParentKey(null); } else if (!existParentKeyField) { LOG.warn(getId() + ": XML Extract Mapping's parentKey field was not found. parentKey: " + parentKey[i] + " of element " + m_activeMapping.m_element + ", outPort: " + m_activeMapping.m_outPort); m_activeMapping.setGeneratedKey(null); m_activeMapping.setParentKey(null); } else { // both outRecord and m_activeMapping.getParrent().getOutRecord are not null // here, because of if-else if-else chain DataField generatedKeyField = generatedKey.length == 1 ? 
outRecord.getField(generatedKey[0]) : outRecord.getField(generatedKey[i]); DataField parentKeyField = m_activeMapping.getParent().getOutRecord().getField(parentKey[i]); if(generatedKey.length != parentKey.length) { if(generatedKeyField.getType() != DataFieldMetadata.STRING_FIELD) { LOG.warn(getId() + ": XML Extract Mapping's generatedKey field has to be String type (keys are concatened to this field)."); m_activeMapping.setGeneratedKey(null); m_activeMapping.setParentKey(null); } else { ((StringDataField) generatedKeyField).append(parentKeyField.toString()); } } else { generatedKeyField.setValue(parentKeyField.getValue()); } } } } } } catch (Exception ex) { throw new SAXException(" for output port number '" + m_activeMapping.getOutPort() + "'. Check also parent mapping. ", ex); } // Fill fields from parent record (if any mapped) if (m_activeMapping.hasFieldsFromAncestor()) { for (AncestorFieldMapping afm : m_activeMapping.getFieldsFromAncestor()) { if (m_activeMapping.getOutRecord().hasField(afm.currentField) && afm.ancestor != null) { m_activeMapping.getOutRecord().getField(afm.currentField).fromString(afm.ancestor.descendantReferences.get(afm.ancestorField)); } } } } else { throw new SAXException("Stop Signaled"); } } } if(m_activeMapping != null //used only if we right now recognize new mapping element or if we want to use nested unmapped nodes as a source of data && (useNestedNodes || mapping != null)) { // In a matched element (i.e. we are creating a DataRecord) // Store all attributes as columns (this hasn't been // used/tested) for (int i = 0; i < attributes.getLength(); i++) { final String attributeLocalName = attributes.getLocalName(i); String attrName = augmentURI(attributes.getURI(i)) + attributeLocalName; if (m_activeMapping.descendantReferences.containsKey(attrName)) { String val = attributes.getValue(i); m_activeMapping.descendantReferences.put(attrName, trim ? 
val.trim() : val); } //use fields mapping final Map<String, String> xmlCloverMap = m_activeMapping.getXml2CloverFieldsMap(); String fieldName = null; if (xmlCloverMap != null) { if (xmlCloverMap.containsKey(attrName)) { fieldName = xmlCloverMap.get(attrName); } else if (m_activeMapping.explicitCloverFields.contains(attrName)) { continue; // don't do implicit mapping if clover field is used in an explicit mapping } } if (fieldName == null) { // we could not find mapping using the universal name -> try implicit mapping using local name fieldName = attributeLocalName; } // TODO Labels replace: if (m_activeMapping.getOutRecord() != null && m_activeMapping.getOutRecord().hasField(fieldName)) { String val = attributes.getValue(i); m_activeMapping.getOutRecord().getField(fieldName).fromString(trim ? val.trim() : val); } // TODO Labels end replace // TODO Labels replace with: //DataRecord outRecord = m_activeMapping.getOutRecord(); //DataField field = null; //if (outRecord != null) { // if (outRecord.hasLabeledField(fieldName)) { // field = outRecord.getFieldByLabel(fieldName); //if (field != null) { // String val = attributes.getValue(i); // field.fromString(trim ? 
val.trim() : val); } } } /** * @see org.xml.sax.ContentHandler#characters(char[], int, int) */ public void characters(char[] data, int offset, int length) throws SAXException { // Save the characters into the buffer, endElement will store it into the field if (m_activeMapping != null && m_grabCharacters) { m_characters.append(data, offset, length); m_hasCharacters = true; } } /** * @see org.xml.sax.ContentHandler#endElement(java.lang.String, java.lang.String, java.lang.String) */ public void endElement(String namespaceURI, String localName, String qualifiedName) throws SAXException { if (m_activeMapping != null) { String fullName = "{" + namespaceURI + "}" + localName; // cache characters value if the xml field is referenced by descendant if (m_level - 1 <= m_activeMapping.getLevel() && m_activeMapping.descendantReferences.containsKey(fullName)) { m_activeMapping.descendantReferences.put(fullName, trim ? m_characters.toString().trim() : m_characters.toString()); } processCharacters(namespaceURI, localName, m_level == m_activeMapping.getLevel()); // Regardless of whether this was saved, reset the length of the // buffer and flag m_characters.setLength(0); m_hasCharacters = false; } if (m_activeMapping != null && m_level == m_activeMapping.getLevel()) { // This is the closing element of the matched element that // triggered the processing // That should be the end of this record so send it off to the // next Node if (runIt) { try { OutputPort outPort = getOutputPort(m_activeMapping.getOutPort()); if (outPort != null) { // we just ignore creating output, if port is empty (without metadata) or not specified DataRecord outRecord = m_activeMapping.getOutRecord(); // skip or process row if (skipRows > 0) { if (m_activeMapping.getParent() == null) skipRows } else { //check for index of last returned record if(!(numRecords >= 0 && numRecords == autoFilling.getGlobalCounter())) { // set autofilling autoFilling.setAutoFillingFields(outRecord); // can I do the map? 
it depends on skip and numRecords. if (m_activeMapping.doMap()) { //send off record outPort.writeRecord(outRecord); } // if (m_activeMapping.getParent() == null) autoFilling.incGlobalCounter(); } } // resets all child's mappings for skip and numRecords m_activeMapping.resetCurrentRecord4ChildMapping(); // reset record outRecord.reset(); } m_activeMapping = m_activeMapping.getParent(); } catch (Exception ex) { throw new SAXException(ex); } } else { throw new SAXException("Stop Signaled"); } } //text value immediately after end tag element should not be stored m_grabCharacters = false; //ended an element so decrease our depth m_level } /** * Store the characters processed by the characters() call back only if we have corresponding * output field and we are on the right level or we want to use data from nested unmapped nodes */ private void processCharacters(String namespaceURI, String localName, boolean elementValue) { // Create universal name String universalName = null; if (localName != null) { universalName = augmentURI(namespaceURI) + localName; } String fieldName = null; //use fields mapping Map<String, String> xml2clover = m_activeMapping.getXml2CloverFieldsMap(); if (xml2clover != null) { if (elementValue && xml2clover.containsKey(ELEMENT_VALUE_REFERENCE)) { fieldName = xml2clover.get(ELEMENT_VALUE_REFERENCE); } else if (xml2clover.containsKey(universalName)) { fieldName = xml2clover.get(universalName); } else if (m_activeMapping.explicitCloverFields.contains(localName) ) { // XXX: this is nonsense code ... 
the names stored here are field names and the code used XML element names return; // don't do implicit mapping if clover field is used in an explicit mapping } if (fieldName == null) { /* * As we could not find match using qualified name * try mapping the xml element/attribute without the namespace prefix */ fieldName = localName; } } // TODO Labels replace: if (m_activeMapping.getOutRecord() != null && m_activeMapping.getOutRecord().hasField(fieldName) && (useNestedNodes || m_level - 1 <= m_activeMapping.getLevel())) { DataField field = m_activeMapping.getOutRecord().getField(fieldName); // TODO Labels replace end // TODO Labels replace with: //DataRecord outRecord = m_activeMapping.getOutRecord(); //DataField field = null; //if ((outRecord != null) && (useNestedNodes || m_level - 1 <= m_activeMapping.getLevel())) { // if (outRecord.hasLabeledField(fieldName)) { // field = outRecord.getFieldByLabel(fieldName); //if (field != null) { // TODO Labels replace with end // If field is nullable and there's no character data set it to null if (m_hasCharacters) { try { if (field.getValue() != null && cloverAttributes.contains(fieldName)) { field.fromString(trim ? field.getValue().toString().trim() : field.getValue().toString()); } else { field.fromString(trim ? m_characters.toString().trim() : m_characters.toString()); } } catch (BadDataFormatException ex) { // This is a bit hacky here SOOO let me explain... if (field.getType() == DataFieldMetadata.DATE_FIELD) { // XML dateTime format is not supported by the // DateFormat oject that clover uses... // so timezones are unparsable // i.e. XML wants -5:00 but DateFormat wants // -500 // Attempt to munge and retry... 
(there has to // be a better way) try { // Chop off the ":" in the timezone (it HAS // to be at the end) String dateTime = m_characters.substring(0, m_characters.lastIndexOf(":")) + m_characters .substring(m_characters .lastIndexOf(":") + 1); DateFormat format = new SimpleDateFormat(field.getMetadata().getFormatStr()); field.setValue(format.parse(trim ? dateTime.trim() : dateTime)); } catch (Exception ex2) { // Oh well we tried, throw the originating // exception throw ex; } } else { throw ex; } } } else if (field.getType() == DataFieldMetadata.STRING_FIELD // and value wasn't already stored (from characters) && (field.getValue() == null || field.getValue().equals(field.getMetadata().getDefaultValueStr()))) { field.setValue(""); } } } } /** * Mapping holds a single mapping. */ public class Mapping { String m_element; // name of an element for this mapping int m_outPort; // output port number DataRecord m_outRecord; // output record String[] m_parentKey; // parent keys String[] m_generatedKey; // generated keys Map<String, Mapping> m_childMap; // direct children for this mapping WeakReference<Mapping> m_parent; // direct parent mapping int m_level; // original xml tree level (a depth of this element) String m_sequenceField; // sequence field String m_sequenceId; // sequence ID Sequence sequence; // sequence (Simple, Db,..) /** Mapping - xml name -> clover field name */ Map<String, String> xml2CloverFieldsMap = new HashMap<String, String>(); /** List of clover fields (among else) which will be filled from ancestor */ List<AncestorFieldMapping> fieldsFromAncestor; /** Mapping - xml name -> clover field name; these xml fields are referenced by descendant mappings */ Map<String, String> descendantReferences = new HashMap<String, String>(); /** Set of Clover fields which are mapped explicitly (using xmlFields & cloverFields attributes). * It is union of xml2CloverFieldsMap.values() and Clover fields from fieldsFromAncestor list. 
Its purpose: quick lookup */
Set<String> explicitCloverFields = new HashSet<String>();

// skip/numRecords settings for this particular mapping
int skipRecords4Mapping; // number of records to skip for this mapping
int numRecords4Mapping = Integer.MAX_VALUE; // maximum number of records produced by this mapping
// int skipSourceRecords4Mapping; // skip records
// int numSourceRecords4Mapping = -1; // number records
int currentRecord4Mapping; // record counter for this mapping
boolean processSkipOrNumRecords; // true when this mapping (or an ancestor) uses skip/numRecords, i.e. xml elements may be skipped
boolean bDoMap = true; // should I skip an xml element? depends on processSkipOrNumRecords
boolean bReset4CurrentRecord4Mapping; // should I reset submappings?

/**
 * Copy constructor - creates a deep copy of all attributes and children elements.
 */
public Mapping(Mapping otherMapping, Mapping parent) {
    this.m_element = otherMapping.m_element;
    this.m_outPort = otherMapping.m_outPort;
    // defensive copies of the key arrays, so the copy cannot alias the original
    this.m_parentKey = otherMapping.m_parentKey == null ? null
            : Arrays.copyOf(otherMapping.m_parentKey, otherMapping.m_parentKey.length);
    this.m_generatedKey = otherMapping.m_generatedKey == null ? null
            : Arrays.copyOf(otherMapping.m_generatedKey, otherMapping.m_generatedKey.length);
    this.m_sequenceField = otherMapping.m_sequenceField;
    this.m_sequenceId = otherMapping.m_sequenceId;
    this.skipRecords4Mapping = otherMapping.skipRecords4Mapping;
    this.numRecords4Mapping = otherMapping.numRecords4Mapping;
    xml2CloverFieldsMap = new HashMap<String, String>(otherMapping.xml2CloverFieldsMap);
    // Create deep copy of children elements
    if (otherMapping.m_childMap != null) {
        this.m_childMap = new HashMap<String,Mapping>();
        for (String key : otherMapping.m_childMap.keySet()) {
            final Mapping child = new Mapping(otherMapping.m_childMap.get(key), this);
            this.m_childMap.put(key, child);
        }
    }
    if (parent != null) {
        setParent(parent);
        parent.addChildMapping(this);
    }
    if (otherMapping.hasFieldsFromAncestor()) {
        for (AncestorFieldMapping m : otherMapping.getFieldsFromAncestor()) {
            addAncestorFieldMapping(m.originalFieldReference, m.currentField);
        }
    }
}

/**
 * Minimally required information.
 */
public Mapping(String element, int outPort, Mapping parent) {
    m_element = element;
    m_outPort = outPort;
    // parent held weakly — see getParent(), which may return null
    m_parent = new WeakReference<Mapping>(parent);
    if (parent != null) {
        parent.addChildMapping(this);
    }
}

/**
 * Gives the optional attributes parentKey and generatedKey.
 */
public Mapping(String element, int outPort, String parentKey[], String[] generatedKey, Mapping parent) {
    this(element, outPort, parent);
    m_parentKey = parentKey;
    m_generatedKey = generatedKey;
}

/**
 * Gets original xml tree level (the depth of this element).
 * @return
 */
public int getLevel() {
    return m_level;
}

/**
 * Sets original xml tree level (the depth of this element).
 * @param level
 */
public void setLevel(int level) {
    m_level = level;
}

/**
 * Gets direct children for this mapping.
 * @return
 */
public Map<String, Mapping> getChildMap() {
    return m_childMap;
}

/**
 * Gets the direct child mapping registered for the given element name.
* @param element the XML element name to look up
* @return the child mapping, or null when there is none
*/
public Mapping getChildMapping(String element) {
    if (m_childMap == null) {
        return null;
    }
    return m_childMap.get(element);
}

/**
 * Adds a direct child for this mapping.
 * @param mapping
 */
public void addChildMapping(Mapping mapping) {
    // child map is created lazily on first insertion
    if (m_childMap == null) {
        m_childMap = new HashMap<String, Mapping>();
    }
    m_childMap.put(mapping.getElement(), mapping);
}

/**
 * Removes a direct child for this mapping.
 * @param mapping
 */
public void removeChildMapping(Mapping mapping) {
    if (m_childMap == null) {
        return;
    }
    m_childMap.remove(mapping.getElement());
}

/**
 * Gets an element name for this mapping.
 * @return
 */
public String getElement() {
    return m_element;
}

/**
 * Sets an element name for this mapping.
 * @param element
 */
public void setElement(String element) {
    m_element = element;
}

/**
 * Gets generated keys for this mapping.
 * @return
 */
public String[] getGeneratedKey() {
    return m_generatedKey;
}

/**
 * Sets generated keys for this mapping.
 * @param generatedKey
 */
public void setGeneratedKey(String[] generatedKey) {
    m_generatedKey = generatedKey;
}

/**
 * Gets an output port.
 * @return
 */
public int getOutPort() {
    return m_outPort;
}

/**
 * Sets an output port.
 * @param outPort
 */
public void setOutPort(int outPort) {
    m_outPort = outPort;
}

/**
 * Gets mapping - xml name -> clover field name.
 * WARNING: values of this map must be kept in synch with explicitCloverFields;
 * prefer {@link #putXml2CloverFieldMap(String, String)}
 */
public Map<String, String> getXml2CloverFieldsMap() {
    return xml2CloverFieldsMap;
}

/**
 * Registers an explicit xml field -> clover field mapping, keeping
 * explicitCloverFields in synch with xml2CloverFieldsMap.
 * @param xmlField
 * @param cloverField
 */
public void putXml2CloverFieldMap(String xmlField, String cloverField) {
    xml2CloverFieldsMap.put(createQualifiedName(xmlField), cloverField);
    explicitCloverFields.add(cloverField);
}

/**
 * Gets an output record.
 * Lazily creates (on the first call) and returns the output record bound to
 * this mapping's output port. Returns {@code null} when that port has no edge
 * connected — this is a valid configuration (see comment below).
 * @return
 */
public DataRecord getOutRecord() {
    if (m_outRecord == null) {
        OutputPort outPort = getOutputPort(getOutPort());
        if (outPort != null) {
            DataRecordMetadata dataRecordMetadata = outPort.getMetadata();
            // register auto-filling fields of this metadata before the record is built
            autoFilling.addAutoFillingFields(dataRecordMetadata);
            m_outRecord = new DataRecord(dataRecordMetadata);
            m_outRecord.init();
            m_outRecord.reset();
        } // Original code is commented, it is valid to have null port now
        /* else {
            LOG .warn(getId() + ": Port " + getOutPort()
                + " does not have an edge connected.  Please connect the edge or remove the mapping.");
        }*/
    }
    return m_outRecord;
}

/**
 * Sets an output record.
 * @param outRecord
 */
public void setOutRecord(DataRecord outRecord) {
    m_outRecord = outRecord;
}

/**
 * Gets parent key.
 * @return
 */
public String[] getParentKey() {
    return m_parentKey;
}

/**
 * Sets parent key.
 * @param parentKey
 */
public void setParentKey(String[] parentKey) {
    m_parentKey = parentKey;
}

/**
 * Gets a parent mapping. The parent is held through a WeakReference, so this
 * may return {@code null} either when no parent was set or when the referent
 * has been cleared.
 * @return
 */
public Mapping getParent() {
    if (m_parent != null) {
        return m_parent.get();
    } else {
        return null;
    }
}

/**
 * Sets a parent mapping (wrapped in a WeakReference to avoid a strong
 * child -> parent cycle).
 * @param parent
 */
public void setParent(Mapping parent) {
    m_parent = new WeakReference<Mapping>(parent);
}

/**
 * Gets a sequence name.
 * @return
 */
public String getSequenceField() {
    return m_sequenceField;
}

/**
 * Sets a sequence name.
 * @param field
 */
public void setSequenceField(String field) {
    m_sequenceField = field;
}

/**
 * Gets a sequence ID.
 * @return
 */
public String getSequenceId() {
    return m_sequenceId;
}

/**
 * Sets a sequence ID.
 * @param id
 */
public void setSequenceId(String id) {
    m_sequenceId = id;
}

/**
 * Gets a Sequence (simple sequence, db sequence, ...).
* @return */ public Sequence getSequence() { if(sequence == null) { String element = StringUtils.normalizeName(StringUtils.trimXmlNamespace(getElement())); if(getSequenceId() == null) { sequence = new PrimitiveSequence(element, getGraph(), element); } else { sequence = getGraph().getSequence(getSequenceId()); if(sequence == null) { LOG.warn(getId() + ": Sequence " + getSequenceId() + " does not exist in " + "transformation graph. Primitive sequence is used instead."); sequence = new PrimitiveSequence(element, getGraph(), element); } } } return sequence; } /** * processSkipOrNumRecords is true - mapping can be skipped */ public boolean getProcessSkipOrNumRecords() { if (processSkipOrNumRecords) return true; Mapping parent = getParent(); if (parent == null) { return processSkipOrNumRecords; } return parent.getProcessSkipOrNumRecords(); } /** * Sets inner variables for processSkipOrNumRecords. */ public void prepareProcessSkipOrNumRecords() { Mapping parentMapping = getParent(); processSkipOrNumRecords = parentMapping != null && parentMapping.getProcessSkipOrNumRecords() || (skipRecords4Mapping > 0 || numRecords4Mapping < Integer.MAX_VALUE); } /** * Sets inner variables for bReset4CurrentRecord4Mapping. */ public void prepareReset4CurrentRecord4Mapping() { bReset4CurrentRecord4Mapping = processSkipOrNumRecords; if (m_childMap != null) { Mapping mapping; for (Iterator<Entry<String, Mapping>> it=m_childMap.entrySet().iterator(); it.hasNext();) { mapping = it.next().getValue(); if (mapping.processSkipOrNumRecords) { bReset4CurrentRecord4Mapping = true; break; } } } } /** * skipRecords for this mapping. * @param skipRecords4Mapping */ public void setSkipRecords4Mapping(int skipRecords4Mapping) { this.skipRecords4Mapping = skipRecords4Mapping; } /** * numRecords for this mapping. * @param numRecords4Mapping */ public void setNumRecords4Mapping(int numRecords4Mapping) { this.numRecords4Mapping = numRecords4Mapping; } // /** // * skipRecords for this mapping. 
// * @param skipRecords4Mapping // */ // public void setSkipSourceRecords4Mapping(int skipSourceRecords4Mapping) { // this.skipSourceRecords4Mapping = skipSourceRecords4Mapping; // /** // * numRecords for this mapping. // * @param numRecords4Mapping // */ // public void setNumSourceRecords4Mapping(int numSourceRecords4Mapping) { // this.numSourceRecords4Mapping = numSourceRecords4Mapping; /** * Counter for this mapping. */ public void incCurrentRecord4Mapping() { currentRecord4Mapping++; } /** * Resets submappings. */ public void resetCurrentRecord4ChildMapping() { if (!bReset4CurrentRecord4Mapping) return; if (m_childMap != null) { Mapping mapping; for (Iterator<Entry<String, Mapping>> it=m_childMap.entrySet().iterator(); it.hasNext();) { mapping = it.next().getValue(); mapping.currentRecord4Mapping = 0; mapping.resetCurrentRecord4ChildMapping(); } } } /** * Sets if this and child mapping should be skipped. */ public void prepareDoMap() { if (!processSkipOrNumRecords) return; Mapping parent = getParent(); bDoMap = (parent == null || parent.doMap()) && currentRecord4Mapping >= skipRecords4Mapping && currentRecord4Mapping-skipRecords4Mapping < numRecords4Mapping; if (m_childMap != null) { Mapping mapping; for (Iterator<Entry<String, Mapping>> it=m_childMap.entrySet().iterator(); it.hasNext();) { mapping = it.next().getValue(); mapping.prepareDoMap(); } } } /** * Can process this mapping? It depends on currentRecord4Mapping, skipRecords4Mapping and numRecords4Mapping * for this and parent mappings. 
* @return */ public boolean doMap() { return !processSkipOrNumRecords || (processSkipOrNumRecords && bDoMap); } public void addAncestorField(AncestorFieldMapping ancestorFieldReference) { if (fieldsFromAncestor == null) { fieldsFromAncestor = new LinkedList<AncestorFieldMapping>(); } fieldsFromAncestor.add(ancestorFieldReference); if (ancestorFieldReference.ancestor != null) { ancestorFieldReference.ancestor.descendantReferences.put(ancestorFieldReference.ancestorField, null); } explicitCloverFields.add(ancestorFieldReference.currentField); } public List<AncestorFieldMapping> getFieldsFromAncestor() { return fieldsFromAncestor; } public boolean hasFieldsFromAncestor() { return fieldsFromAncestor != null && !fieldsFromAncestor.isEmpty(); } private void addAncestorFieldMapping(String ancestorFieldRef, String currentField) { String ancestorField = ancestorFieldRef; ancestorField = normalizeAncestorValueRef(ancestorField); Mapping ancestor = this; while (ancestorField.startsWith(PARENT_MAPPING_REFERENCE_PREFIX_WITHSEPARATOR)) { ancestor = ancestor.getParent(); if (ancestor == null) { // User may want this in template declaration LOG.debug("Invalid ancestor XML field reference " + ancestorFieldRef + " in mapping of element <" + this.getElement() + ">"); break; } ancestorField = ancestorField.substring(PARENT_MAPPING_REFERENCE_PREFIX_WITHSEPARATOR.length()); } // After the ancestor prefix has been stripped, process the namespace ancestorField = createQualifiedName(ancestorField); if (ancestor != null) { addAncestorField(new AncestorFieldMapping(ancestor, ancestorField, currentField, ancestorFieldRef)); } else { // This AncestorFieldMapping makes sense in templates - invalid ancestor reference may become valid in template reference addAncestorField(new AncestorFieldMapping(null, null, currentField, ancestorFieldRef)); } } /** * If <code>ancestorField</code> is reference to ancestor element value, returns its normalized * version, otherwise returns unchanged original 
parameter. * Normalized ancestor field reference always ends with "../.": suffix. * Valid unnormalized ancestor element value references are i.e.: ".." or "../" */ private String normalizeAncestorValueRef(String ancestorField) { if (PARENT_MAPPING_REFERENCE_PREFIX.equals(ancestorField)) { return PARENT_MAPPING_REFERENCE_PREFIX_WITHSEPARATOR + ELEMENT_VALUE_REFERENCE; } if (ancestorField.startsWith(PARENT_MAPPING_REFERENCE_PREFIX_WITHSEPARATOR)) { if (ancestorField.endsWith(PARENT_MAPPING_REFERENCE_PREFIX)) { ancestorField += PARENT_MAPPING_REFERENCE_SEPARATOR + ELEMENT_VALUE_REFERENCE; } else if (ancestorField.endsWith(PARENT_MAPPING_REFERENCE_PREFIX_WITHSEPARATOR)) { ancestorField += ELEMENT_VALUE_REFERENCE; } } return ancestorField; } } public static class AncestorFieldMapping { final Mapping ancestor; final String ancestorField; final String currentField; final String originalFieldReference; public AncestorFieldMapping(Mapping ancestor, String ancestorField, String currentField, String originalFieldReference) { this.ancestor = ancestor; this.ancestorField = ancestorField; this.currentField = currentField; this.originalFieldReference = originalFieldReference; } } /** * Constructs an XML Extract node with the given id. */ public XMLExtract(String id) { super(id); } /** * Creates an inctence of this class from a xml node. 
* @param graph * @param xmlElement * @return * @throws XMLConfigurationException */ public static Node fromXML(TransformationGraph graph, Element xmlElement) throws XMLConfigurationException { ComponentXMLAttributes xattribs = new ComponentXMLAttributes(xmlElement, graph); XMLExtract extract; try { // constructor extract = new XMLExtract(xattribs.getString(XML_ID_ATTRIBUTE)); // set input file extract.setInputFile(xattribs.getStringEx(XML_SOURCEURI_ATTRIBUTE,RefResFlag.SPEC_CHARACTERS_OFF)); // set dtd schema // if (xattribs.exists(XML_SCHEMA_ATTRIBUTE)) { // extract.setSchemaFile(xattribs.getString(XML_SCHEMA_ATTRIBUTE)); // if can use nested nodes. if(xattribs.exists(XML_USENESTEDNODES_ATTRIBUTE)) { extract.setUseNestedNodes(xattribs.getBoolean(XML_USENESTEDNODES_ATTRIBUTE)); } // set mapping String mappingURL = xattribs.getStringEx(XML_MAPPING_URL_ATTRIBUTE, null,RefResFlag.SPEC_CHARACTERS_OFF); String mapping = xattribs.getString(XML_MAPPING_ATTRIBUTE, null); NodeList nodes = xmlElement.getChildNodes(); if (mappingURL != null) extract.setMappingURL(mappingURL); else if (mapping != null) extract.setMapping(mapping); else if (nodes != null && nodes.getLength() > 0){ //old-fashioned version of mapping definition //mapping xml elements are child nodes of the component extract.setNodes(nodes); } else { xattribs.getStringEx(XML_MAPPING_URL_ATTRIBUTE,RefResFlag.SPEC_CHARACTERS_OFF); // throw configuration exception } // set namespace bindings attribute Properties props = null; if (xattribs.exists(XML_NAMESPACE_BINDINGS_ATTRIBUTE)) { try { props = new Properties(); final String content = xattribs.getString( XML_NAMESPACE_BINDINGS_ATTRIBUTE, null); if (content != null) { props.load(new ByteArrayInputStream(content.getBytes())); } } catch (IOException e) { throw new XMLConfigurationException("Unable to initialize namespace bindings",e); } final HashMap<String,String> namespaceBindings = new HashMap<String,String>(); for (String name : props.stringPropertyNames()) { 
namespaceBindings.put(name, props.getProperty(name)); } extract.setNamespaceBindings(namespaceBindings); } // set a skip row attribute if (xattribs.exists(XML_SKIP_ROWS_ATTRIBUTE)){ extract.setSkipRows(xattribs.getInteger(XML_SKIP_ROWS_ATTRIBUTE)); } // set a numRecord attribute if (xattribs.exists(XML_NUMRECORDS_ATTRIBUTE)){ extract.setNumRecords(xattribs.getInteger(XML_NUMRECORDS_ATTRIBUTE)); } if (xattribs.exists(XML_XML_FEATURES_ATTRIBUTE)){ extract.setXmlFeatures(xattribs.getString(XML_XML_FEATURES_ATTRIBUTE)); } if (xattribs.exists(XML_VALIDATE_ATTRIBUTE)){ extract.setValidate(xattribs.getBoolean(XML_VALIDATE_ATTRIBUTE)); } if (xattribs.exists(XML_CHARSET_ATTRIBUTE)){ extract.setCharset(xattribs.getString(XML_CHARSET_ATTRIBUTE)); } if (xattribs.exists(XML_TRIM_ATTRIBUTE)){ extract.setTrim(xattribs.getBoolean(XML_TRIM_ATTRIBUTE)); } return extract; } catch (Exception ex) { throw new XMLConfigurationException(COMPONENT_TYPE + ":" + xattribs.getString(XML_ID_ATTRIBUTE," unknown ID ") + ":" + ex.getMessage(),ex); } } @Deprecated private void setNodes(NodeList nodes) { this.mappingNodes = nodes; } public void setMappingURL(String mappingURL) { this.mappingURL = mappingURL; } public void setMapping(String mapping) { this.mapping = mapping; } /** * Sets the trim indicator. * @param trim */ public void setTrim(boolean trim) { this.trim = trim; } /** * Creates org.w3c.dom.Document object from the given String. 
* * @param inString * @return * @throws XMLConfigurationException */ private static Document createDocumentFromString(String inString) throws XMLConfigurationException { InputSource is = new InputSource(new StringReader(inString)); DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setCoalescing(true); Document doc; try { doc = dbf.newDocumentBuilder().parse(is); } catch (Exception e) { throw new XMLConfigurationException("Mapping parameter parse error occur.", e); } return doc; } /** * Creates org.w3c.dom.Document object from the given ReadableByteChannel. * * @param readableByteChannel * @return * @throws XMLConfigurationException */ public static Document createDocumentFromChannel(ReadableByteChannel readableByteChannel) throws XMLConfigurationException { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setCoalescing(true); Document doc; try { doc = dbf.newDocumentBuilder().parse(Channels.newInputStream(readableByteChannel)); } catch (Exception e) { throw new XMLConfigurationException("Mapping parameter parse error occur.", e); } return doc; } /** * Creates mappings. 
* * @param graph * @param extract * @param parentMapping * @param nodeXML */ private List<String> processMappings(TransformationGraph graph, Mapping parentMapping, org.w3c.dom.Node nodeXML) { List<String> errors = new LinkedList<String>(); if (XML_MAPPING.equals(nodeXML.getNodeName())) { // for a mapping declaration, process all of the attributes // element, outPort, parentKeyName, generatedKey ComponentXMLAttributes attributes = new ComponentXMLAttributes((Element) nodeXML, graph); Mapping mapping = null; if (attributes.exists(XML_TEMPLATE_REF)) { // template mapping reference String templateId = null; try { templateId = attributes.getString(XML_TEMPLATE_REF); } catch (AttributeNotFoundException e) { // this cannot happen (see if above) errors.add("Attribute 'templateId' is missing"); return errors; } if (!declaredTemplates.containsKey(templateId)) { errors.add("Template '" + templateId + "' has not been declared"); return errors; } mapping = new Mapping(declaredTemplates.get(templateId), parentMapping); } // standard mapping declaration try { int outputPort = -1; if (attributes.exists(XML_OUTPORT)) { outputPort = attributes.getInteger(XML_OUTPORT); } if (mapping == null) { mapping = new Mapping( createQualifiedName(attributes.getString(XML_ELEMENT)), outputPort, parentMapping); } else { if (outputPort != -1) { mapping.setOutPort(outputPort); if (attributes.exists(XML_ELEMENT)) { mapping.setElement( createQualifiedName(attributes.getString(XML_ELEMENT))); } } } } catch(AttributeNotFoundException ex) { errors.add("Required attribute 'element' missing. 
Skipping this mapping and all children."); return errors; } // Add new root mapping if (parentMapping == null) { addMapping(mapping); } boolean parentKeyPresent = false; boolean generatedKeyPresent = false; if (attributes.exists(XML_PARENTKEY)) { final String[] parentKey = attributes.getString(XML_PARENTKEY, null).split(Defaults.Component.KEY_FIELDS_DELIMITER_REGEX); mapping.setParentKey(parentKey); parentKeyPresent = true; } if (attributes.exists(XML_GENERATEDKEY)) { final String[] generatedKey = attributes.getString(XML_GENERATEDKEY, null).split(Defaults.Component.KEY_FIELDS_DELIMITER_REGEX); mapping.setGeneratedKey(generatedKey); generatedKeyPresent = true; } if (parentKeyPresent != generatedKeyPresent) { errors.add("Mapping for element: " + mapping.getElement() + " must either have both 'parentKey' and 'generatedKey' attributes or neither."); mapping.setParentKey(null); mapping.setGeneratedKey(null); } if (parentKeyPresent && mapping.getParent() == null) { errors.add("Mapping for element: " + mapping.getElement() + " may only have 'parentKey' or 'generatedKey' attributes if it is a nested mapping."); mapping.setParentKey(null); mapping.setGeneratedKey(null); } //mapping between xml fields and clover fields initialization if (attributes.exists(XML_XMLFIELDS) && attributes.exists(XML_CLOVERFIELDS)) { String[] xmlFields = attributes.getString(XML_XMLFIELDS, null).split(Defaults.Component.KEY_FIELDS_DELIMITER); String[] cloverFields = attributes.getString(XML_CLOVERFIELDS, null).split(Defaults.Component.KEY_FIELDS_DELIMITER_REGEX); // TODO add existence check for Clover fields, if possible if(xmlFields.length == cloverFields.length){ for (int i = 0; i < xmlFields.length; i++) { if (xmlFields[i].startsWith(PARENT_MAPPING_REFERENCE_PREFIX_WITHSEPARATOR) || xmlFields[i].equals(PARENT_MAPPING_REFERENCE_PREFIX)) { mapping.addAncestorFieldMapping(xmlFields[i], cloverFields[i]); } else { mapping.putXml2CloverFieldMap(xmlFields[i], cloverFields[i]); } } } else { 
errors.add("Mapping for element: " + mapping.getElement() + " must have same number of the xml fields and the clover fields attribute."); } } //sequence field if (attributes.exists(XML_SEQUENCEFIELD)) { mapping.setSequenceField(attributes.getString(XML_SEQUENCEFIELD, null)); mapping.setSequenceId(attributes.getString(XML_SEQUENCEID, null)); } //skip rows field if (attributes.exists(XML_SKIP_ROWS_ATTRIBUTE)) { mapping.setSkipRecords4Mapping(attributes.getInteger(XML_SKIP_ROWS_ATTRIBUTE, 0)); } //number records field if (attributes.exists(XML_NUMRECORDS_ATTRIBUTE)) { mapping.setNumRecords4Mapping(attributes.getInteger(XML_NUMRECORDS_ATTRIBUTE, Integer.MAX_VALUE)); } // template declaration if (attributes.exists(XML_TEMPLATE_ID)) { final String templateId = attributes.getString(XML_TEMPLATE_ID, null); if (declaredTemplates.containsKey(templateId)) { errors.add("Template '" + templateId + "' has duplicate declaration"); } declaredTemplates.put(templateId, mapping); } // prepare variables for skip and numRecords for this mapping mapping.prepareProcessSkipOrNumRecords(); // multiple nested references of a template if (attributes.exists(XML_TEMPLATE_REF) && attributes.exists(XML_TEMPLATE_DEPTH)) { int depth = attributes.getInteger(XML_TEMPLATE_DEPTH, 1) - 1; Mapping currentMapping = mapping; while (depth > 0) { currentMapping = new Mapping(currentMapping, currentMapping); currentMapping.prepareProcessSkipOrNumRecords(); depth } while (currentMapping != mapping) { currentMapping.prepareReset4CurrentRecord4Mapping(); currentMapping = currentMapping.getParent(); } } // Process all nested mappings NodeList nodes = nodeXML.getChildNodes(); for (int i = 0; i < nodes.getLength(); i++) { org.w3c.dom.Node node = nodes.item(i); errors.addAll(processMappings(graph, mapping, node)); } // prepare variable reset of skip and numRecords' attributes mapping.prepareReset4CurrentRecord4Mapping(); } else if (nodeXML.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE) { errors.add("Unknown 
element '" + nodeXML.getNodeName() + "' is ignored with all it's child elements."); } // Ignore every other xml element (text values, comments...) return errors; } @Override public void preExecute() throws ComponentNotReadyException { super.preExecute(); if (firstRun()) { // sets input file to readableChannelIterator and sets its settings (directory, charset, input port,...) if (inputFile != null) { createReadableChannelIterator(); this.readableChannelIterator.init(); } } else { autoFilling.reset(); this.readableChannelIterator.reset(); } if (!readableChannelIterator.isGraphDependentSource()) prepareNextSource(); } @Override public Result execute() throws Exception { Result result; // parse xml from input file(s). if (parseXML()) { // finished successfully result = runIt ? Result.FINISHED_OK : Result.ABORTED; } else { // an error occurred result = runIt ? Result.ERROR : Result.ABORTED; } broadcastEOF(); return result; } @Override public void postExecute() throws ComponentNotReadyException { super.postExecute(); //no input channel is closed here - this could be changed in future } /** * Parses the inputSource. The SAXHandler defined in this class will handle * the rest of the events. Returns false if there was an exception * encountered during processing. 
*/ private boolean parseXML() throws JetelException{ // create new sax factory SAXParserFactory factory = SAXParserFactory.newInstance(); factory.setValidating(validate); factory.setNamespaceAware(true); initXmlFeatures(factory); SAXParser parser; Set<String> xmlAttributes = getXMLMappingValues(); try { // create new sax parser parser = factory.newSAXParser(); } catch (Exception ex) { throw new JetelException(ex.getMessage(), ex); } try { // prepare next source if (readableChannelIterator.isGraphDependentSource()) { try { if(!nextSource()) return true; } catch (JetelException e) { throw new ComponentNotReadyException(e.getMessage()/*"FileURL attribute (" + inputFile + ") doesn't contain valid file url."*/, e); } } do { if (m_inputSource != null) { // parse the input source parser.parse(m_inputSource, new SAXHandler(xmlAttributes)); } // get a next source } while (nextSource()); } catch (SAXException ex) { // process error if (!runIt) { return true; // we were stopped by a stop signal... 
probably } LOG.error("XML Extract: " + getId() + " Parse Exception" + ex.getMessage(), ex); throw new JetelException("XML Extract: " + getId() + " Parse Exception", ex); } catch (Exception ex) { LOG.error("XML Extract: " + getId() + " Unexpected Exception", ex); throw new JetelException("XML Extract: " + getId() + " Unexpected Exception", ex); } return true; } private Set<String> getXMLMappingValues() { try { SAXParser saxParser = SAXParserFactory.newInstance().newSAXParser(); DefaultHandler handler = new MyHandler(); InputStream is = null; if (this.mappingURL != null) { String filePath = FileUtils.getFile(getGraph().getRuntimeContext().getContextURL(), mappingURL); is = new FileInputStream(new File(filePath)); } else if (this.mapping != null) { is = new ByteArrayInputStream(mapping.getBytes(charset)); } if (is != null) { saxParser.parse(is, handler); return ((MyHandler) handler).getCloverAttributes(); } } catch (Exception e) { return new HashSet<String>(); } return new HashSet<String>(); } /** * Xml features initialization. * @throws JetelException */ private void initXmlFeatures(SAXParserFactory factory) throws JetelException { if (xmlFeatures == null) return; String[] aXmlFeatures = xmlFeatures.split(FEATURES_DELIMETER); String[] aOneFeature; try { for (String oneFeature: aXmlFeatures) { aOneFeature = oneFeature.split(FEATURES_ASSIGN); if (aOneFeature.length != 2) throw new JetelException("The xml feature '" + oneFeature + "' has wrong format"); factory.setFeature(aOneFeature[0], Boolean.parseBoolean(aOneFeature[1])); } } catch (Exception e) { throw new JetelException(e.getMessage(), e); } } /** * Perform sanity checks. */ public void init() throws ComponentNotReadyException { if(isInitialized()) return; super.init(); augmentNamespaceURIs(); TransformationGraph graph = getGraph(); URL projectURL = graph != null ? 
graph.getRuntimeContext().getContextURL() : null; // prepare mapping if (mappingURL != null) { try { ReadableByteChannel ch = FileUtils.getReadableChannel(projectURL, mappingURL); Document doc = createDocumentFromChannel(ch); Element rootElement = doc.getDocumentElement(); mappingNodes = rootElement.getChildNodes(); } catch (Exception e) { throw new ComponentNotReadyException(e); } } else if (mapping != null) { Document doc; try { doc = createDocumentFromString(mapping); } catch (XMLConfigurationException e) { throw new ComponentNotReadyException(e); } Element rootElement = doc.getDocumentElement(); mappingNodes = rootElement.getChildNodes(); } //iterate over 'Mapping' elements declaredTemplates.clear(); String errorPrefix = getId() + ": Mapping error - "; for (int i = 0; i < mappingNodes.getLength(); i++) { org.w3c.dom.Node node = mappingNodes.item(i); List<String> errors = processMappings(graph, null, node); for (String error : errors) { LOG.warn(errorPrefix + error); } } // test that we have at least one input port and one output if (outPorts.size() < 1) { throw new ComponentNotReadyException(getId() + ": At least one output port has to be defined!"); } if (m_elementPortMap.size() < 1) { throw new ComponentNotReadyException( getId() + ": At least one mapping has to be defined. <Mapping element=\"elementToMatch\" outPort=\"123\" [parentKey=\"key in parent\" generatedKey=\"new foreign key in target\"]/>"); } } private void augmentNamespaceURIs() { for (String prefix : namespaceBindings.keySet()) { String uri = namespaceBindings.get(prefix); namespaceBindings.put(prefix, augmentURI(uri)); } } private String augmentURI(String uri) { if (uri == null) { return null; } return "{" + uri + "}"; } private void createReadableChannelIterator() throws ComponentNotReadyException { TransformationGraph graph = getGraph(); URL projectURL = graph != null ? 
graph.getRuntimeContext().getContextURL() : null; this.readableChannelIterator = new ReadableChannelIterator( getInputPort(INPUT_PORT), projectURL, inputFile); this.readableChannelIterator.setCharset(charset); this.readableChannelIterator.setPropertyRefResolver(new PropertyRefResolver(graph.getGraphProperties())); this.readableChannelIterator.setDictionary(graph.getDictionary()); } /** * Prepares a next source. * @throws ComponentNotReadyException */ private void prepareNextSource() throws ComponentNotReadyException { try { if(!nextSource()) { //throw new ComponentNotReadyException("FileURL attribute (" + inputFile + ") doesn't contain valid file url."); } } catch (JetelException e) { throw new ComponentNotReadyException(e.getMessage()/*"FileURL attribute (" + inputFile + ") doesn't contain valid file url."*/, e); } } /** * Switch to the next source file. * @return * @throws JetelException */ private boolean nextSource() throws JetelException { ReadableByteChannel stream = null; while (readableChannelIterator.hasNext()) { autoFilling.resetSourceCounter(); autoFilling.resetGlobalSourceCounter(); stream = readableChannelIterator.next(); if (stream == null) continue; // if record no record found autoFilling.setFilename(readableChannelIterator.getCurrentFileName()); File tmpFile = new File(autoFilling.getFilename()); long timestamp = tmpFile.lastModified(); autoFilling.setFileSize(tmpFile.length()); autoFilling.setFileTimestamp(timestamp == 0 ? 
null : new Date(timestamp)); m_inputSource = new InputSource(Channels.newInputStream(stream)); return true; } readableChannelIterator.blankRead(); return false; } private String createQualifiedName(String prefixedName) { if (prefixedName == null || prefixedName.isEmpty()) { return prefixedName; } // check if universal XML name exists int indexOfOpenBracket = prefixedName.indexOf("{"); if (-1<indexOfOpenBracket && indexOfOpenBracket<prefixedName.indexOf("}")) { return prefixedName; } final String[] parsed = prefixedName.split(":"); if (parsed.length < 2) { return "{}" + parsed[0]; } /* * Prefixed element: * Get the URI (already in Clark's notation) and use it to create qualified name */ String namespaceURI = namespaceBindings.get(parsed[0]); namespaceURI = namespaceURI == null ? "{}" : namespaceURI; return namespaceURI + parsed[1]; } private String[] createQualifiedName(String[] prefixedNames) { final String[] result = new String[prefixedNames.length]; for (int i = 0; i<prefixedNames.length; i++) { result[i] = createQualifiedName(prefixedNames[i]); } return result; } public String getType() { return COMPONENT_TYPE; } private void checkUniqueness(ConfigurationStatus status, Mapping mapping) { if (mapping.getOutRecord() == null) { return; } new UniqueLabelsValidator(status, this).validateMetadata(mapping.getOutRecord().getMetadata()); if (mapping.getChildMap() != null) { for (Mapping child: mapping.getChildMap().values()) { checkUniqueness(status, child); } } } @Override public ConfigurationStatus checkConfig(ConfigurationStatus status) { if (charset != null && !Charset.isSupported(charset)) { status.add(new ConfigurationProblem( "Charset "+charset+" not supported!", ConfigurationStatus.Severity.ERROR, this, ConfigurationStatus.Priority.NORMAL)); } TransformationGraph graph = getGraph(); //Check whether XML mapping schema is valid try { SAXParserFactory factory = SAXParserFactory.newInstance(); SAXParser saxParser = factory.newSAXParser(); DefaultHandler handler = new 
MyHandler(); InputSource is = null; Document doc = null; if (this.mappingURL != null) { String filePath = FileUtils.getFile(graph.getRuntimeContext().getContextURL(), mappingURL); is = new InputSource(new FileInputStream(new File(filePath))); ReadableByteChannel ch = FileUtils.getReadableChannel( graph != null ? graph.getRuntimeContext().getContextURL() : null, mappingURL); doc = createDocumentFromChannel(ch); } else if (this.mapping != null) { // inlined mapping // don't use the charset of the component's input files, but the charset of the .grf file is = new InputSource(new StringReader(mapping)); doc = createDocumentFromString(mapping); } if (is != null) { saxParser.parse(is, handler); Set<String> attributesNames = ((MyHandler) handler).getAttributeNames(); for (String attributeName : attributesNames) { if (!isXMLAttribute(attributeName)) { status.add(new ConfigurationProblem("Can't resolve XML attribute: " + attributeName, Severity.WARNING, this, Priority.NORMAL)); } } } if (doc != null) { Element rootElement = doc.getDocumentElement(); mappingNodes = rootElement.getChildNodes(); for (int i = 0; i < mappingNodes.getLength(); i++) { org.w3c.dom.Node node = mappingNodes.item(i); List<String> errors = processMappings(graph, null, node); ConfigurationProblem problem; for (String error : errors) { problem = new ConfigurationProblem("Mapping error - " + error, Severity.WARNING, this, Priority.NORMAL); status.add(problem); } } } } catch (Exception e) { status.add(new ConfigurationProblem("Can't parse XML mapping schema. Reason: " + e.getMessage(), Severity.ERROR, this, Priority.NORMAL)); } finally { declaredTemplates.clear(); } // TODO Labels: //for (Mapping mapping: getMappings().values()) { // checkUniqueness(status, mapping); // TODO Labels end try { // check inputs if (inputFile != null) { createReadableChannelIterator(); this.readableChannelIterator.checkConfig(); URL contextURL = graph != null ? 
graph.getRuntimeContext().getContextURL() : null; String fName = null; Iterator<String> fit = readableChannelIterator.getFileIterator(); while (fit.hasNext()) { try { fName = fit.next(); if (fName.equals("-")) continue; if (fName.startsWith("dict:")) continue; //this test has to be here, since an involuntary warning is caused String mostInnerFile = FileURLParser.getMostInnerAddress(fName); URL url = FileUtils.getFileURL(contextURL, mostInnerFile); if (FileUtils.isServerURL(url)) { //FileUtils.checkServer(url); //this is very long operation continue; } if (FileURLParser.isArchiveURL(fName)) { // test if the archive file exists // getReadableChannel is too long for archives String path = url.getRef() != null ? url.getFile() + "#" + url.getRef() : url.getFile(); if (new File(path).exists()) continue; throw new ComponentNotReadyException("File is unreachable: " + fName); } FileUtils.getReadableChannel(contextURL, fName).close(); } catch (IOException e) { throw new ComponentNotReadyException("File is unreachable: " + fName, e); } catch (ComponentNotReadyException e) { throw new ComponentNotReadyException("File is unreachable: " + fName, e); } } } } catch (ComponentNotReadyException e) { ConfigurationProblem problem = new ConfigurationProblem(e.getMessage(), ConfigurationStatus.Severity.WARNING, this, ConfigurationStatus.Priority.NORMAL); if(!StringUtils.isEmpty(e.getAttributeName())) { problem.setAttributeName(e.getAttributeName()); } status.add(problem); } finally { free(); } //TODO return status; } private static class MyHandler extends DefaultHandler { //Handler used at checkConfig to parse XML mapping and retrieve attributes names private Set<String> attributeNames = new HashSet<String>(); private Set<String> cloverAttributes = new HashSet<String>(); public void startElement(String namespaceURI, String localName, String qName, Attributes atts) { int length = atts.getLength(); for (int i=0; i<length; i++) { String xmlField = atts.getQName(i); 
attributeNames.add(xmlField); if (xmlField.equals("cloverFields")) { cloverAttributes.add(atts.getValue(i)); } } } public Set<String> getAttributeNames() { return attributeNames; } public Set<String> getCloverAttributes() { return cloverAttributes; } } private boolean isXMLAttribute(String attribute) { //returns true if given attribute is known XML attribute if (attribute.equals(XML_ELEMENT) || attribute.equals(XML_OUTPORT) || attribute.equals(XML_PARENTKEY) || attribute.equals(XML_GENERATEDKEY) || attribute.equals(XML_XMLFIELDS) || attribute.equals(XML_CLOVERFIELDS) || attribute.equals(XML_SEQUENCEFIELD) || attribute.equals(XML_SEQUENCEID) || attribute.equals(XML_SKIP_ROWS_ATTRIBUTE) || attribute.equals(XML_NUMRECORDS_ATTRIBUTE) || attribute.equals(XML_TRIM_ATTRIBUTE) || attribute.equals(XML_VALIDATE_ATTRIBUTE) || attribute.equals(XML_XML_FEATURES_ATTRIBUTE) || attribute.equals(XML_TEMPLATE_ID) || attribute.equals(XML_TEMPLATE_REF) || attribute.equals(XML_TEMPLATE_DEPTH)) { return true; } return false; } public org.w3c.dom.Node toXML() { return null; } /** * Set the input source containing the XML this will parse. */ public void setInputSource(InputSource inputSource) { m_inputSource = inputSource; } /** * Sets an input file. * @param inputFile */ public void setInputFile(String inputFile) { this.inputFile = inputFile; } /** * * @param useNestedNodes */ public void setUseNestedNodes(boolean useNestedNodes) { this.useNestedNodes = useNestedNodes; } /** * Accessor to add a mapping programatically. */ public void addMapping(Mapping mapping) { m_elementPortMap.put(mapping.getElement(), mapping); } /** * Returns the mapping. Maybe make this read-only? */ public Map<String,Mapping> getMappings() { // return Collections.unmodifiableMap(m_elementPortMap); // return a // read-only map return m_elementPortMap; } /** * Sets skipRows - how many elements to skip. 
 * @param skipRows number of leading mapped elements to skip
 */
public void setSkipRows(int skipRows) {
    this.skipRows = skipRows;
}

/**
 * Sets numRecords - how many elements to process.
 * Negative values are clamped to 0.
 * @param numRecords
 */
public void setNumRecords(int numRecords) {
    this.numRecords = Math.max(numRecords, 0);
}

/**
 * Sets the xml feature.
 * @param xmlFeatures
 */
public void setXmlFeatures(String xmlFeatures) {
    this.xmlFeatures = xmlFeatures;
}

/**
 * Sets validation option.
 * @param validate
 */
public void setValidate(boolean validate) {
    this.validate = validate;
}

/**
 * Sets charset for dictionary and input port reading.
 * @param charset
 */
public void setCharset(String charset) {
    this.charset = charset;
}

/**
 * Sets namespace bindings to allow processing that relate namespace prefix used in Mapping
 * and namespace URI used in processed XML document
 * @param namespaceBindings the namespaceBindings to set
 */
public void setNamespaceBindings(HashMap<String, String> namespaceBindings) {
    this.namespaceBindings = namespaceBindings;
}

//    private void resetRecord(DataRecord record) {
//        // reset the record setting the nullable fields to null and default
//        // values. Unfortunately init() does not do this, so if you have a field
//        // that's nullable and you never set a value to it, it will NOT be null.
//        // the reason we need to reset data records is the fact that XML data is
//        // not as rigidly
//        // structured as csv fields, so column values are regularly "missing"
//        // and without a reset
//        // the prior row's value will be present.
// for (int i = 0; i < record.getNumFields(); i++) { // DataFieldMetadata fieldMetadata = record.getMetadata().getField(i); // DataField field = record.getField(i); // if (fieldMetadata.isNullable()) { // // Default all nullables to null // field.setNull(true); // } else if(fieldMetadata.isDefaultValue()) { // //Default all default values to their given defaults // field.setToDefaultValue(); // } else { // // Not nullable so set it to the default value (what init does) // switch (fieldMetadata.getType()) { // case DataFieldMetadata.INTEGER_FIELD: // ((IntegerDataField) field).setValue(0); // break; // case DataFieldMetadata.STRING_FIELD: // ((StringDataField) field).setValue(""); // break; // case DataFieldMetadata.DATE_FIELD: // case DataFieldMetadata.DATETIME_FIELD: // ((DateDataField) field).setValue(0); // break; // case DataFieldMetadata.NUMERIC_FIELD: // ((NumericDataField) field).setValue(0); // break; // case DataFieldMetadata.LONG_FIELD: // ((LongDataField) field).setValue(0); // break; // case DataFieldMetadata.DECIMAL_FIELD: // ((NumericDataField) field).setValue(0); // break; // case DataFieldMetadata.BYTE_FIELD: // ((ByteDataField) field).setValue((byte) 0); // break; // case DataFieldMetadata.UNKNOWN_FIELD: // default: // break; }
package org.jetel.data.lookup;

import org.jetel.data.DataRecord;
import org.jetel.exception.ComponentNotReadyException;
import org.jetel.exception.JetelException;
import org.jetel.metadata.DataRecordMetadata;

/**
 * Interface for lookup tables. This is the minimum functionality required.
 * <br><br>
 * The intended use of LookupTable is:<br>
 * <ol>
 * <li>LookupTable constructed (new LookupTable())
 * <li>setLookupKey() method called to specify what object will be used as
 * a lookup key (usually String, RecordKey, Object[])
 * <li>init() method called to populate table with values or otherwise prepare for use
 * <li>get() or getNext() methods called repeatedly
 * <li>free() method called to release resources occupied by the lookup table
 * </ol>
 * <br>
 * <i>Note:</i> not all variants of get() method may be supported by particular
 * LookupTable implementation.<br>
 *
 * @author DPavlis
 * @since 8.7.2004
 *
 */
public interface LookupTable {

    /**
     * Called when the lookup table is first used/needed. Usually at
     * the beginning of phases in which the lookup is used. Any memory- and time-intensive
     * allocation should happen during the call to this method.<br>
     * It may happen that this method is called several times; however it should
     * be secured that initialization (allocation, etc.) is performed only once or
     * every time free() method was called prior to this method.<br>
     *
     * @throws ComponentNotReadyException
     * NOTE: copy from GraphElement
     */
    public void init() throws ComponentNotReadyException;

    /**
     * Specifies what object type will be used for looking up data.
     * According to the Object type used for calling this method, the proper get() method
     * should be called then.
     *
     * @param key can be one of these Object types - String, Object[], RecordKey
     */
    public void setLookupKey(Object key);

    /**
     * Return DataRecord stored in lookup table under the specified String
     * key.
     *
     * @param keyString - the key to be used for looking up DataRecord
     * @return DataRecord associated with specified key or NULL if not found
     */
    public DataRecord get(String keyString);

    /**
     * Return DataRecord stored in lookup table under the specified keys.<br>
     * As lookup values, only the following objects should be used - Integer, Double, Date, String.
     *
     * @param keys values used for look-up of data record
     * @return DataRecord associated with specified keys or NULL if not found
     */
    public DataRecord get(Object[] keys);

    /**
     * Returns DataRecord stored in lookup table.
     *
     * @param keyRecord DataRecord to be used for looking up data
     * @return DataRecord associated with specified key or NULL if not found
     */
    public DataRecord get(DataRecord keyRecord);

    /**
     * Next DataRecord stored under the same key as the previous one successfully
     * retrieved while calling a get() method.
     *
     * @return DataRecord or NULL if no other DataRecord is stored under the same key
     */
    public DataRecord getNext();

    /**
     * Returns number of DataRecords found by the last get() method call.<br>
     *
     * @return number of found data records or -1 if it can't be applied for this lookup table
     * implementation
     */
    public int getNumFound();

    /**
     * Returns the metadata associated with the DataRecords stored in this lookup table.
     *
     * @return the metadata object
     */
    public DataRecordMetadata getMetadata();

    /**
     * Frees the lookup table - releases any allocated resource (memory, etc.)<br>
     * This method is called when this lookup table is no longer needed during TransformationGraph
     * execution.<br>
     * Can also be called from a user's transformation method. If free() is called, then
     * continued use of the lookup table should not be permitted - until init() is called
     * again.
     * NOTE: copy from GraphElement
     */
    public void free();

    /**
     * NOTE: copy from GraphElement
     */
    public abstract boolean checkConfig();
}
package org.jetel.lookup;

import java.text.RuleBasedCollator;
import java.util.Comparator;
import java.util.Iterator;
import java.util.SortedSet;
import java.util.TreeSet;

import org.jetel.data.DataRecord;
import org.jetel.data.RecordComparator;
import org.jetel.data.RecordKey;
import org.jetel.data.lookup.LookupTable;
import org.jetel.data.parser.Parser;
import org.jetel.exception.ComponentNotReadyException;
import org.jetel.exception.JetelException;
import org.jetel.graph.GraphElement;
import org.jetel.metadata.DataRecordMetadata;

/**
 * Range lookup table contains records which define intervals. It means that they must
 * have a special structure: the first field is the name of the interval, odd fields mark starts of
 * intervals, even fields (from 2) mean the corresponding ends of intervals, eg: Lookup table defined
 * as follows:<br>
 * low_slow,0,10,0,50<br>
 * low_fast,0,10,50,100<br>
 * high_slow,10,20,0,50<br>
 * high_fast,10,20,50,100<br>
 * has 4 intervals with 2 searching parameters: first from interval 0-10, and second from interval 0-100.<br>
 * Intervals can overlap, but then to get all resulting intervals it is necessary to go through all defined.
 */
public class RangeLookupTable extends GraphElement implements LookupTable {

    protected DataRecordMetadata metadata;       // defines the lookup table record layout
    protected Parser dataParser;                 // source of the interval-defining records (may be null)
    protected TreeSet<DataRecord> lookupTable;   // set of intervals, ordered by IntervalRecordComparator
    protected SortedSet<DataRecord> subTable;    // tail view of candidate intervals for the current lookup
    protected int numFound;                      // matching intervals returned since the last get(...)
    protected RecordKey lookupKey;
    protected DataRecord tmpRecord;              // reusable "point interval" built from the lookup values
    private DataRecord tmp;                      // last candidate record examined by getNext()
    protected IntervalRecordComparator comparator;
    protected int[] keyFields = null;
    protected Iterator<DataRecord> subTableIterator;
    protected RuleBasedCollator collator = null;

    /**
     * Constructor for the most general range lookup table.
     *
     * @param id       id
     * @param metadata metadata defining this lookup table
     * @param parser   parser for reading the interval-defining records
     * @param collator collator for comparing string fields (may be null)
     */
    public RangeLookupTable(String id, DataRecordMetadata metadata, Parser parser,
            RuleBasedCollator collator) {
        super(id);
        this.metadata = metadata;
        this.dataParser = parser;
        this.collator = collator;
    }

    public RangeLookupTable(String id, DataRecordMetadata metadata, Parser parser) {
        this(id, metadata, parser, null);
    }

    /* (non-Javadoc)
     * @see org.jetel.graph.GraphElement#init()
     */
    @Override
    public synchronized void init() throws ComponentNotReadyException {
        if (isInitialized()) return;
        super.init();

        comparator = new IntervalRecordComparator(metadata, collator);
        lookupTable = new TreeSet<DataRecord>(comparator);
        tmpRecord = new DataRecord(metadata);
        tmpRecord.init();

        // Populate the table from the parser, if one was supplied.
        if (dataParser != null) {
            dataParser.init(metadata);
            try {
                while (dataParser.getNext(tmpRecord) != null) {
                    lookupTable.add(tmpRecord.duplicate());
                }
            } catch (JetelException e) {
                throw new ComponentNotReadyException(this, e.getMessage(), e);
            } finally {
                // FIX: close the parser even when reading fails (was leaked on error).
                dataParser.close();
            }
        }
        numFound = 0;
    }

    /**
     * Lookup by a single String key is not meaningful for a range table.
     *
     * @return always null
     */
    public DataRecord get(String keyString) {
        // TODO Auto-generated method stub
        return null;
    }

    /* (non-Javadoc)
     * @see org.jetel.data.lookup.LookupTable#get(java.lang.Object[])
     */
    public DataRecord get(Object[] keys) {
        // Build a degenerate "point" interval from keys: start == end == value.
        for (int i = 0; i < keys.length; i++) {
            tmpRecord.getField(2 * i + 1).setValue(keys[i]);
            tmpRecord.getField(2 * (i + 1)).setValue(keys[i]);
        }
        return get();
    }

    /* (non-Javadoc)
     * @see org.jetel.data.lookup.LookupTable#get(org.jetel.data.DataRecord)
     */
    public DataRecord get(DataRecord keyRecord) {
        if (keyFields == null) {
            throw new RuntimeException("Set lookup key first!!!!");
        }
        // Build a "point" interval from the key fields of keyRecord.
        // note: getLenght() is the (misspelled) RecordKey API method.
        for (int i = 0; i < lookupKey.getLenght(); i++) {
            tmpRecord.getField(2 * i + 1).setValue(keyRecord.getField(keyFields[i]));
            tmpRecord.getField(2 * (i + 1)).setValue(keyRecord.getField(keyFields[i]));
        }
        return get();
    }

    /**
     * Finds all records ordered at or after the point interval prepared by
     * get(Object[]) or get(DataRecord), stores the view in subTable and returns
     * the first matching interval (or null).
     */
    private DataRecord get() {
        subTable = lookupTable.tailSet(tmpRecord);
        subTableIterator = subTable.iterator();
        numFound = 0;
        return getNext();
    }

    /* (non-Javadoc)
     * @see org.jetel.data.lookup.LookupTable#getMetadata()
     */
    public DataRecordMetadata getMetadata() {
        return metadata;
    }

    /* (non-Javadoc)
     * @see org.jetel.data.lookup.LookupTable#getNext()
     */
    public DataRecord getNext() {
        // FIX: iterative scan over the candidates. The original recursed once per
        // non-matching interval, which could overflow the stack on large tables.
        while (subTableIterator != null && subTableIterator.hasNext()) {
            tmp = subTableIterator.next();
            if (containsLookupValues(tmp)) {
                numFound++;
                return tmp;
            }
        }
        return null;
    }

    /**
     * @return true when every prepared lookup value lies inside the candidate's
     *         interval (start &lt;= value &lt;= end for every start/end field pair)
     */
    private boolean containsLookupValues(DataRecord candidate) {
        for (int i = 1; i < candidate.getNumFields(); i += 2) {
            if (!(tmpRecord.getField(i).compareTo(candidate.getField(i)) > -1
                    && tmpRecord.getField(i + 1).compareTo(candidate.getField(i + 1)) < 1)) {
                return false;
            }
        }
        return true;
    }

    /* (non-Javadoc)
     * @see org.jetel.data.lookup.LookupTable#getNumFound()
     */
    public int getNumFound() {
        int alreadyFound = numFound;
        // Exhaust the remaining matches so numFound holds the total count.
        while (getNext() != null) {
            // intentionally empty
        }
        int totalFound = numFound;  // renamed: the old local shadowed the field 'tmp'
        // Rewind and replay to restore the iterator to its previous position.
        subTableIterator = subTable.iterator();
        numFound = 0;  // FIX: reset before replay; numFound was double-counted before
        for (int i = 0; i < alreadyFound; i++) {
            getNext();
        }
        return totalFound;
    }

    /* (non-Javadoc)
     * @see org.jetel.data.lookup.LookupTable#put(java.lang.Object, org.jetel.data.DataRecord)
     */
    public boolean put(Object key, DataRecord data) {
        lookupTable.add(data);
        return true;
    }

    /* (non-Javadoc)
     * @see org.jetel.data.lookup.LookupTable#remove(java.lang.Object)
     */
    public boolean remove(Object key) {
        if (key instanceof DataRecord) {
            return lookupTable.remove(key);
        } else {
            throw new IllegalArgumentException("Requires key parameter of type " + DataRecord.class.getName());
        }
    }

    /* (non-Javadoc)
     * @see org.jetel.data.lookup.LookupTable#setLookupKey(java.lang.Object)
     */
    public void setLookupKey(Object key) {
        if (key instanceof RecordKey) {
            this.lookupKey = ((RecordKey) key);
            keyFields = lookupKey.getKeyFields();
        } else {
            throw new RuntimeException("Incompatible Object type specified as lookup key: " + key.getClass().getName());
        }
    }

    /* (non-Javadoc)
     * @see java.lang.Iterable#iterator()
     */
    public Iterator<DataRecord> iterator() {
        return lookupTable.iterator();
    }

    /**
     * Comparator for the special records defining the range lookup table.
     * It compares odd and even fields of two records using the RecordComparator class.
     *
     * Intervals are equal if their start and end points are equal.
     * Interval o2 is after interval o1 if o1 is a subinterval of o2, or the start of o2 is
     * after the start of o1 and the end of o2 is after the end of o1:
     * startComparison              endComparison            intervalComparison
     * o1.start.compareTo(o2.start) o1.end.compareTo(o2.end) o1.compareTo(o2)
     * -1                           -1                       -1
     * -1                            0                        1 (o2 is subinterval of o1)
     * -1                            1                        1 (o2 is subinterval of o1)
     *  0                           -1                       -1 (o1 is subinterval of o2)
     *  0                            0                        0 (equal)
     *  0                            1                        1 (o2 is subinterval of o1)
     *  1                           -1                       -1 (o1 is subinterval of o2)
     *  1                            0                       -1 (o1 is subinterval of o2)
     *  1                            1                        1
     *
     * @see RecordComparator
     */
    private class IntervalRecordComparator implements Comparator<DataRecord> {

        RecordComparator[] startComparator; // comparators for odd fields (interval starts)
        RecordComparator[] endComparator;   // comparators for even fields (interval ends)

        /**
         * Constructor.
         *
         * @param metadata metadata of records which define the lookup table
         * @param collator collator for comparing string data fields (may be null)
         */
        public IntervalRecordComparator(DataRecordMetadata metadata, RuleBasedCollator collator) {
            // Field 0 is the interval name; the remaining fields come in start/end pairs.
            startComparator = new RecordComparator[(metadata.getNumFields() - 1) / 2];
            endComparator = new RecordComparator[(metadata.getNumFields() - 1) / 2];
            for (int i = 0; i < startComparator.length; i++) {
                startComparator[i] = new RecordComparator(new int[]{2 * i + 1}, collator);
                endComparator[i] = new RecordComparator(new int[]{2 * (i + 1)}, collator);
            }
        }
        // FIX: removed the unused IntervalRecordComparator(int[] keyFields) constructor —
        // it ignored its parameter and merely delegated with the outer field; no callers.

        /* (non-Javadoc)
         * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
         */
        public int compare(DataRecord o1, DataRecord o2) {
            // FIX: comparison results are locals now; they were instance fields mutated
            // on every call, making the comparator non-reentrant.
            for (int i = 0; i < startComparator.length; i++) {
                int startComparison = startComparator[i].compare(o1, o2);
                int endComparison = endComparator[i].compare(o1, o2);
                if (endComparison == -1) {
                    return -1;
                }
                if (!(startComparison == 0 && endComparison == 0)) {
                    // end is 0 or 1 here; see the ordering table in the class javadoc.
                    return (startComparison == 1 && endComparison == 0) ? -1 : 1;
                }
            }
            return 0;
        }
    }
}
package com.intellij.compiler.impl; import com.intellij.CommonBundle; import com.intellij.analysis.AnalysisScope; import com.intellij.compiler.*; import com.intellij.compiler.make.CacheCorruptedException; import com.intellij.compiler.make.DependencyCache; import com.intellij.compiler.make.MakeUtil; import com.intellij.compiler.progress.CompilerProgressIndicator; import com.intellij.javaee.module.J2EEModuleUtilEx; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.compiler.*; import com.intellij.openapi.compiler.Compiler; import com.intellij.openapi.compiler.ex.CompilerPathsEx; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileTypes.FileTypeManager; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.module.ModuleType; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.ProjectJdk; import com.intellij.openapi.roots.*; import com.intellij.openapi.roots.ui.configuration.ClasspathEditor; import com.intellij.openapi.roots.ui.configuration.ContentEntriesEditor; import com.intellij.openapi.roots.ui.configuration.ModulesConfigurator; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vfs.*; import com.intellij.openapi.wm.StatusBar; import com.intellij.openapi.wm.WindowManager; import com.intellij.packageDependencies.DependenciesBuilder; import 
com.intellij.packageDependencies.ForwardDependenciesBuilder; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.PsiCompiledElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.util.ProfilingUtil; import com.intellij.util.containers.StringInterner; import gnu.trove.THashMap; import gnu.trove.THashSet; import gnu.trove.TObjectProcedure; import org.jetbrains.annotations.NonNls; import java.io.*; import java.util.*; public class CompileDriver { private static final Logger LOG = Logger.getInstance("#com.intellij.compiler.impl.CompileDriver"); private final Project myProject; private final Map<Compiler,Object> myCompilerToCacheMap = new THashMap<Compiler, Object>(); private Map<Pair<Compiler, Module>, VirtualFile> myGenerationCompilerModuleToOutputDirMap; private final StringInterner myStringInterner = new StringInterner(); private String myCachesDirectoryPath; private TreeBasedPathsSet myOutputFilesOnDisk = null; private boolean myShouldClearOutputDirectory; private Map<Module, String> myModuleOutputPaths = new HashMap<Module, String>(); private Map<Module, String> myModuleTestOutputPaths = new HashMap<Module, String>(); private ProjectRootManager myProjectRootManager; private static final @NonNls String VERSION_FILE_NAME = "version.dat"; private static final @NonNls String LOCK_FILE_NAME = "in_progress.dat"; private final FileProcessingCompilerAdapterFactory myProcessingCompilerAdapterFactory; private final FileProcessingCompilerAdapterFactory myPackagingCompilerAdapterFactory; final ProjectCompileScope myProjectCompileScope; public CompileDriver(Project project) { myProject = project; myCachesDirectoryPath = CompilerPaths.getCacheStoreDirectory(myProject).getPath().replace('/', File.separatorChar); myShouldClearOutputDirectory = CompilerWorkspaceConfiguration.getInstance(myProject).CLEAR_OUTPUT_DIRECTORY; myGenerationCompilerModuleToOutputDirMap = new com.intellij.util.containers.HashMap<Pair<Compiler, 
Module>, VirtualFile>(); final GeneratingCompiler[] compilers = CompilerManager.getInstance(myProject).getCompilers(GeneratingCompiler.class); ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { final Module[] allModules = ModuleManager.getInstance(myProject).getModules(); for (GeneratingCompiler compiler : compilers) { for (final Module module : allModules) { final String path = getGenerationOutputPath(compiler, module); final File file = new File(path); final VirtualFile vFile; if (file.mkdirs()) { vFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file); } else { vFile = LocalFileSystem.getInstance().findFileByPath(path); } Pair<Compiler, Module> pair = new Pair<Compiler, Module>(compiler, module); myGenerationCompilerModuleToOutputDirMap.put(pair, vFile); } } } }); myProjectRootManager = ProjectRootManager.getInstance(myProject); myProcessingCompilerAdapterFactory = new FileProcessingCompilerAdapterFactory() { public FileProcessingCompilerAdapter create(CompileContext context, FileProcessingCompiler compiler) { return new FileProcessingCompilerAdapter(context, compiler); } }; myPackagingCompilerAdapterFactory = new FileProcessingCompilerAdapterFactory() { public FileProcessingCompilerAdapter create(CompileContext context, FileProcessingCompiler compiler) { return new PackagingCompilerAdapter(context, (PackagingCompiler)compiler); } }; myProjectCompileScope = new ProjectCompileScope(myProject); } public void rebuild(CompileStatusNotification callback) { doRebuild(callback, null, true, addAdditionalRoots(myProjectCompileScope)); } public void make(CompileStatusNotification callback) { make(myProjectCompileScope, callback); } public void make(Project project, Module[] modules, CompileStatusNotification callback) { make(new ModuleCompileScope(project, modules, true), callback); } public void make(Module module, CompileStatusNotification callback) { make(new ModuleCompileScope(module, true), callback); } public void 
make(CompileScope scope, CompileStatusNotification callback) { scope = addAdditionalRoots(scope); if (validateCompilerConfiguration(scope, false)) { startup(scope, false, false, callback, null, true, false); } } public void compile(CompileScope scope, CompileStatusNotification callback, boolean trackDependencies) { if (trackDependencies) { scope = new TrackDependenciesScope(scope); } if (validateCompilerConfiguration(scope, false)) { startup(scope, false, true, callback, null, true, trackDependencies); } } private static class CompileStatus { final int CACHE_FORMAT_VERSION; final boolean COMPILATION_IN_PROGRESS; public CompileStatus(int cacheVersion, boolean isCompilationInProgress) { CACHE_FORMAT_VERSION = cacheVersion; COMPILATION_IN_PROGRESS = isCompilationInProgress; } } private CompileStatus readStatus() { final boolean isInProgress = new File(myCachesDirectoryPath, LOCK_FILE_NAME).exists(); int version = -1; try { final File versionFile = new File(myCachesDirectoryPath, VERSION_FILE_NAME); DataInputStream in = new DataInputStream(new FileInputStream(versionFile)); try { version = in.readInt(); } finally { in.close(); } } catch (FileNotFoundException e) { // ignore } catch (IOException e) { LOG.info(e); // may happen in case of IDEA crashed and the file is not written properly return null; } return new CompileStatus(version, isInProgress); } private void writeStatus(CompileStatus status, CompileContext context) { final File statusFile = new File(myCachesDirectoryPath, VERSION_FILE_NAME); final File lockFile = new File(myCachesDirectoryPath, LOCK_FILE_NAME); try { statusFile.createNewFile(); DataOutputStream out = new DataOutputStream(new FileOutputStream(statusFile)); try { out.writeInt(status.CACHE_FORMAT_VERSION); } finally { out.close(); } if (status.COMPILATION_IN_PROGRESS) { lockFile.createNewFile(); } else { lockFile.delete(); } } catch (IOException e) { context.addMessage(CompilerMessageCategory.ERROR, CompilerBundle.message("compiler.error.exception", 
e.getMessage()), null, -1, -1); } } private void doRebuild(CompileStatusNotification callback, CompilerMessage message, final boolean checkCachesVersion, final CompileScope compileScope) { if (validateCompilerConfiguration(compileScope, true)) { startup(compileScope, true, false, callback, message, checkCachesVersion, false); } } private CompileScope addAdditionalRoots(CompileScope originalScope) { CompileScope scope = originalScope; for (final Pair<Compiler, Module> pair : myGenerationCompilerModuleToOutputDirMap.keySet()) { final VirtualFile outputDir = myGenerationCompilerModuleToOutputDirMap.get(pair); scope = new CompositeScope(scope, new FileSetCompileScope(new VirtualFile[]{outputDir}, new Module[]{pair.getSecond()})); } CompileScope additionalJ2eeScope = com.intellij.javaee.make.MakeUtil.getInstance().getOutOfSourceJ2eeCompileScope(scope); if (additionalJ2eeScope != null) { scope = new CompositeScope(scope, additionalJ2eeScope); } return scope; } private void startup(final CompileScope scope, final boolean isRebuild, final boolean forceCompile, final CompileStatusNotification callback, CompilerMessage message, final boolean checkCachesVersion, final boolean trackDependencies) { final CompilerProgressIndicator indicator = new CompilerProgressIndicator( myProject, CompilerWorkspaceConfiguration.getInstance(myProject).COMPILE_IN_BACKGROUND, forceCompile ? 
CompilerBundle.message("compiler.content.name.compile") : CompilerBundle.message("compiler.content.name.make")); WindowManager.getInstance().getStatusBar(myProject).setInfo(""); final DependencyCache dependencyCache = new DependencyCache(myCachesDirectoryPath, myProject); final CompileContextImpl compileContext = new CompileContextImpl(myProject, indicator, scope, dependencyCache, this, !isRebuild && !forceCompile); for (Pair<Compiler, Module> pair : myGenerationCompilerModuleToOutputDirMap.keySet()) { compileContext.assignModule(myGenerationCompilerModuleToOutputDirMap.get(pair), pair.getSecond()); } if (message != null) { compileContext.addMessage(message); } FileDocumentManager.getInstance().saveAllDocuments(); final Thread compileThread = new Thread("Compile Thread") { public void run() { synchronized (CompilerManager.getInstance(myProject)) { ProgressManager.getInstance().runProcess(new Runnable() { public void run() { try { if (LOG.isDebugEnabled()) { LOG.debug("COMPILATION STARTED"); } doCompile(compileContext, isRebuild, forceCompile, callback, checkCachesVersion, trackDependencies); } finally { if (LOG.isDebugEnabled()) { LOG.debug("COMPILATION FINISHED"); } } } }, compileContext.getProgressIndicator()); } } }; compileThread.setPriority(Thread.NORM_PRIORITY); compileThread.start(); } private void doCompile(final CompileContextImpl compileContext, final boolean isRebuild, final boolean forceCompile, final CompileStatusNotification callback, final boolean checkCachesVersion, final boolean trackDependencies) { ExitStatus status = ExitStatus.ERRORS; boolean wereExceptions = false; try { compileContext.getProgressIndicator().pushState(); if (checkCachesVersion) { final CompileStatus compileStatus = readStatus(); if (compileStatus == null) { compileContext.requestRebuildNextTime(CompilerBundle.message("error.compiler.caches.corrupted")); } else if (compileStatus.CACHE_FORMAT_VERSION != -1 && compileStatus.CACHE_FORMAT_VERSION != 
CompilerConfiguration.DEPENDENCY_FORMAT_VERSION) { compileContext.requestRebuildNextTime(CompilerBundle.message("error.caches.old.format")); } else if (compileStatus.COMPILATION_IN_PROGRESS) { compileContext.requestRebuildNextTime(CompilerBundle.message("error.previous.compilation.failed")); } if (compileContext.isRebuildRequested()) { return; } } writeStatus(new CompileStatus(CompilerConfiguration.DEPENDENCY_FORMAT_VERSION, true), compileContext); if (compileContext.getMessageCount(CompilerMessageCategory.ERROR) > 0) { return; } status = doCompile(compileContext, isRebuild, forceCompile, trackDependencies, getAllOutputDirectories()); } catch (Throwable ex) { wereExceptions = true; throw new RuntimeException(ex); } finally { dropDependencyCache(compileContext); compileContext.getProgressIndicator().popState(); final ExitStatus _status = status; if (compileContext.isRebuildRequested()) { ApplicationManager.getApplication().invokeLater(new Runnable() { public void run() { doRebuild( callback, new CompilerMessageImpl(myProject, CompilerMessageCategory.INFORMATION, compileContext.getRebuildReason(), null, -1, -1, null), false, compileContext.getCompileScope() ); } }, ModalityState.NON_MMODAL); } else { writeStatus(new CompileStatus(CompilerConfiguration.DEPENDENCY_FORMAT_VERSION, wereExceptions), compileContext); ApplicationManager.getApplication().invokeLater(new Runnable() { public void run() { final int errorCount = compileContext.getMessageCount(CompilerMessageCategory.ERROR); final int warningCount = compileContext.getMessageCount(CompilerMessageCategory.WARNING); final String statusMessage = createStatusMessage(_status, warningCount, errorCount); final StatusBar statusBar = WindowManager.getInstance().getStatusBar(myProject); if (statusBar != null) { // because this code is in invoke later, the code may work for already closed project // in case another project was opened in the frame while the compiler was working (See SCR# 28591) 
statusBar.setInfo(statusMessage); } if (_status != ExitStatus.UP_TO_DATE && compileContext.getMessageCount(null) > 0) { compileContext.addMessage(CompilerMessageCategory.INFORMATION, statusMessage, null, -1, -1); } if (callback != null) { callback.finished(_status == ExitStatus.CANCELLED, errorCount, warningCount, compileContext); } ProfilingUtil.operationFinished("make"); } }, ModalityState.NON_MMODAL); } } } private static String createStatusMessage(final ExitStatus status, final int warningCount, final int errorCount) { if (status == ExitStatus.CANCELLED) { return CompilerBundle.message("status.compilation.aborted"); } if (status == ExitStatus.UP_TO_DATE) { return CompilerBundle.message("status.all.up.to.date"); } if (status == ExitStatus.SUCCESS) { return warningCount > 0 ? CompilerBundle.message("status.compilation.completed.successfully.with.warnings", warningCount) : CompilerBundle.message("status.compilation.completed.successfully"); } return CompilerBundle.message("status.compilation.completed.successfully.with.warnings.and.errors", errorCount, warningCount); } private static class ExitStatus { private String myName; private ExitStatus(@NonNls String name) { myName = name; } public String toString() { return myName; } public static final ExitStatus CANCELLED = new ExitStatus("CANCELLED"); public static final ExitStatus ERRORS = new ExitStatus("ERRORS"); public static final ExitStatus SUCCESS = new ExitStatus("SUCCESS"); public static final ExitStatus UP_TO_DATE = new ExitStatus("UP_TO_DATE"); } private static class ExitException extends Exception{ private final ExitStatus myStatus; public ExitException(ExitStatus status) { myStatus = status; } public ExitStatus getExitStatus() { return myStatus; } } private ExitStatus doCompile(CompileContextImpl context, boolean isRebuild, final boolean forceCompile, final boolean trackDependencies, final Set<File> outputDirectories) { try { if (isRebuild) { deleteAll(context, outputDirectories); if 
(context.getMessageCount(CompilerMessageCategory.ERROR) > 0) { return ExitStatus.ERRORS; } } try { context.getProgressIndicator().pushState(); if (!executeCompileTasks(context, true)) { return ExitStatus.CANCELLED; } } finally { context.getProgressIndicator().popState(); } if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) { return ExitStatus.ERRORS; } if (!isRebuild) { // compile tasks may change the contents of the output dirs so it is more safe to gather output files here context.getProgressIndicator().setText(CompilerBundle.message("progress.scanning.output")); myOutputFilesOnDisk = new TreeBasedPathsSet(myStringInterner, '/'); CompilerPathsEx.visitFiles(context.getAllOutputDirectories(), new CompilerPathsEx.FileVisitor() { protected void acceptFile(VirtualFile file, String fileRoot, String filePath) { if (!(file.getFileSystem() instanceof JarFileSystem)){ myOutputFilesOnDisk.add(filePath); } } }); } boolean didSomething = false; final CompilerManager compilerManager = CompilerManager.getInstance(myProject); try { didSomething |= generateSources(compilerManager, context, forceCompile); didSomething |= invokeFileProcessingCompilers(compilerManager, context, SourceInstrumentingCompiler.class, myProcessingCompilerAdapterFactory, forceCompile, true); didSomething |= translate(context, compilerManager, forceCompile, isRebuild, trackDependencies, outputDirectories); didSomething |= invokeFileProcessingCompilers(compilerManager, context, ClassInstrumentingCompiler.class, myProcessingCompilerAdapterFactory, isRebuild, false); // explicitly passing forceCompile = false because in scopes that is narrower than ProjectScope it is impossible // to understand whether the class to be processed is in scope or not. Otherwise compiler may process its items even if // there were changes in completely independent files. 
didSomething |= invokeFileProcessingCompilers(compilerManager, context, ClassPostProcessingCompiler.class, myProcessingCompilerAdapterFactory, isRebuild, false); didSomething |= invokeFileProcessingCompilers(compilerManager, context, PackagingCompiler.class, myPackagingCompilerAdapterFactory, isRebuild, true); didSomething |= invokeFileProcessingCompilers(compilerManager, context, Validator.class, myProcessingCompilerAdapterFactory, forceCompile, true); } catch (ExitException e) { return e.getExitStatus(); } finally { // drop in case it has not been dropped yet. dropDependencyCache(context); } try { context.getProgressIndicator().pushState(); if (!executeCompileTasks(context, false)) { return ExitStatus.CANCELLED; } } finally { context.getProgressIndicator().popState(); } if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) { return ExitStatus.ERRORS; } if (!didSomething) { return ExitStatus.UP_TO_DATE; } return ExitStatus.SUCCESS; } catch (ProcessCanceledException e) { return ExitStatus.CANCELLED; } } private static void dropDependencyCache(final CompileContextImpl context) { context.getProgressIndicator().pushState(); try { context.getProgressIndicator().setText(CompilerBundle.message("progress.saving.caches")); context.getDependencyCache().dispose(); } finally { context.getProgressIndicator().popState(); } } private boolean generateSources(final CompilerManager compilerManager, CompileContextImpl context, final boolean forceCompile) throws ExitException{ boolean didSomething = false; final SourceGeneratingCompiler[] sourceGenerators = compilerManager.getCompilers(SourceGeneratingCompiler.class); for (final SourceGeneratingCompiler sourceGenerator : sourceGenerators) { if (context.getProgressIndicator().isCanceled()) { throw new ExitException(ExitStatus.CANCELLED); } final boolean generatedSomething = generateOutput(context, sourceGenerator, forceCompile); dropInternalCache(sourceGenerator); if (context.getMessageCount(CompilerMessageCategory.ERROR) > 
0) { throw new ExitException(ExitStatus.ERRORS); } didSomething |= generatedSomething; } return didSomething; } private boolean translate(final CompileContextImpl context, final CompilerManager compilerManager, final boolean forceCompile, boolean isRebuild, final boolean trackDependencies, final Set<File> outputDirectories) throws ExitException { boolean didSomething = false; final TranslatingCompiler[] translators = compilerManager.getCompilers(TranslatingCompiler.class); final VfsSnapshot snapshot = ApplicationManager.getApplication().runReadAction(new Computable<VfsSnapshot>() { public VfsSnapshot compute() { return new VfsSnapshot(myStringInterner, context.getCompileScope().getFiles(null, true)); } }); for (final TranslatingCompiler translator : translators) { if (context.getProgressIndicator().isCanceled()) { throw new ExitException(ExitStatus.CANCELLED); } final boolean compiledSomething = compileSources(context, snapshot, translator, forceCompile, isRebuild, trackDependencies, outputDirectories); // free memory earlier to leave other compilers more space dropDependencyCache(context); dropInternalCache(translator); if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) { throw new ExitException(ExitStatus.ERRORS); } didSomething |= compiledSomething; } return didSomething; } private static interface FileProcessingCompilerAdapterFactory { FileProcessingCompilerAdapter create(CompileContext context, FileProcessingCompiler compiler); } private boolean invokeFileProcessingCompilers(final CompilerManager compilerManager, CompileContextImpl context, Class<? 
extends FileProcessingCompiler> fileProcessingCompilerClass, FileProcessingCompilerAdapterFactory factory, boolean forceCompile, final boolean checkScope) throws ExitException { LOG.assertTrue(FileProcessingCompiler.class.isAssignableFrom(fileProcessingCompilerClass)); boolean didSomething = false; final FileProcessingCompiler[] compilers = compilerManager.getCompilers(fileProcessingCompilerClass); if (compilers.length > 0) { try { for (final FileProcessingCompiler compiler : compilers) { if (context.getProgressIndicator().isCanceled()) { throw new ExitException(ExitStatus.CANCELLED); } final boolean processedSomething = processFiles(factory.create(context, compiler), forceCompile, checkScope); dropInternalCache(compiler); if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) { throw new ExitException(ExitStatus.ERRORS); } didSomething |= processedSomething; } } catch(ProcessCanceledException e) { throw e; } catch(ExitException e) { throw e; } catch (Exception e) { context.addMessage(CompilerMessageCategory.ERROR, CompilerBundle.message("compiler.error.exception", e.getMessage()), null, -1, -1); LOG.error(e); } } return didSomething; } private static Map<Module, Set<GeneratingCompiler.GenerationItem>> buildModuleToGenerationItemMap( GeneratingCompiler.GenerationItem[] items) { final Map<Module, Set<GeneratingCompiler.GenerationItem>> map = new THashMap<Module, Set<GeneratingCompiler.GenerationItem>>(); for (GeneratingCompiler.GenerationItem item : items) { Module module = item.getModule(); LOG.assertTrue(module != null); Set<GeneratingCompiler.GenerationItem> itemSet = map.get(module); if (itemSet == null) { itemSet = new HashSet<GeneratingCompiler.GenerationItem>(); map.put(module, itemSet); } itemSet.add(item); } return map; } private void deleteAll(final CompileContext context, Set<File> outputDirectories) { context.getProgressIndicator().pushState(); try { final boolean isTestMode = ApplicationManager.getApplication().isUnitTestMode(); final 
Compiler[] allCompilers = CompilerManager.getInstance(myProject).getCompilers(Compiler.class); context.getProgressIndicator().setText(CompilerBundle.message("progress.clearing.output")); for (final Compiler compiler : allCompilers) { if (compiler instanceof GeneratingCompiler) { final StateCache<ValidityState> cache = getGeneratingCompilerCache((GeneratingCompiler)compiler); if (!myShouldClearOutputDirectory) { final Iterator<String> urlIterator = cache.getUrlsIterator(); while(urlIterator.hasNext()) { new File(VirtualFileManager.extractPath(urlIterator.next())).delete(); } } cache.wipe(); } else if (compiler instanceof FileProcessingCompiler) { final FileProcessingCompilerStateCache cache = getFileProcessingCompilerCache((FileProcessingCompiler)compiler); cache.wipe(); } else if (compiler instanceof TranslatingCompiler) { final TranslatingCompilerStateCache cache = getTranslatingCompilerCache((TranslatingCompiler)compiler); if (!myShouldClearOutputDirectory) { final Iterator<String> urlIterator = cache.getOutputUrlsIterator(); while(urlIterator.hasNext()) { final String outputPath = urlIterator.next(); final String sourceUrl = cache.getSourceUrl(outputPath); if (sourceUrl == null || !FileUtil.pathsEqual(outputPath, VirtualFileManager.extractPath(sourceUrl))) { new File(outputPath).delete(); if (isTestMode) { CompilerManagerImpl.addDeletedPath(outputPath); } } } } cache.wipe(); } } if (myShouldClearOutputDirectory) { clearOutputDirectories(outputDirectories); } else { // refresh is still required pruneEmptyDirectories(outputDirectories); // to avoid too much files deleted events CompilerUtil.doRefresh(new Runnable() { public void run() { final VirtualFile[] outputDirectories = CompilerPathsEx.getOutputDirectories(ModuleManager.getInstance(myProject).getModules()); for (final VirtualFile outputDirectory : outputDirectories) { outputDirectory.refresh(false, true); } } }); } clearCompilerSystemDirectory(context); } finally { context.getProgressIndicator().popState(); 
} } private static void pruneEmptyDirectories(final Set<File> directories) { for (File directory : directories) { doPrune(directory); } } private static boolean doPrune(final File directory) { final File[] files = directory.listFiles(); boolean isEmpty = true; for (File file : files) { if (file.isDirectory()) { if (doPrune(file)) { file.delete(); } else { isEmpty = false; } } else { isEmpty = false; } } return isEmpty; } private Set<File> getAllOutputDirectories() { final Set<File> outputDirs = new THashSet<File>(); ApplicationManager.getApplication().runReadAction(new Runnable() { public void run() { final VirtualFile[] outputDirectories = CompilerPathsEx.getOutputDirectories(ModuleManager.getInstance(myProject).getModules()); for (final VirtualFile outputDirectory : outputDirectories) { final File directory = VfsUtil.virtualToIoFile(outputDirectory); outputDirs.add(directory); } } }); return outputDirs; } private static void clearOutputDirectories(final Set<File> outputDirectories) { // do not delete directories themselves, or we'll get rootsChanged() otherwise Collection<File> filesToDelete = new ArrayList<File>(outputDirectories.size()*2); for (File outputDirectory : outputDirectories) { File[] files = outputDirectory.listFiles(); if (files != null) { filesToDelete.addAll(Arrays.asList(files)); } } FileUtil.asyncDelete(filesToDelete); // ensure output directories exist, create and refresh if not exist final List<File> createdFiles = new ArrayList<File>(outputDirectories.size()); for (final File file : outputDirectories) { if (file.mkdirs()) { createdFiles.add(file); } } CompilerUtil.refreshIOFiles(createdFiles); } private void clearCompilerSystemDirectory(final CompileContext context) { final File[] children = new File(myCachesDirectoryPath).listFiles(); if (children != null) { for (final File child : children) { final boolean deleteOk = FileUtil.delete(child); if (!deleteOk) { context.addMessage(CompilerMessageCategory.ERROR, 
CompilerBundle.message("compiler.error.failed.to.delete", child.getPath()), null, -1, -1);
        }
      }
    }
    // Also clear the per-(compiler, module) generation output directories; this requires a read
    // action because the directory map is keyed by project model objects.
    ApplicationManager.getApplication().runReadAction(new Runnable() {
      public void run() {
        for (Pair<Compiler, Module> pair : myGenerationCompilerModuleToOutputDirMap.keySet()) {
          final VirtualFile dir = myGenerationCompilerModuleToOutputDirMap.get(pair);
          final File[] files = VfsUtil.virtualToIoFile(dir).listFiles();
          if (files != null) {
            for (final File file : files) {
              final boolean deleteOk = FileUtil.delete(file);
              if (!deleteOk) {
                // report each file that could not be removed
                context.addMessage(CompilerMessageCategory.ERROR,
                                   CompilerBundle.message("compiler.error.failed.to.delete", file.getPath()),
                                   null, -1, -1);
              }
            }
          }
        }
      }
    });
  }

  /**
   * Returns the generation output directory registered for the given (compiler, module) pair,
   * or null if none was registered.
   */
  private VirtualFile getGenerationOutputDir(final GeneratingCompiler compiler, final Module module) {
    return myGenerationCompilerModuleToOutputDirMap.get(new Pair<Compiler, Module>(compiler, module));
  }

  // Builds a module-unique output path for generated sources: the compiler's generated-data
  // directory plus "<module name with spaces replaced>.<hex hash of the module file path>"
  // (the expression continues on the following source line).
  private static String getGenerationOutputPath(GeneratingCompiler compiler, Module module) {
    final String generatedCompilerDirectoryPath = CompilerPaths.getGeneratedDataDirectory(module.getProject(), compiler).getPath();
    return generatedCompilerDirectoryPath.replace(File.separatorChar, '/') + "/" + (module.getName().replace(' ', '_') + "."
+ Integer.toHexString(module.getModuleFilePath().hashCode())); } private boolean generateOutput(final CompileContextImpl context, final GeneratingCompiler compiler, final boolean forceGenerate) { final GeneratingCompiler.GenerationItem[] allItems = compiler.getGenerationItems(context); final List<GeneratingCompiler.GenerationItem> toGenerate = new ArrayList<GeneratingCompiler.GenerationItem>(); final StateCache<ValidityState> cache = getGeneratingCompilerCache(compiler); final Set<String> pathsToRemove = new HashSet<String>(Arrays.asList(cache.getUrls())); final Map<GeneratingCompiler.GenerationItem, String> itemToOutputPathMap = new THashMap<GeneratingCompiler.GenerationItem, String>(); ApplicationManager.getApplication().runReadAction(new Runnable() { public void run() { for (final GeneratingCompiler.GenerationItem item : allItems) { final Module itemModule = item.getModule(); final String outputDirPath = getGenerationOutputPath(compiler, itemModule); final String outputPath = outputDirPath + "/" + item.getPath(); itemToOutputPathMap.put(item, outputPath); final ValidityState savedState = cache.getState(outputPath); if (forceGenerate || savedState == null || !savedState.equalsTo(item.getValidityState())) { toGenerate.add(item); } else { pathsToRemove.remove(outputPath); } } } }); final List<File> filesToRefresh = new ArrayList<File>(); final List<File> generatedFiles = new ArrayList<File>(); final List<Module> affectedModules = new ArrayList<Module>(); try { if (pathsToRemove.size() > 0) { context.getProgressIndicator().pushState(); context.getProgressIndicator().setText(CompilerBundle.message("progress.synchronizing.output.directory")); for (final String path : pathsToRemove) { final File file = new File(path); final boolean deleted = file.delete(); if (deleted) { cache.remove(path); filesToRefresh.add(file); } } context.getProgressIndicator().popState(); } Map<Module, Set<GeneratingCompiler.GenerationItem>> moduleToItemMap = 
buildModuleToGenerationItemMap(toGenerate.toArray(new GeneratingCompiler.GenerationItem[toGenerate.size()])); List<Module> modules = new ArrayList<Module>(moduleToItemMap.size()); for (final Module module : moduleToItemMap.keySet()) { modules.add(module); } ModuleCompilerUtil.sortModules(myProject, modules); for (final Module module : modules) { context.getProgressIndicator().pushState(); try { final Set<GeneratingCompiler.GenerationItem> items = moduleToItemMap.get(module); if (items != null && items.size() > 0) { final VirtualFile outputDir = getGenerationOutputDir(compiler, module); final GeneratingCompiler.GenerationItem[] successfullyGenerated = compiler.generate(context, items.toArray(new GeneratingCompiler.GenerationItem[items.size()]), outputDir); context.getProgressIndicator().setText(CompilerBundle.message("progress.updating.caches")); if (successfullyGenerated.length > 0) { affectedModules.add(module); } for (final GeneratingCompiler.GenerationItem item : successfullyGenerated) { final String fullOutputPath = itemToOutputPathMap.get(item); cache.update(fullOutputPath, item.getValidityState()); final File file = new File(fullOutputPath); filesToRefresh.add(file); generatedFiles.add(file); } } } finally { context.getProgressIndicator().popState(); } } } finally { context.getProgressIndicator().pushState(); CompilerUtil.refreshIOFiles(filesToRefresh); if (forceGenerate && generatedFiles.size() > 0) { ApplicationManager.getApplication().runReadAction(new Runnable() { public void run() { List<VirtualFile> vFiles = new ArrayList<VirtualFile>(generatedFiles.size()); for (File generatedFile : generatedFiles) { final VirtualFile vFile = LocalFileSystem.getInstance().findFileByIoFile(generatedFile); if (vFile != null) { vFiles.add(vFile); } } final FileSetCompileScope additionalScope = new FileSetCompileScope( vFiles.toArray(new VirtualFile[vFiles.size()]), affectedModules.toArray(new Module[affectedModules.size()]) ); context.addScope(additionalScope); } }); } if 
(cache.isDirty()) { context.getProgressIndicator().setText(CompilerBundle.message("progress.saving.caches")); cache.save(); } context.getProgressIndicator().popState(); } return toGenerate.size() > 0 || filesToRefresh.size() > 0; } private boolean compileSources(final CompileContextImpl context, final VfsSnapshot snapshot, final TranslatingCompiler compiler, final boolean forceCompile, final boolean isRebuild, final boolean trackDependencies, final Set<File> outputDirectories) { final TranslatingCompilerStateCache cache = getTranslatingCompilerCache(compiler); final CompilerConfiguration compilerConfiguration = CompilerConfiguration.getInstance(myProject); context.getProgressIndicator().pushState(); final boolean[] wereFilesDeleted = new boolean[]{false}; final Set<VirtualFile> toCompile = new HashSet<VirtualFile>(); try { final Set<String> toDelete = new HashSet<String>(); final Set<String> urlsWithSourceRemoved = new HashSet<String>(); ApplicationManager.getApplication().runReadAction(new Runnable() { public void run() { findOutOfDateFiles(compiler, snapshot, forceCompile, cache, toCompile, context); if (trackDependencies && toCompile.size() > 0) { // should add dependent files final FileTypeManager fileTypeManager = FileTypeManager.getInstance(); final PsiManager psiManager = PsiManager.getInstance(myProject); final VirtualFile[] filesToCompile = toCompile.toArray(new VirtualFile[toCompile.size()]); Set<String> sourcesWithOutputRemoved = getSourcesWithOutputRemoved(cache); for (final VirtualFile file : filesToCompile) { if (fileTypeManager.getFileTypeByFile(file) == StdFileTypes.JAVA) { final PsiFile psiFile = psiManager.findFile(file); if (psiFile != null) { addDependentFiles(psiFile, toCompile, cache, snapshot, sourcesWithOutputRemoved, compiler, context); } } } } if (!isRebuild) { final ProgressIndicator progressIndicator = context.getProgressIndicator(); progressIndicator.pushState(); 
progressIndicator.setText(CompilerBundle.message("progress.searching.for.files.to.delete")); findFilesToDelete(snapshot, urlsWithSourceRemoved, cache, toCompile, context, toDelete, compilerConfiguration); progressIndicator.popState(); } } }); if (toDelete.size() > 0) { try { wereFilesDeleted[0] = syncOutputDir(urlsWithSourceRemoved, context, toDelete, cache, outputDirectories); } catch (CacheCorruptedException e) { LOG.info(e); context.requestRebuildNextTime(e.getMessage()); } } if (wereFilesDeleted[0] && toDelete.size() > 0) { CompilerUtil.refreshPaths(toDelete.toArray(new String[toDelete.size()])); } if ((wereFilesDeleted[0] || toCompile.size() > 0) && context.getMessageCount(CompilerMessageCategory.ERROR) == 0) { final TranslatingCompiler.ExitStatus exitStatus = compiler.compile(context, toCompile.toArray(new VirtualFile[toCompile.size()])); updateInternalCaches(cache, context, exitStatus.getSuccessfullyCompiled(), exitStatus.getFilesToRecompile()); } } finally { if (cache.isDirty()) { context.getProgressIndicator().setText(CompilerBundle.message("progress.saving.caches")); if (cache.isDirty()) { if (LOG.isDebugEnabled()) { LOG.debug("--Saving translating cache for compiler " + compiler.getDescription()); } cache.save(); if (LOG.isDebugEnabled()) { LOG.debug("--Done"); } } } context.getProgressIndicator().popState(); } return toCompile.size() > 0 || wereFilesDeleted[0]; } private Set<String> getSourcesWithOutputRemoved(TranslatingCompilerStateCache cache) { //final String[] outputUrls = cache.getOutputUrls(); final Set<String> set = new HashSet<String>(); for (Iterator<String> it = cache.getOutputUrlsIterator(); it.hasNext();) { String outputUrl = it.next(); if (!myOutputFilesOnDisk.contains(outputUrl)) { set.add(cache.getSourceUrl(outputUrl)); } } return set; } private void findFilesToDelete(VfsSnapshot snapshot, final Set<String> urlsWithSourceRemoved, final TranslatingCompilerStateCache cache, final Set<VirtualFile> toCompile, final CompileContextImpl 
context, final Set<String> toDelete, final CompilerConfiguration compilerConfiguration) { final List<String> toRemove = new ArrayList<String>(); final CompileScope scope = context.getCompileScope(); for (Iterator<String> it = cache.getOutputUrlsIterator(); it.hasNext();) { final String outputPath = it.next(); final String sourceUrl = cache.getSourceUrl(outputPath); final VirtualFile sourceFile = snapshot.getFileByUrl(sourceUrl); boolean needRecompile = false; boolean shouldDelete; if (myOutputFilesOnDisk.contains(outputPath)) { if (sourceFile == null) { shouldDelete = scope.belongs(sourceUrl); } else { if (toCompile.contains(sourceFile)) { // some crazy users store their resources (which is source file for us) directly in the output dir // we should not delete files which are both output and source files shouldDelete = !FileUtil.pathsEqual(outputPath, VirtualFileManager.extractPath(sourceUrl)); } else { final String currentOutputDir = getModuleOutputDirForFile(context, sourceFile); if (currentOutputDir != null) { final String className = cache.getClassName(outputPath); //noinspection HardCodedStringLiteral if (className == null || isUnderOutputDir(currentOutputDir, outputPath, className)) { shouldDelete = false; } else { // output for this source has been changed or the output dir was changed, need to recompile to the new output dir shouldDelete = true; needRecompile = true; } } else { shouldDelete = true; } } } } else { // output for this source has been deleted or the output dir was changed, need to recompile needRecompile = true; shouldDelete = true; // in case the output dir was changed, should delete from the previous location } if (shouldDelete) { toDelete.add(outputPath); } if (needRecompile) { if (sourceFile != null && scope.belongs(sourceUrl)) { if (!compilerConfiguration.isExcludedFromCompilation(sourceFile)) { toCompile.add(sourceFile); toRemove.add(outputPath); } } } if (sourceFile == null) { urlsWithSourceRemoved.add(outputPath); } } for (final String 
aToRemove : toRemove) {
      cache.remove(aToRemove);
    }
  }

  /**
   * Checks that {@code outputPath} is exactly {@code outputDir} + '/' + className-as-path + ".class".
   * The prefix comparison ignores case on case-insensitive file systems.
   */
  private static boolean isUnderOutputDir(final String outputDir, final String outputPath, final String className) {
    // length of the output-root prefix: full path minus "<className>.class" and one separator
    final int outputRootLen = outputPath.length() - className.length() - ".class".length() - 1;
    return (outputDir.length() == outputRootLen) && outputDir.regionMatches(!SystemInfo.isFileSystemCaseSensitive, 0, outputPath, 0, outputRootLen);
  }

  /**
   * Records compilation results in the translating-compiler cache: for every successfully
   * compiled item stores its output path, derived class name and source file; files that must
   * be recompiled are marked as modified. Runs inside a read action.
   */
  private static void updateInternalCaches(final TranslatingCompilerStateCache cache, final CompileContextImpl context, final TranslatingCompiler.OutputItem[] successfullyCompiled, final VirtualFile[] filesToRecompile) {
    ApplicationManager.getApplication().runReadAction(new Runnable() {
      public void run() {
        context.getProgressIndicator().setText(CompilerBundle.message("progress.updating.caches"));
        final FileTypeManager typeManager = FileTypeManager.getInstance();
        if (LOG.isDebugEnabled()) {
          LOG.debug("Updating internal caches: successfully compiled " + successfullyCompiled.length + " files; toRecompile: " + filesToRecompile.length + " files");
        }
        for (final TranslatingCompiler.OutputItem item : successfullyCompiled) {
          final String outputPath = item.getOutputPath();
          final VirtualFile sourceFile = item.getSourceFile();
          final String className;
          // a class name is only derivable for .java sources that produced an output path
          if (outputPath != null && StdFileTypes.JAVA.equals(typeManager.getFileTypeByFile(sourceFile))) {
            final String outputDir = item.getOutputRootDirectory();
            if (outputDir != null) {
              if (!FileUtil.startsWith(outputPath, outputDir)) {
                LOG.error(outputPath + " does not start with " + outputDir);
              }
              // convert the output-root-relative .class path into a qualified class name
              className = MakeUtil.relativeClassPathToQName(outputPath.substring(outputDir.length(), outputPath.length()), '/');
            }
            else {
              // outputDir might be null for package-info.java (package annotation)
              className = null;
            }
          }
          else {
            className = null;
          }
          if (LOG.isDebugEnabled()) {
            LOG.debug("Putting: [outputPath, className, sourceFile] = [" + outputPath + ";" + className + ";" + sourceFile.getPresentableUrl() + "]");
          }
          cache.update(outputPath, className,
sourceFile); } for (VirtualFile aFilesToRecompile : filesToRecompile) { cache.markAsModified(aFilesToRecompile); } } }); } private static boolean syncOutputDir(final Set<String> urlsWithSourceRemoved, final CompileContextImpl context, final Set<String> toDelete, final TranslatingCompilerStateCache cache, final Set<File> outputDirectories) throws CacheCorruptedException { DeleteHelper deleteHelper = new DeleteHelper(outputDirectories); int total = toDelete.size(); final DependencyCache dependencyCache = context.getDependencyCache(); final boolean isTestMode = ApplicationManager.getApplication().isUnitTestMode(); context.getProgressIndicator().pushState(); try { context.getProgressIndicator().setText(CompilerBundle.message("progress.synchronizing.output.directory")); int current = 0; boolean wereFilesDeleted = false; for (final String outputPath : toDelete) { context.getProgressIndicator().setFraction(((double)(++current)) / total); if (deleteHelper.delete(outputPath)) { wereFilesDeleted = true; String qName = cache.getClassName(outputPath); if (qName != null) { final int id = dependencyCache.getSymbolTable().getId(qName); dependencyCache.addTraverseRoot(id); if (urlsWithSourceRemoved.contains(outputPath)) { dependencyCache.markSourceRemoved(id); } } if (isTestMode) { CompilerManagerImpl.addDeletedPath(outputPath); } cache.remove(outputPath); } } return wereFilesDeleted; } finally { deleteHelper.finish(); context.getProgressIndicator().popState(); } } private void findOutOfDateFiles(final TranslatingCompiler compiler, final VfsSnapshot snapshot, final boolean forceCompile, final TranslatingCompilerStateCache cache, final Set<VirtualFile> toCompile, final CompileContext context) { final CompilerConfiguration compilerConfiguration = CompilerConfiguration.getInstance(myProject); snapshot.forEachUrl(new TObjectProcedure<String>() { public boolean execute(final String url) { final VirtualFile file = snapshot.getFileByUrl(url); if (compiler.isCompilableFile(file, context)) 
{ if (!forceCompile && compilerConfiguration.isExcludedFromCompilation(file)) { return true; } if (forceCompile || file.getTimeStamp() != cache.getSourceTimestamp(url)) { if (LOG.isDebugEnabled()) { LOG.debug("File is out-of-date: " + url + "; current timestamp = " + file.getTimeStamp() + "; stored timestamp = " + cache.getSourceTimestamp(url)); } toCompile.add(file); } } return true; } }); } private void addDependentFiles(final PsiFile psiFile, Set<VirtualFile> toCompile, final TranslatingCompilerStateCache cache, VfsSnapshot snapshot, Set<String> sourcesWithOutputRemoved, TranslatingCompiler compiler, CompileContextImpl context) { final DependenciesBuilder builder = new ForwardDependenciesBuilder(myProject, new AnalysisScope(psiFile)); builder.analyze(); final Map<PsiFile, Set<PsiFile>> dependencies = builder.getDependencies(); final Set<PsiFile> dependentFiles = dependencies.get(psiFile); if (dependentFiles != null && dependentFiles.size() > 0) { for (final PsiFile dependentFile : dependentFiles) { if (dependentFile instanceof PsiCompiledElement) { continue; } final VirtualFile vFile = dependentFile.getVirtualFile(); if (vFile == null || toCompile.contains(vFile)) { continue; } String url = snapshot.getUrlByFile(vFile); if (url == null) { // the file does not belong to this snapshot url = vFile.getUrl(); } if (!sourcesWithOutputRemoved.contains(url)) { if (vFile.getTimeStamp() == cache.getSourceTimestamp(url)) { continue; } } if (!compiler.isCompilableFile(vFile, context)) { continue; } toCompile.add(vFile); addDependentFiles(dependentFile, toCompile, cache, snapshot, sourcesWithOutputRemoved, compiler, context); } } } private String getModuleOutputDirForFile(CompileContext context, VirtualFile file) { final Module module = context.getModuleByFile(file); if (module == null) { return null; // looks like file invalidated } final ProjectFileIndex fileIndex = myProjectRootManager.getFileIndex(); return getModuleOutputPath(module, 
fileIndex.isInTestSourceContent(file)); } // [mike] performance optimization - this method is accessed > 15,000 times in Aurora private String getModuleOutputPath(final Module module, boolean inTestSourceContent) { final Map<Module, String> map = inTestSourceContent? myModuleTestOutputPaths : myModuleOutputPaths; String path = map.get(module); if (path == null) { path = CompilerPaths.getModuleOutputPath(module, inTestSourceContent); /* if (!path.endsWith("/")) { path = path + "/"; } */ map.put(module, path); } return path; } private boolean processFiles(final FileProcessingCompilerAdapter adapter, final boolean forceCompile, final boolean checkScope) { final CompileContext context = adapter.getCompileContext(); final FileProcessingCompilerStateCache cache = getFileProcessingCompilerCache(adapter.getCompiler()); final FileProcessingCompiler.ProcessingItem[] items = adapter.getProcessingItems(); if (context.getMessageCount(CompilerMessageCategory.ERROR) > 0) { return false; } final CompileScope scope = context.getCompileScope(); final List<FileProcessingCompiler.ProcessingItem> toProcess = new ArrayList<FileProcessingCompiler.ProcessingItem>(); final Set<String> allUrls = new HashSet<String>(); ApplicationManager.getApplication().runReadAction(new Runnable() { public void run() { for (FileProcessingCompiler.ProcessingItem item : items) { final VirtualFile file = item.getFile(); final String url = file.getUrl(); allUrls.add(url); if (!forceCompile && cache.getTimestamp(url) == file.getTimeStamp()) { final ValidityState state = cache.getExtState(url); final ValidityState itemState = item.getValidityState(); if (state != null ? 
state.equalsTo(itemState) : itemState == null) { continue; } } toProcess.add(item); } } }); final String[] urls = cache.getUrls(); if (urls.length > 0) { context.getProgressIndicator().pushState(); context.getProgressIndicator().setText(CompilerBundle.message("progress.processing.outdated.files")); final List<String> urlsToRemove = new ArrayList<String>(); ApplicationManager.getApplication().runReadAction(new Runnable() { public void run() { for (final String url : urls) { if (!allUrls.contains(url)) { if (!checkScope || scope.belongs(url)) { urlsToRemove.add(url); } } } } }); if (urlsToRemove.size() > 0) { for (final String url : urlsToRemove) { adapter.processOutdatedItem(context, url, cache.getExtState(url)); cache.remove(url); } } context.getProgressIndicator().popState(); } if (toProcess.size() == 0) { return false; } context.getProgressIndicator().pushState(); final FileProcessingCompiler.ProcessingItem[] processed = adapter.process(toProcess.toArray(new FileProcessingCompiler.ProcessingItem[toProcess.size()])); context.getProgressIndicator().popState(); if (processed.length > 0) { context.getProgressIndicator().pushState(); context.getProgressIndicator().setText(CompilerBundle.message("progress.updating.caches")); try { List<VirtualFile> vFiles = new ArrayList<VirtualFile>(processed.length); for (int idx = 0; idx < processed.length; idx++) { vFiles.add(processed[idx].getFile()); } CompilerUtil.refreshVirtualFiles(vFiles); ApplicationManager.getApplication().runReadAction(new Runnable() { public void run() { for (FileProcessingCompiler.ProcessingItem item : processed) { cache.update(item.getFile(), item.getValidityState()); } } }); } finally { if (cache.isDirty()) { context.getProgressIndicator().setText(CompilerBundle.message("progress.saving.caches")); cache.save(); } context.getProgressIndicator().popState(); } } return true; } public TranslatingCompilerStateCache getTranslatingCompilerCache(TranslatingCompiler compiler) { Object cache = 
myCompilerToCacheMap.get(compiler);
    if (cache == null) {
      // lazily create and memoize the per-compiler cache
      cache = new TranslatingCompilerStateCache(myCachesDirectoryPath, getIdPrefix(compiler), myStringInterner);
      myCompilerToCacheMap.put(compiler, cache);
    }
    else {
      LOG.assertTrue(cache instanceof TranslatingCompilerStateCache);
    }
    return (TranslatingCompilerStateCache)cache;
  }

  // Lazily creates (and memoizes) the state cache for a file-processing compiler.
  private FileProcessingCompilerStateCache getFileProcessingCompilerCache(FileProcessingCompiler compiler) {
    Object cache = myCompilerToCacheMap.get(compiler);
    if (cache == null) {
      cache = new FileProcessingCompilerStateCache(myCachesDirectoryPath, getIdPrefix(compiler), compiler, myStringInterner);
      myCompilerToCacheMap.put(compiler, cache);
    }
    else {
      LOG.assertTrue(cache instanceof FileProcessingCompilerStateCache);
    }
    return (FileProcessingCompilerStateCache)cache;
  }

  // Lazily creates (and memoizes) the validity-state cache for a generating compiler.
  // Reading/writing of ValidityState entries is delegated to the compiler itself.
  private StateCache<ValidityState> getGeneratingCompilerCache(final GeneratingCompiler compiler) {
    Object cache = myCompilerToCacheMap.get(compiler);
    if (cache == null) {
      cache = new StateCache<ValidityState>(myCachesDirectoryPath + File.separator + getIdPrefix(compiler) + "_timestamp.dat", myStringInterner) {
        public ValidityState read(DataInputStream stream) throws IOException {
          return compiler.createValidityState(stream);
        }

        public void write(ValidityState validityState, DataOutputStream stream) throws IOException {
          validityState.save(stream);
        }
      };
      myCompilerToCacheMap.put(compiler, cache);
    }
    return (StateCache<ValidityState>)cache;
  }

  // Drops the in-memory cache object for the compiler; it is re-created on next access.
  private void dropInternalCache(Compiler compiler) {
    myCompilerToCacheMap.remove(compiler);
  }

  // Derives a file-name-safe id prefix from the compiler's description
  // (runs of whitespace become '_', result lower-cased).
  // NOTE(review): toLowerCase() uses the default locale; presumably descriptions are ASCII,
  // but on a Turkish locale 'I' would not map to 'i' — confirm and consider Locale-explicit casing.
  private static String getIdPrefix(Compiler compiler) {
    @NonNls String description = compiler.getDescription();
    return description.replaceAll("\\s+", "_").toLowerCase();
  }

  /**
   * Runs a single compile task on a background thread under a compiler progress indicator,
   * serialized on the project's CompilerManager instance. All documents are saved first.
   * The signature continues on the following source line.
   */
  public void executeCompileTask(final CompileTask task, final CompileScope scope, final String contentName, final Runnable onTaskFinished) {
    final CompilerProgressIndicator indicator = new CompilerProgressIndicator(
      myProject,
CompilerWorkspaceConfiguration.getInstance(myProject).COMPILE_IN_BACKGROUND, contentName); final CompileContextImpl compileContext = new CompileContextImpl(myProject, indicator, scope, null, this, false); FileDocumentManager.getInstance().saveAllDocuments(); //noinspection HardCodedStringLiteral new Thread("Compile Task Thread") { public void run() { synchronized (CompilerManager.getInstance(myProject)) { ProgressManager.getInstance().runProcess(new Runnable() { public void run() { try { task.execute(compileContext); } catch (ProcessCanceledException ex) { // suppressed } finally { if (onTaskFinished != null) { onTaskFinished.run(); } } } }, compileContext.getProgressIndicator()); } } }.start(); } private boolean executeCompileTasks(CompileContext context, boolean beforeTasks) { final CompilerManager manager = CompilerManager.getInstance(myProject); final ProgressIndicator progressIndicator = context.getProgressIndicator(); try { CompileTask[] tasks = beforeTasks ? manager.getBeforeTasks() : manager.getAfterTasks(); if (tasks.length > 0) { progressIndicator.setText( beforeTasks ? 
CompilerBundle.message("progress.executing.precompile.tasks") : CompilerBundle.message("progress.executing.postcompile.tasks")
        );
        // Run every registered compile task; the sequence fails fast on the first task
        // that reports failure.
        for (CompileTask task : tasks) {
          if (!task.execute(context)) {
            return false;
          }
        }
      }
    }
    finally {
      // Always clear the status bar text; when a compiler progress indicator is in use,
      // re-show the compiler content on the event dispatch thread.
      WindowManager.getInstance().getStatusBar(myProject).setInfo("");
      if (progressIndicator instanceof CompilerProgressIndicator) {
        ApplicationManager.getApplication().invokeLater(new Runnable() {
          public void run() {
            ((CompilerProgressIndicator)progressIndicator).showCompilerContent();
          }
        });
      }
    }
    return true;
  }

  // todo: add validation for module chunks: all modules that form a chunk must have the same JDK
  /**
   * Validates the compiler configuration for the given scope before a build starts:
   * every module with sources must have a JDK and an output path, missing output
   * directories are created (and refreshed in the VFS), cyclic module chunks must agree
   * on JDK and language level, and each registered compiler must accept the scope.
   * Shows an error dialog and returns false on the first violation found.
   */
  private boolean validateCompilerConfiguration(final CompileScope scope, boolean checkOutputAndSourceIntersection) {
    final Module[] scopeModules = scope.getAffectedModules()/*ModuleManager.getInstance(myProject).getModules()*/;
    final List<String> modulesWithoutOutputPathSpecified = new ArrayList<String>();
    final List<String> modulesWithoutJdkAssigned = new ArrayList<String>();
    final Set<File> nonExistingOutputPaths = new HashSet<File>();
    for (final Module module : scopeModules) {
      if (ModuleType.J2EE_APPLICATION.equals(module.getModuleType())) {
        continue; // makes no sense to demand jdk & output paths for such modules
      }
      final boolean hasSources = hasSources(module, false);
      final boolean hasTestSources = hasSources(module, true);
      if (!hasSources && !hasTestSources) {
        // If module contains no sources, shouldn't have to select JDK or output directory (SCR #19333)
        // todo still there may be problems with this approach if some generated files are attributed by this module
        continue;
      }
      final ProjectJdk jdk = ModuleRootManager.getInstance(module).getJdk();
      if (jdk == null) {
        modulesWithoutJdkAssigned.add(module.getName());
      }
      final String outputPath = getModuleOutputPath(module, false);
      final String testsOutputPath = getModuleOutputPath(module, true);
      if (outputPath == null && testsOutputPath == null) {
        modulesWithoutOutputPathSpecified.add(module.getName());
      }
      else {
        // An output path is only mandatory for the kind of sources the module actually has.
        if (outputPath != null) {
          final File file = new File(outputPath.replace('/', File.separatorChar));
          if (!file.exists()) {
            nonExistingOutputPaths.add(file);
          }
        }
        else {
          if (hasSources) {
            modulesWithoutOutputPathSpecified.add(module.getName());
          }
        }
        if (testsOutputPath != null) {
          final File f = new File(testsOutputPath.replace('/', File.separatorChar));
          if (!f.exists()) {
            nonExistingOutputPaths.add(f);
          }
        }
        else {
          if (hasTestSources) {
            modulesWithoutOutputPathSpecified.add(module.getName());
          }
        }
      }
    }
    if (modulesWithoutJdkAssigned.size() > 0) {
      showNotSpecifiedError("error.jdk.not.specified", modulesWithoutJdkAssigned, ClasspathEditor.NAME);
      return false;
    }
    if (modulesWithoutOutputPathSpecified.size() > 0) {
      showNotSpecifiedError("error.output.not.specified", modulesWithoutOutputPathSpecified, ContentEntriesEditor.NAME);
      return false;
    }
    if (nonExistingOutputPaths.size() > 0) {
      // Create missing output directories up front, then refresh them in the local file
      // system so the build sees them as valid virtual files.
      for (File file : nonExistingOutputPaths) {
        final boolean succeeded = file.mkdirs();
        if (!succeeded) {
          Messages.showMessageDialog(myProject, CompilerBundle.message("error.failed.to.create.directory", file.getPath()), CommonBundle.getErrorTitle(), Messages.getErrorIcon());
          return false;
        }
      }
      final Boolean refreshSuccess = ApplicationManager.getApplication().runWriteAction(new Computable<Boolean>() {
        public Boolean compute() {
          LocalFileSystem.getInstance().refreshIoFiles(nonExistingOutputPaths);
          for (File file : nonExistingOutputPaths) {
            if (LocalFileSystem.getInstance().findFileByIoFile(file) == null) {
              return Boolean.FALSE;
            }
          }
          return Boolean.TRUE;
        }
      });
      if (!refreshSuccess.booleanValue()) {
        return false;
      }
    }
    if (checkOutputAndSourceIntersection) {
      if (myShouldClearOutputDirectory) {
        if (!validateOutputAndSourcePathsIntersection()) {
          return false;
        }
      }
    }
    // Modules inside one dependency cycle (chunk) are compiled together and must therefore
    // share a JDK and a language level.
    final List<Chunk<Module>> chunks = ModuleCompilerUtil.getSortedModuleChunks(myProject, scopeModules);
    for (final Chunk<Module> chunk : chunks) {
      final Set<Module> chunkModules = chunk.getNodes();
      if (chunkModules.size() <= 1) {
        continue; // no need to check one-module chunks
      }
      ProjectJdk jdk = null;
      LanguageLevel languageLevel = null;
      for (final Module module : chunkModules) {
        final ProjectJdk moduleJdk = ModuleRootManager.getInstance(module).getJdk();
        if (jdk == null) {
          jdk = moduleJdk;
        }
        else {
          if (!jdk.equals(moduleJdk)) {
            showCyclicModulesHaveDifferentJdksError(chunkModules.toArray(new Module[chunkModules.size()]));
            return false;
          }
        }
        LanguageLevel moduleLanguageLevel = module.getEffectiveLanguageLevel();
        if (languageLevel == null) {
          languageLevel = moduleLanguageLevel;
        }
        else {
          if (!languageLevel.equals(moduleLanguageLevel)) {
            showCyclicModulesHaveDifferentLanguageLevel(chunkModules.toArray(new Module[chunkModules.size()]));
            return false;
          }
        }
      }
    }
    // Finally give every registered compiler a chance to veto the scope.
    final Compiler[] allCompilers = CompilerManager.getInstance(myProject).getCompilers(Compiler.class);
    for (Compiler compiler : allCompilers) {
      if (!compiler.validateConfiguration(scope)) {
        return false;
      }
    }
    return J2EEModuleUtilEx.checkDependentModulesOutputPathConsistency(myProject, scopeModules, true);
  }

  // Shows an error that modules in one dependency cycle use different language levels and
  // opens the configuration dialog preselecting the first module of the chunk.
  private void showCyclicModulesHaveDifferentLanguageLevel(Module[] modulesInChunk) {
    LOG.assertTrue(modulesInChunk.length > 0);
    String moduleNameToSelect = modulesInChunk[0].getName();
    final StringBuffer moduleNames = getModulesString(modulesInChunk);
    Messages.showMessageDialog(myProject, CompilerBundle.message("error.chunk.modules.must.have.same.language.level", moduleNames.toString()), CommonBundle.getErrorTitle(), Messages.getErrorIcon());
    showConfigurationDialog(moduleNameToSelect, null);
  }

  // Shows an error that modules in one dependency cycle use different JDKs and opens the
  // configuration dialog preselecting the first module of the chunk.
  private void showCyclicModulesHaveDifferentJdksError(Module[] modulesInChunk) {
    LOG.assertTrue(modulesInChunk.length > 0);
    String moduleNameToSelect = modulesInChunk[0].getName();
    final StringBuffer moduleNames = getModulesString(modulesInChunk);
    Messages.showMessageDialog(myProject, CompilerBundle.message("error.chunk.modules.must.have.same.jdk", moduleNames.toString()), CommonBundle.getErrorTitle(), Messages.getErrorIcon());
showConfigurationDialog(moduleNameToSelect, null); } private static StringBuffer getModulesString(Module[] modulesInChunk) { final StringBuffer moduleNames = new StringBuffer(); for (Module module : modulesInChunk) { if (moduleNames.length() > 0) { moduleNames.append("\n"); } moduleNames.append("\"").append(module.getName()).append("\""); } return moduleNames; } private static boolean hasSources(Module module, boolean checkTestSources) { final ContentEntry[] contentEntries = ModuleRootManager.getInstance(module).getContentEntries(); for (final ContentEntry contentEntry : contentEntries) { final SourceFolder[] sourceFolders = contentEntry.getSourceFolders(); for (final SourceFolder sourceFolder : sourceFolders) { if (sourceFolder.getFile() == null) { continue; // skip invalid source folders } if (checkTestSources) { if (sourceFolder.isTestSource()) { return true; } } else { if (!sourceFolder.isTestSource()) { return true; } } } } return false; } private void showNotSpecifiedError(final @NonNls String resourceId, List<String> modules, String tabNameToSelect) { final StringBuffer names = new StringBuffer(); String nameToSelect = null; final int maxModulesToShow = 10; for (String name : modules.size() > maxModulesToShow ? 
modules.subList(0, maxModulesToShow) : modules) { if (nameToSelect == null) { nameToSelect = name; } if (names.length() > 0) { names.append(",\n"); } names.append("\""); names.append(name); names.append("\""); } if (modules.size() > maxModulesToShow) { names.append(",\n..."); } final String message = CompilerBundle.message(resourceId, modules.size(), names); if(ApplicationManager.getApplication().isUnitTestMode()) { LOG.error(message); } Messages.showMessageDialog(myProject, message, CommonBundle.getErrorTitle(), Messages.getErrorIcon()); showConfigurationDialog(nameToSelect, tabNameToSelect); } private boolean validateOutputAndSourcePathsIntersection() { final Module[] allModules = ModuleManager.getInstance(myProject).getModules(); final VirtualFile[] outputPaths = CompilerPathsEx.getOutputDirectories(allModules); final Set<VirtualFile> affectedOutputPaths = new HashSet<VirtualFile>(); for (Module allModule : allModules) { final ModuleRootManager rootManager = ModuleRootManager.getInstance(allModule); final VirtualFile[] sourceRoots = rootManager.getSourceRoots(); for (final VirtualFile outputPath : outputPaths) { for (VirtualFile sourceRoot : sourceRoots) { if (VfsUtil.isAncestor(outputPath, sourceRoot, true) || VfsUtil.isAncestor(sourceRoot, outputPath, false)) { affectedOutputPaths.add(outputPath); } } } } if (affectedOutputPaths.size() > 0) { final StringBuffer paths = new StringBuffer(); for (final VirtualFile affectedOutputPath : affectedOutputPaths) { if (paths.length() < 0) { paths.append("\n"); } paths.append(affectedOutputPath.getPath().replace('/', File.separatorChar)); } final int answer = Messages.showOkCancelDialog(myProject, CompilerBundle.message("warning.sources.under.output.paths", paths.toString()), CommonBundle.getErrorTitle(), Messages.getWarningIcon()); if (answer == 0) { myShouldClearOutputDirectory = false; return true; } else { return false; } } return true; } private void showConfigurationDialog(String moduleNameToSelect, String 
tabNameToSelect) { ModulesConfigurator.showDialog(myProject, moduleNameToSelect, tabNameToSelect, false); } }
package biomodel.gui.comp; import java.awt.Dimension; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.net.URI; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import org.sbml.jsbml.ext.comp.CompModelPlugin; import org.sbml.jsbml.ext.comp.CompSBasePlugin; import org.sbml.jsbml.ext.comp.Deletion; import org.sbml.jsbml.ext.comp.ReplacedBy; import org.sbml.jsbml.ext.comp.ReplacedElement; import org.sbml.jsbml.ext.comp.CompConstants; import org.sbml.jsbml.SBase; import org.sbml.jsbml.ext.comp.Submodel; import biomodel.annotation.AnnotationUtility; import biomodel.annotation.SBOLAnnotation; import biomodel.gui.sbol.SBOLField; import biomodel.gui.schematic.ModelEditor; import biomodel.gui.util.PropertyField; import biomodel.gui.util.PropertyList; import biomodel.parser.BioModel; import biomodel.util.GlobalConstants; import biomodel.util.SBMLutilities; import biomodel.util.Utility; import main.Gui; public class ComponentsPanel extends JPanel implements ActionListener { private static final long serialVersionUID = 1L; private String selected = ""; private String[] options = { "Ok", "Cancel" }; private ArrayList<String> portIds = null; private ArrayList<String> idRefs = null; private ArrayList<String> types = null; private ArrayList<JComboBox> portmapBox = null; private ArrayList<JComboBox> directionBox = null; private ArrayList<JComboBox> convBox = null; private BioModel bioModel = null; private PropertyList componentsList = null; private HashMap<String, PropertyField> fields = null; private SBOLField sbolField; private String selectedComponent, oldPort; private ModelEditor modelEditor; private String subModelId; private JComboBox timeConvFactorBox; private JComboBox 
extentConvFactorBox;

	// True when the panel is opened from a parameter-editing context.
	private boolean paramsOnly;

	// Model of the instantiated component (the submodel being wired up).
	private BioModel subBioModel;

	/**
	 * Builds the component (submodel) editor panel: id/name fields, time and extent
	 * conversion-factor selectors, an optional SBOL association field, and one row of
	 * combo boxes per port of the component. Rows are grouped by port type in a fixed
	 * display order. The constructor ends by showing the dialog in a loop until the
	 * user's input is accepted or the dialog is cancelled.
	 */
	public ComponentsPanel(String selected, PropertyList componentsList, BioModel bioModel, BioModel subBioModel,
			ArrayList<String> ports, String selectedComponent, String oldPort, boolean paramsOnly, ModelEditor gcmEditor) {
		// One grid row per port plus the fixed rows (id, name, time/extent conversion, SBOL, heading).
		super(new GridLayout(ports.size() + 6, 1));
		this.selected = selected;
		this.componentsList = componentsList;
		this.bioModel = bioModel;
		this.modelEditor = gcmEditor;
		this.selectedComponent = selectedComponent;
		this.oldPort = oldPort;
		this.paramsOnly = paramsOnly;
		this.subBioModel = subBioModel;
		//this.setPreferredSize(new Dimension(800, 600));
		fields = new HashMap<String, PropertyField>();
		portIds = new ArrayList<String>();
		idRefs = new ArrayList<String>();
		types = new ArrayList<String>();
		portmapBox = new ArrayList<JComboBox>();
		directionBox = new ArrayList<JComboBox>();
		convBox = new ArrayList<JComboBox>();
		// NOTE(review): these two literals were corrupted in the checked-in text; reconstructed
		// as the replacement ("<--", index 0) and replaced-by ("-->", index 1) arrows that
		// updateComboBoxEnabling() and getPortMap() index by position — confirm against VCS history.
		String[] directions = new String[2];
		directions[0] = "<--";
		directions[1] = "-->";
		if (bioModel.isGridEnabled()) {
			subModelId = "GRID__" + selectedComponent.replace(".xml", "");
		}
		else {
			subModelId = selected;
		}
		// Constant user parameters are the candidates offered as conversion factors.
		ArrayList<String> constParameterList = bioModel.getConstantUserParameters();
		Collections.sort(constParameterList);
		String[] parameters = new String[constParameterList.size() + 1];
		parameters[0] = "(none)";
		for (int l = 1; l < parameters.length; l++) {
			parameters[l] = constParameterList.get(l - 1);
		}
		timeConvFactorBox = new JComboBox(parameters);
		extentConvFactorBox = new JComboBox(parameters);
		// Build one row group per port type, in a fixed display order (previously ~15
		// copy-pasted loops; now delegated to addPortRows).
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getCompartments()), true, GlobalConstants.COMPARTMENT);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getParameters()), true, GlobalConstants.PARAMETER, GlobalConstants.LOCALPARAMETER);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getBooleans()), true, GlobalConstants.BOOLEAN);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getPlaces()), true, GlobalConstants.PLACE);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getSpecies()), true, GlobalConstants.SBMLSPECIES);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getPromoters()), true, GlobalConstants.PROMOTER);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getReactions()), true, GlobalConstants.SBMLREACTION);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getFunctions()), true, GlobalConstants.FUNCTION);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getUnits()), true, GlobalConstants.UNIT);
		// Rules, constraints, events and transitions get a disabled conversion-factor box.
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getAlgebraicRules()), false, GlobalConstants.ALGEBRAIC_RULE);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getAssignmentRules()), false, GlobalConstants.ASSIGNMENT_RULE);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getRateRules()), false, GlobalConstants.RATE_RULE);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getConstraints()), false, GlobalConstants.CONSTRAINT);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getEvents()), false, GlobalConstants.EVENT);
		addPortRows(ports, directions, parameters, withDefaultChoices(bioModel.getTransitions()), false, GlobalConstants.TRANSITION);
		// Any remaining port type gets a simple include/delete choice and no direction.
		// NOTE(review): "--include--"/"--delete--" literals were corrupted in the checked-in
		// text; reconstructed from the intact occurrences used later in this class.
		String[] Choices = new String[2];
		Choices[0] = "--include--";
		Choices[1] = "--delete--";
		for (int i = 0; i < ports.size(); i++) {
			String type = ports.get(i).split(":")[0];
			String portId = ports.get(i).split(":")[1];
			String idRef = ports.get(i).split(":")[2];
			if (!type.equals(GlobalConstants.COMPARTMENT) && !type.equals(GlobalConstants.PARAMETER)
					&& !type.equals(GlobalConstants.LOCALPARAMETER) && !type.equals(GlobalConstants.EVENT)
					&& !type.equals(GlobalConstants.SBMLSPECIES) && !type.equals(GlobalConstants.SBMLREACTION)
					&& !type.equals(GlobalConstants.FUNCTION) && !type.equals(GlobalConstants.UNIT)
					&& !type.equals(GlobalConstants.ASSIGNMENT_RULE) && !type.equals(GlobalConstants.RATE_RULE)
					&& !type.equals(GlobalConstants.ALGEBRAIC_RULE) && !type.equals(GlobalConstants.CONSTRAINT)
					&& !type.equals(GlobalConstants.PROMOTER) && !type.equals(GlobalConstants.BOOLEAN)
					&& !type.equals(GlobalConstants.PLACE) && !type.equals(GlobalConstants.TRANSITION)) {
				portIds.add(portId);
				idRefs.add(idRef.replace("init__", ""));
				types.add(type);
				JComboBox port = new JComboBox(Choices);
				portmapBox.add(port);
				JComboBox dirport = new JComboBox(directions);
				dirport.setEnabled(false);
				directionBox.add(dirport);
				JComboBox convFactor = new JComboBox(parameters);
				convBox.add(convFactor);
			}
		}
		Submodel instance = bioModel.getSBMLCompModel().getListOfSubmodels().get(subModelId);
		// ID field
		PropertyField field = new PropertyField(GlobalConstants.ID, "", null, null, Utility.IDDimString, paramsOnly, "default", false);
		fields.put(GlobalConstants.ID, field);
		add(field);
		// Name field
		if (instance != null) {
			field = new PropertyField(GlobalConstants.NAME, instance.getName(), null, null, Utility.NAMEstring, paramsOnly, "default", false);
		}
		else {
			field = new PropertyField(GlobalConstants.NAME, "", null, null, Utility.NAMEstring, paramsOnly, "default", false);
		}
		fields.put(GlobalConstants.NAME, field);
		add(field);
		// Time and extent conversion factor selectors.
		JLabel timeConvFactorLabel = new JLabel("Time Conversion Factor");
		JLabel extentConvFactorLabel = new JLabel("Extent Conversion Factor");
		JPanel timePanel = new JPanel();
		timePanel.setLayout(new GridLayout(1, 2));
		JPanel extentPanel = new JPanel();
		extentPanel.setLayout(new GridLayout(1, 2));
		timePanel.add(timeConvFactorLabel);
		timePanel.add(timeConvFactorBox);
		extentPanel.add(extentConvFactorLabel);
		extentPanel.add(extentConvFactorBox);
		if (instance != null && instance.isSetTimeConversionFactor()) {
			timeConvFactorBox.setSelectedItem(instance.getTimeConversionFactor());
		}
		if (instance != null && instance.isSetExtentConversionFactor()) {
			extentConvFactorBox.setSelectedItem(instance.getExtentConversionFactor());
		}
		add(timePanel);
		add(extentPanel);
		// Parse out SBOL annotations and add to SBOL field
		if (!paramsOnly) {
			// Field for annotating submodel with SBOL DNA components
			List<URI> sbolURIs = new LinkedList<URI>();
			String sbolStrand = AnnotationUtility.parseSBOLAnnotation(instance, sbolURIs);
			sbolField = new SBOLField(sbolURIs, sbolStrand, GlobalConstants.SBOL_DNA_COMPONENT, gcmEditor, 2, false);
			add(sbolField);
		}
		// Port map heading row.
		JPanel headingPanel = new JPanel();
		JLabel typeLabel = new JLabel("Type");
		JLabel portLabel = new JLabel("Port");
		JLabel dirLabel = new JLabel("Direction");
		JLabel replLabel = new JLabel("Replacement");
		JLabel convLabel = new JLabel("Conversion");
		headingPanel.setLayout(new GridLayout(1, 5));
		headingPanel.add(typeLabel);
		headingPanel.add(portLabel);
		headingPanel.add(dirLabel);
		headingPanel.add(replLabel);
		headingPanel.add(convLabel);
		if (portIds.size() > 0) {
			add(headingPanel);
		}
		//add(new JLabel("Ports"));
		for (int i = 0; i < portIds.size(); i++) {
			JPanel tempPanel = new JPanel();
			JLabel tempLabel = new JLabel(idRefs.get(i));
			JLabel tempLabel2 = new JLabel(types.get(i));
			tempPanel.setLayout(new GridLayout(1, 5));
			tempPanel.add(tempLabel2);
			tempPanel.add(tempLabel);
			tempPanel.add(directionBox.get(i));
			tempPanel.add(portmapBox.get(i));
			tempPanel.add(convBox.get(i));
			add(tempPanel);
			directionBox.get(i).addActionListener(this);
			portmapBox.get(i).addActionListener(this);
		}
		// Pre-select "--delete--" for every port the submodel instance already deletes.
		if (instance != null) {
			for (int j = 0; j < instance.getListOfDeletions().size(); j++) {
				Deletion deletion = instance.getListOfDeletions().get(j);
				int l = portIds.indexOf(deletion.getPortRef());
				if (l >= 0) {
					// NOTE(review): literal was corrupted in the checked-in text; reconstructed
					// from the intact "--delete--" choice used elsewhere in this class.
					portmapBox.get(l).setSelectedItem("--delete--");
				}
			}
		}
		// Reflect existing replacement/replaced-by annotations in the combo boxes.
		ArrayList<SBase> elements = SBMLutilities.getListOfAllElements(bioModel.getSBMLDocument().getModel());
		for (int j = 0; j < elements.size(); j++) {
			SBase sbase = elements.get(j);
			CompSBasePlugin sbmlSBase = (CompSBasePlugin) sbase.getExtension(CompConstants.namespaceURI);
			if (sbmlSBase != null) {
				if (sbase.getElementName().equals(GlobalConstants.ASSIGNMENT_RULE)
						|| sbase.getElementName().equals(GlobalConstants.RATE_RULE)
						|| sbase.getElementName().equals(GlobalConstants.ALGEBRAIC_RULE)
						|| sbase.getElementName().equals(GlobalConstants.CONSTRAINT)) {
					// Rules and constraints are identified by metaid rather than id.
					getPortMap(sbmlSBase, sbase.getMetaId());
				}
				else {
					getPortMap(sbmlSBase, SBMLutilities.getId(sbase));
				}
			}
		}
		updateComboBoxEnabling();
		String oldName = null;
		if (selected != null) {
			oldName = selected;
			fields.get(GlobalConstants.ID).setValue(selected);
		}
		// Keep re-showing the dialog until the input is accepted or the user cancels.
		boolean display = false;
		while (!display) {
			display = openGui(oldName);
		}
	}

	// Returns {"--none--", "--delete--", sorted ids...}. Sorts the given list in place,
	// matching the original behavior of Collections.sort on the model's returned lists.
	private static String[] withDefaultChoices(ArrayList<String> ids) {
		Collections.sort(ids);
		String[] choices = new String[ids.size() + 2];
		choices[0] = "--none--";
		choices[1] = "--delete--";
		for (int l = 2; l < choices.length; l++) {
			choices[l] = ids.get(l - 2);
		}
		return choices;
	}

	// Adds one row (replacement, direction and conversion combo boxes plus the parallel-list
	// entries) for every port whose type matches one of matchTypes. Ports are encoded as
	// "type:portId:idRef". When conversionAllowed is false the conversion box is disabled.
	private void addPortRows(ArrayList<String> ports, String[] directions, String[] parameters,
			String[] replacementChoices, boolean conversionAllowed, String... matchTypes) {
		for (int i = 0; i < ports.size(); i++) {
			String[] parts = ports.get(i).split(":");
			for (String match : matchTypes) {
				if (parts[0].equals(match)) {
					portIds.add(parts[1]);
					idRefs.add(parts[2]);
					types.add(parts[0]);
					portmapBox.add(new JComboBox(replacementChoices));
					directionBox.add(new JComboBox(directions));
					JComboBox convFactor = new JComboBox(parameters);
					convFactor.setEnabled(conversionAllowed);
					convBox.add(convFactor);
					break;
				}
			}
		}
	}

	// Enables/disables the direction and conversion boxes so the three selections of each
	// row stay consistent: rows mapped to --none--/--delete-- (indices 0 and 1) have both
	// boxes disabled, and a replaced-by direction (index 1) forbids a conversion factor.
	private void updateComboBoxEnabling() {
		for (int i = 0; i < portmapBox.size(); i++) {
			if (portmapBox.get(i).getSelectedIndex() < 2
					&& (directionBox.get(i).getSelectedIndex() != 0 || convBox.get(i).getSelectedIndex() != 0)) {
				directionBox.get(i).setSelectedIndex(0);
				directionBox.get(i).setEnabled(false);
				convBox.get(i).setSelectedIndex(0);
				convBox.get(i).setEnabled(false);
			}
			else if (portmapBox.get(i).getSelectedIndex() < 2) {
				directionBox.get(i).setEnabled(false);
				convBox.get(i).setEnabled(false);
			}
			else if (directionBox.get(i).getSelectedIndex() == 1 && convBox.get(i).getSelectedIndex() != 0) {
				directionBox.get(i).setEnabled(true);
				convBox.get(i).setSelectedIndex(0);
				convBox.get(i).setEnabled(false);
			}
			else if (directionBox.get(i).getSelectedIndex() == 1) {
				directionBox.get(i).setEnabled(true);
				convBox.get(i).setEnabled(false);
			}
			else {
				directionBox.get(i).setEnabled(true);
				convBox.get(i).setEnabled(true);
			}
		}
	}

	// Selects the combo-box entries that reflect the existing replacements, replaced-by
	// relations and deletions targeting this submodel for the element with the given id.
	private void getPortMap(CompSBasePlugin sbmlSBase,String id) {
		for (int k = 0; k <
sbmlSBase.getListOfReplacedElements().size(); k++) { ReplacedElement replacement = sbmlSBase.getListOfReplacedElements().get(k); if (replacement.getSubmodelRef().equals(subModelId)) { if (replacement.isSetPortRef()) { int l = portIds.indexOf(replacement.getPortRef()); if (l >= 0) { portmapBox.get(l).setSelectedItem(id); if (!portmapBox.get(l).getSelectedItem().equals(id)) { portmapBox.get(l).addItem(id); portmapBox.get(l).setSelectedItem(id); } directionBox.get(l).setSelectedIndex(0); convBox.get(l).setSelectedIndex(0); if (replacement.isSetConversionFactor()) { convBox.get(l).setSelectedItem(replacement.getConversionFactor()); } } } else if (replacement.isSetDeletion()) { Deletion deletion = bioModel.getSBMLCompModel().getListOfSubmodels().get(subModelId).getListOfDeletions().get(replacement.getDeletion()); if (deletion!=null) { int l = portIds.indexOf(deletion.getPortRef()); if (l >= 0) { portmapBox.get(l).setSelectedItem(id); if (!portmapBox.get(l).getSelectedItem().equals(id)) { portmapBox.get(l).addItem(id); portmapBox.get(l).setSelectedItem(id); } directionBox.get(l).setSelectedIndex(0); convBox.get(l).setSelectedIndex(0); } } } } } if (sbmlSBase.isSetReplacedBy()) { ReplacedBy replacement = sbmlSBase.getReplacedBy(); if (replacement.getSubmodelRef().equals(subModelId)) { if (replacement.isSetPortRef()) { int l = portIds.indexOf(replacement.getPortRef()); if (l >= 0) { portmapBox.get(l).setSelectedItem(id); if (!portmapBox.get(l).getSelectedItem().equals(id)) { portmapBox.get(l).addItem(id); portmapBox.get(l).setSelectedItem(id); } directionBox.get(l).setSelectedIndex(1); convBox.get(l).setSelectedIndex(0); } } } } } private boolean checkValues() { for (PropertyField f : fields.values()) { if (!f.isValidValue()) { return false; } } return true; } private boolean removePortMaps(CompSBasePlugin sbmlSBase) { int j = 0; boolean result = false; while (j < sbmlSBase.getListOfReplacedElements().size()) { ReplacedElement replacement = 
				sbmlSBase.getListOfReplacedElements().get(j);
			if (replacement.getSubmodelRef().equals(subModelId) &&
					((replacement.isSetPortRef())||(replacement.isSetDeletion()))) {
				// NOTE(review): the list shrinks after this removal but j is still
				// incremented below, so the element that slides into slot j is
				// skipped on this pass -- confirm whether that is intentional.
				sbmlSBase.removeReplacedElement(replacement);
				result = true;
			}
			j++;
		}
		// A ReplacedBy with a port reference to this submodel is likewise removed.
		if (sbmlSBase.isSetReplacedBy()) {
			ReplacedBy replacement = sbmlSBase.getReplacedBy();
			if (replacement.getSubmodelRef().equals(subModelId) && (replacement.isSetPortRef())) {
				sbmlSBase.unsetReplacedBy();
				result = true;
			}
		}
		return result;
	}

	/**
	 * Shows the component editor dialog and, on OK, validates the entries and
	 * writes them (id, name, conversion factors, port maps) back into the model.
	 *
	 * @param oldName previous component id, or null when creating a new one
	 * @return false when validation fails or the submodel is missing; true otherwise
	 */
	private boolean openGui(String oldName) {
		int value = JOptionPane.showOptionDialog(Gui.frame, this, "Component Editor",
				JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE, null, options, options[0]);
		if (value == JOptionPane.YES_OPTION) {
			if (!checkValues()) {
				Utility.createErrorMessage("Error", "Illegal values entered.");
				return false;
			}
			// TODO: extract id plus dimensions using checkSizeParameters
			String id = fields.get(GlobalConstants.ID).getValue();
			// Reject the id if it collides with an existing SId, both for new
			// components and for renames.
			if (oldName == null) {
				if (bioModel.isSIdInUse(id)) {
					Utility.createErrorMessage("Error", "Id already exists.");
					return false;
				}
			}
			else if (!oldName.equals(id)) {
				if (bioModel.isSIdInUse(id)) {
					Utility.createErrorMessage("Error", "Id already exists.");
					return false;
				}
			}
			// Checks whether SBOL annotation on model needs to be deleted later when annotating component with SBOL
			// boolean removeModelSBOLAnnotationFlag = false;
			// if (!paramsOnly && sbolField.getSBOLURIs().size() > 0 &&
			//   bioModel.getElementSBOLCount() == 0 && bioModel.getModelSBOLAnnotationFlag()) {
			//   Object[] options = { "OK", "Cancel" };
			//   int choice = JOptionPane.showOptionDialog(null,
			//     "SBOL associated to model elements can't coexist with SBOL associated to model itself unless" +
			//     " the latter was previously generated from the former. Remove SBOL associated to model?",
			//     "Warning", JOptionPane.DEFAULT_OPTION, JOptionPane.WARNING_MESSAGE, null, options, options[0]);
			//   if (choice == JOptionPane.OK_OPTION)
			//     removeModelSBOLAnnotationFlag = true;
			//   else
			//     return false;
			Submodel instance = bioModel.getSBMLCompModel().getListOfSubmodels().get(subModelId);
			if (instance != null) {
				instance.setName(fields.get(GlobalConstants.NAME).getValue());
				// TODO: add/remove dimensions from the instance
				//long k = 0;
				// Drop every existing deletion on the instance; port maps are
				// rebuilt from the dialog state further below.
				while (instance.getListOfDeletions().size()>0) {
					Deletion deletion = instance.getListOfDeletions().get(0);
					instance.removeDeletion(deletion);
					/*
					if (deletion.isSetPortRef() && portIds.contains(deletion.getPortRef())) {
					} else {
						k++;
					}
					*/
				}
				// "(none)" in either conversion-factor box clears the attribute.
				if (timeConvFactorBox.getSelectedItem().equals("(none)")) {
					instance.unsetTimeConversionFactor();
				}
				else {
					instance.setTimeConversionFactor((String)timeConvFactorBox.getSelectedItem());
				}
				if (extentConvFactorBox.getSelectedItem().equals("(none)")) {
					instance.unsetExtentConversionFactor();
				}
				else {
					instance.setExtentConversionFactor((String)extentConvFactorBox.getSelectedItem());
				}
			}
			else {
				Utility.createErrorMessage("Error", "Submodel is missing.");
				return false;
			}
			// Strip all existing port maps that point at this submodel; the
			// element list is re-fetched whenever something was removed.
			ArrayList<SBase> elements = SBMLutilities.getListOfAllElements(bioModel.getSBMLDocument().getModel());
			for (int j = 0; j < elements.size(); j++) {
				SBase sbase = elements.get(j);
				CompSBasePlugin sbmlSBase = (CompSBasePlugin)sbase.getExtension(CompConstants.namespaceURI);
				if (sbmlSBase!=null) {
					if (removePortMaps(sbmlSBase)) {
						elements = SBMLutilities.getListOfAllElements(bioModel.getSBMLDocument().getModel());
					}
				}
			}
			// Recreate the port maps from the dialog's current selections.
			for (int i = 0; i < portIds.size(); i++) {
				String subId = id;
				if (subModelId.startsWith("GRID__")) subId = subModelId;
				String portId = portIds.get(i);
				//String type = types.get(i);
				String portmapId = (String)portmapBox.get(i).getSelectedItem();
				if (!portmapId.equals("--none--")&&!portmapId.equals("--delete--")&&!portmapId.equals("--include--")) {
					CompSBasePlugin sbmlSBase = null;
					SBase sbase =
							SBMLutilities.getElementBySId(bioModel.getSBMLDocument().getModel(), portmapId);
					if (sbase!=null) {
						sbmlSBase = (CompSBasePlugin)sbase.getExtension(CompConstants.namespaceURI);
						if (sbmlSBase != null) {
							if (directionBox.get(i).getSelectedIndex()==0) {
								// direction 0: the top-level element replaces the submodel port
								ReplacedElement replacement = sbmlSBase.createReplacedElement();
								replacement.setSubmodelRef(subId);
								replacement.setPortRef(portId);
								if (!convBox.get(i).getSelectedItem().equals("(none)")) {
									replacement.setConversionFactor((String)convBox.get(i).getSelectedItem());
								}
							}
							else {
								// direction 1: the submodel port replaces the top-level
								// element; an element may only carry one ReplacedBy.
								boolean skip = false;
								if (sbmlSBase.isSetReplacedBy()) {
									ReplacedBy replacement = sbmlSBase.getReplacedBy();
									if (!replacement.getSubmodelRef().equals(subId) || !replacement.getPortRef().equals(portId)) {
										Utility.createErrorMessage("Error", portmapId + " is already replaced by " +
												replacement.getPortRef().replace(GlobalConstants.INPUT+"__", "").replace(GlobalConstants.OUTPUT+"__", "") +
												" from subModel " + replacement.getSubmodelRef() + "\nCannot also replace with " +
												portId.replace(GlobalConstants.INPUT+"__", "").replace(GlobalConstants.OUTPUT+"__", "") +
												" from subModel " + subId);
										skip = true;
									}
								}
								if (!skip) {
									ReplacedBy replacement = sbmlSBase.createReplacedBy();
									replacement.setSubmodelRef(subId);
									replacement.setPortRef(portId);
								}
							}
						}
					}
					else {
						// Target has no SId: fall back to a metaid lookup.
						sbase = SBMLutilities.getElementByMetaId(bioModel.getSBMLDocument().getModel(), portmapId);
						sbmlSBase = (CompSBasePlugin)sbase.getExtension(CompConstants.namespaceURI);
						if (sbmlSBase != null) {
							if (directionBox.get(i).getSelectedIndex()==0) {
								/* TODO: Code below uses just a replacement */
								ReplacedElement replacement = sbmlSBase.createReplacedElement();
								replacement.setSubmodelRef(subId);
								replacement.setPortRef(portId);
								if (!convBox.get(i).getSelectedItem().equals("(none)")) {
									replacement.setConversionFactor((String)convBox.get(i).getSelectedItem());
								}
								String speciesId = portId.replace(GlobalConstants.INPUT+"__", "").replace(GlobalConstants.OUTPUT+"__", "");
								CompModelPlugin subCompModel =
subBioModel.getSBMLCompModel(); Submodel submodel = bioModel.getSBMLCompModel().getListOfSubmodels().get(subId); BioModel.addImplicitDeletions(subCompModel, submodel, speciesId); /* Code below using replacement and deletion */ /* ReplacedElement replacement = sbmlSBase.createReplacedElement(); replacement.setSubmodelRef(subId); Submodel submodel = bioModel.getSBMLCompModel().getListOfSubmodels().get(subId); Deletion deletion = submodel.createDeletion(); deletion.setPortRef(portId); deletion.setId("delete_"+portId); replacement.setDeletion("delete_"+portId); */ } else { ReplacedBy replacement = sbmlSBase.createReplacedBy(); replacement.setSubmodelRef(subId); replacement.setPortRef(portId); String speciesId = portId.replace(GlobalConstants.INPUT+"__", "").replace(GlobalConstants.OUTPUT+"__", ""); CompModelPlugin subCompModel = subBioModel.getSBMLCompModel(); Submodel submodel = bioModel.getSBMLCompModel().getListOfSubmodels().get(subId); bioModel.addImplicitReplacedBys(subCompModel,submodel,speciesId,SBMLutilities.getId(sbase)); } } } } else if (portmapId.equals("--delete Submodel submodel = bioModel.getSBMLCompModel().getListOfSubmodels().get(subId); Deletion deletion = submodel.createDeletion(); deletion.setPortRef(portId); String speciesId = portId.replace(GlobalConstants.INPUT+"__", "").replace(GlobalConstants.OUTPUT+"__", ""); CompModelPlugin subCompModel = subBioModel.getSBMLCompModel(); BioModel.addImplicitDeletions(subCompModel, submodel, speciesId); } } if (selected != null && oldName != null && !oldName.equals(id)) { bioModel.changeComponentName(oldName, id); } String newPort = bioModel.getComponentPortMap(id); componentsList.removeItem(oldName + " " + selectedComponent.replace(".xml", "") + " " + oldPort); componentsList.addItem(id + " " + selectedComponent.replace(".xml", "") + " " + newPort); componentsList.setSelectedValue(id + " " + selectedComponent.replace(".xml", "") + " " + newPort, true); if (!paramsOnly) { // Add SBOL annotation to submodel if 
(sbolField.getSBOLURIs().size() > 0 || sbolField.getSBOLStrand().equals(GlobalConstants.SBOL_ASSEMBLY_MINUS_STRAND)) { SBOLAnnotation sbolAnnot = new SBOLAnnotation(instance.getMetaId(), sbolField.getSBOLURIs(), sbolField.getSBOLStrand()); AnnotationUtility.setSBOLAnnotation(instance, sbolAnnot); } else AnnotationUtility.removeSBOLAnnotation(instance); } modelEditor.setDirty(true); } else if (value == JOptionPane.NO_OPTION) { return true; } return true; } @Override public void actionPerformed(ActionEvent e) { if (e.getActionCommand().equals("comboBoxChanged")) { updateComboBoxEnabling(); } } }
// IPLabReader.java package loci.formats.in; import java.io.IOException; import loci.common.*; import loci.formats.*; import loci.formats.meta.FilterMetadata; import loci.formats.meta.MetadataStore; public class IPLabReader extends FormatReader { // -- Fields -- /** Bytes per pixel. */ private int bps; /** Total number of pixel bytes. */ private int dataSize; // -- Constructor -- /** Constructs a new IPLab reader. */ public IPLabReader() { super("IPLab", "ipl"); blockCheckLen = 12; suffixNecessary = false; // allow extensionless IPLab files } // -- IFormatReader API methods -- /* @see loci.formats.IFormatReader#isThisType(RandomAccessStream) */ public boolean isThisType(RandomAccessStream stream) throws IOException { if (!FormatTools.validStream(stream, blockCheckLen, false)) return false; String s = stream.readString(4); boolean big = s.equals("iiii"); boolean little = s.equals("mmmm"); if (!big && !little) return false; stream.order(little); int size = stream.readInt(); if (size != 4) return false; // first block size should be 4 int version = stream.readInt(); return version >= 0x100e; } /** * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); FormatTools.checkPlaneNumber(this, no); FormatTools.checkBufferSize(this, buf.length, w, h); int numPixels = getSizeX() * getSizeY() * getSizeC() * bps; in.seek(numPixels * (no / getSizeC()) + 44); readPlane(in, x, y, w, h, buf); return buf; } // -- IFormatHandler API methods -- /* @see loci.formats.IFormatHandler#close() */ public void close() throws IOException { super.close(); bps = dataSize = 0; } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { if (debug) debug("IPLabReader.initFile(" + id + ")"); super.initFile(id); in = 
      new RandomAccessStream(id);

    status("Populating metadata");

    // "iiii" marks a little-endian (Intel) file
    core[0].littleEndian = in.readString(4).equals("iiii");
    in.order(isLittleEndian());

    in.skipBytes(12);

    // read axis sizes from header
    dataSize = in.readInt() - 28;
    core[0].sizeX = in.readInt();
    core[0].sizeY = in.readInt();
    core[0].sizeC = in.readInt();
    core[0].sizeZ = in.readInt();
    core[0].sizeT = in.readInt();
    int filePixelType = in.readInt();

    core[0].imageCount = getSizeZ() * getSizeT();

    addMeta("Width", getSizeX());
    addMeta("Height", getSizeY());
    addMeta("Channels", getSizeC());
    addMeta("ZDepth", getSizeZ());
    addMeta("TDepth", getSizeT());

    // map the IPLab pixel type code onto a Bio-Formats pixel type
    String ptype;
    switch (filePixelType) {
      case 0:
        ptype = "8 bit unsigned";
        core[0].pixelType = FormatTools.UINT8;
        break;
      case 1:
        ptype = "16 bit signed short";
        core[0].pixelType = FormatTools.INT16;
        break;
      case 2:
        ptype = "16 bit unsigned short";
        core[0].pixelType = FormatTools.UINT16;
        break;
      case 3:
        ptype = "32 bit signed long";
        core[0].pixelType = FormatTools.INT32;
        break;
      case 4:
        ptype = "32 bit single-precision float";
        core[0].pixelType = FormatTools.FLOAT;
        break;
      case 5:
        ptype = "Color24";
        core[0].pixelType = FormatTools.UINT32;
        break;
      case 6:
        ptype = "Color48";
        core[0].pixelType = FormatTools.UINT16;
        break;
      case 10:
        ptype = "64 bit double-precision float";
        core[0].pixelType = FormatTools.DOUBLE;
        break;
      default:
        ptype = "reserved"; // for values 7-9
    }

    bps = FormatTools.getBytesPerPixel(getPixelType());

    addMeta("PixelType", ptype);
    // skip past the pixel data to reach the trailing metadata tags
    in.skipBytes(dataSize);

    core[0].dimensionOrder = "XY";
    if (getSizeC() > 1) core[0].dimensionOrder += "CZT";
    else core[0].dimensionOrder += "ZTC";
    core[0].rgb = getSizeC() > 1;
    core[0].interleaved = false;
    core[0].indexed = false;
    core[0].falseColor = false;
    core[0].metadataComplete = true;

    // The metadata store we're working with.
    MetadataStore store =
      new FilterMetadata(getMetadataStore(), isMetadataFiltered());
    store.setImageName("", 0);
    MetadataTools.setDefaultCreationDate(store, id, 0);
    MetadataTools.populatePixels(store, this, true);

    status("Reading tags");

    // walk the tagged metadata blocks until the "fini" terminator
    byte[] tagBytes = new byte[4];
    in.read(tagBytes);
    String tag = new String(tagBytes);
    while (!tag.equals("fini") && in.getFilePointer() < in.length() - 4) {
      int size = in.readInt();

      if (tag.equals("clut")) {
        // read in Color Lookup Table
        if (size == 8) {
          // indexed lookup table
          in.skipBytes(4);
          int type = in.readInt();

          String[] types = new String[] {
            "monochrome", "reverse monochrome", "BGR", "classify",
            "rainbow", "red", "green", "blue", "cyan", "magenta", "yellow",
            "saturated pixels"
          };
          String clutType =
            (type >= 0 && type < types.length) ? types[type] : "unknown";
          addMeta("LUT type", clutType);
        }
        else {
          // explicitly defined lookup table
          // length is 772
          in.skipBytes(772);
        }
      }
      else if (tag.equals("norm")) {
        // read in normalization information

        if (size != (44 * getSizeC())) {
          throw new FormatException("Bad normalization settings");
        }

        String[] types = new String[] {
          "user", "plane", "sequence", "saturated plane",
          "saturated sequence", "ROI"
        };

        for (int i=0; i<getSizeC(); i++) {
          int source = in.readInt();

          String sourceType =
            (source >= 0 && source < types.length) ? types[source] : "user";
          addMeta("NormalizationSource" + i, sourceType);

          double min = in.readDouble();
          double max = in.readDouble();
          double gamma = in.readDouble();
          double black = in.readDouble();
          double white = in.readDouble();

          addMeta("NormalizationMin" + i, min);
          addMeta("NormalizationMax" + i, max);
          addMeta("NormalizationGamma" + i, gamma);
          addMeta("NormalizationBlack" + i, black);
          addMeta("NormalizationWhite" + i, white);

          // CTR CHECK
          //store.setDisplayChannel(new Integer(core[0].sizeC),
          //  new Double(black), new Double(white), new Float(gamma), null);
        }
      }
      else if (tag.equals("head")) {
        // read in header labels
        for (int i=0; i<size / 22; i++) {
          int num = in.readShort();
          addMeta("Header" + num, in.readString(20));
        }
      }
      else if (tag.equals("mmrc")) {
        in.skipBytes(size);
      }
      else if (tag.equals("roi ")) {
        // read in ROI information

        in.skipBytes(4);
        int roiLeft = in.readInt();
        int roiTop = in.readInt();
        int roiRight = in.readInt();
        int roiBottom = in.readInt();
        int numRoiPts = in.readInt();

        Integer x0 = new Integer(roiLeft);
        Integer x1 = new Integer(roiRight);
        Integer y0 = new Integer(roiBottom);
        Integer y1 = new Integer(roiTop);
        // TODO
        //store.setDisplayROIX0(x0, 0, 0);
        //store.setDisplayROIY0(y0, 0, 0);
        //store.setDisplayROIX1(x1, 0, 0);
        //store.setDisplayROIY1(y1, 0, 0);
        in.skipBytes(8 * numRoiPts);
      }
      else if (tag.equals("mask")) {
        // read in Segmentation Mask
        in.skipBytes(size);
      }
      else if (tag.equals("unit")) {
        // read in units

        for (int i=0; i<4; i++) {
          int xResStyle = in.readInt();
          float unitsPerPixel = in.readFloat();
          int xUnitName = in.readInt();

          addMeta("ResolutionStyle" + i, xResStyle);
          addMeta("UnitsPerPixel" + i, unitsPerPixel);

          // scale unitsPerPixel according to the unit code
          switch (xUnitName) {
            case 2: unitsPerPixel *= 1000; break;
            case 3: unitsPerPixel *= 10000; break;
            case 4: unitsPerPixel *= 1000000; break;
            case 5: // inch
              unitsPerPixel *= 3937;
              break;
            case 6:
              unitsPerPixel *= 47244;
              break;
          }

          // only the first entry is written to the metadata store
          if (i == 0) {
            Float pixelSize = new Float(unitsPerPixel);
            store.setDimensionsPhysicalSizeX(pixelSize, 0, 0);
            store.setDimensionsPhysicalSizeY(pixelSize, 0, 0);
          }
          addMeta("UnitName" + i, xUnitName);
        }
      }
      else if (tag.equals("view")) {
        // read in view
        in.skipBytes(size);
      }
      else if (tag.equals("plot")) {
        // read in plot
        // skipping this field for the moment
        in.skipBytes(size);
      }
      else if (tag.equals("note")) {
        // read in notes (image info)
        String descriptor = in.readString(64);
        String notes = in.readString(512);
        addMeta("Descriptor", descriptor);
        addMeta("Notes", notes);
        store.setImageDescription(notes, 0);
      }
      else if (tagBytes[0] == 0x1a && tagBytes[1] == (byte) 0xd9 &&
        tagBytes[2] == (byte) 0x8b && tagBytes[3] == (byte) 0xef)
      {
        // binary timestamp block
        int units = in.readInt();

        for (int i=0; i<getSizeT(); i++) {
          float timepoint = in.readFloat();
          // normalize to seconds
          switch (units) {
            case 0:
              // time stored in milliseconds
              timepoint /= 1000;
              break;
            case 2:
              // time stored in minutes
              timepoint *= 60;
              break;
            case 3:
              // time stored in hours
              timepoint *= 60 * 60;
              break;
          }

          addMeta("Timestamp " + i, timepoint);

          for (int c=0; c<getSizeC(); c++) {
            for (int z=0; z<getSizeZ(); z++) {
              int plane = getIndex(z, c, i);
              store.setPlaneTimingDeltaT(new Float(timepoint), 0, 0, plane);
            }
          }
          if (i == 1) {
            store.setDimensionsTimeIncrement(new Float(timepoint), 0, 0);
          }
        }
      }
      else in.skipBytes(size);

      // advance to the next tag, or synthesize "fini" at end of file
      if (in.getFilePointer() + 4 <= in.length()) {
        in.read(tagBytes);
        tag = new String(tagBytes);
      }
      else {
        tag = "fini";
      }
      if (in.getFilePointer() >= in.length() && !tag.equals("fini")) {
        tag = "fini";
      }
    }
  }
}
// LeicaReader.java package loci.formats.in; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.Hashtable; import java.util.StringTokenizer; import java.util.Vector; import loci.common.DataTools; import loci.common.DateTools; import loci.common.Location; import loci.common.RandomAccessInputStream; import loci.formats.AxisGuesser; import loci.formats.CoreMetadata; import loci.formats.FilePattern; import loci.formats.FormatException; import loci.formats.FormatReader; import loci.formats.FormatTools; import loci.formats.MetadataTools; import loci.formats.meta.FilterMetadata; import loci.formats.meta.MetadataStore; import loci.formats.tiff.IFD; import loci.formats.tiff.IFDList; import loci.formats.tiff.TiffConstants; import loci.formats.tiff.TiffParser; public class LeicaReader extends FormatReader { // -- Constants - public static final String[] LEI_SUFFIX = {"lei"}; /** All Leica TIFFs have this tag. */ private static final int LEICA_MAGIC_TAG = 33923; /** Format for dates. */ private static final String DATE_FORMAT = "yyyy:MM:dd,HH:mm:ss:SSS"; /** IFD tags. 
*/ private static final Integer SERIES = new Integer(10); private static final Integer IMAGES = new Integer(15); private static final Integer DIMDESCR = new Integer(20); private static final Integer FILTERSET = new Integer(30); private static final Integer TIMEINFO = new Integer(40); private static final Integer SCANNERSET = new Integer(50); private static final Integer EXPERIMENT = new Integer(60); private static final Integer LUTDESC = new Integer(70); private static final Integer CHANDESC = new Integer(80); private static final Integer SEQUENTIALSET = new Integer(90); private static final Integer SEQ_SCANNERSET = new Integer(200); private static final Integer SEQ_FILTERSET = new Integer(700); private static final int SEQ_SCANNERSET_END = 300; private static final int SEQ_FILTERSET_END = 800; private static final Hashtable<String, Integer> CHANNEL_PRIORITIES = createChannelPriorities(); private static Hashtable<String, Integer> createChannelPriorities() { Hashtable<String, Integer> h = new Hashtable<String, Integer>(); h.put("red", new Integer(0)); h.put("green", new Integer(1)); h.put("blue", new Integer(2)); h.put("cyan", new Integer(3)); h.put("magenta", new Integer(4)); h.put("yellow", new Integer(5)); h.put("black", new Integer(6)); h.put("gray", new Integer(7)); h.put("", new Integer(8)); return h; } // -- Static fields -- private static Hashtable<Integer, String> dimensionNames = makeDimensionTable(); // -- Fields -- protected IFDList ifds; /** Array of IFD-like structures containing metadata. */ protected IFDList headerIFDs; /** Helper readers. */ protected MinimalTiffReader tiff; /** Array of image file names. */ protected Vector[] files; /** Number of series in the file. 
   */
  private int numSeries;

  /** Name of current LEI file */
  private String leiFilename;

  // per-series names/descriptions collected while parsing the LEI file
  private Vector<String> seriesNames;
  private Vector<String> seriesDescriptions;

  // index of the most recently opened plane; the LUT getters use it to pick
  // which companion TIFF to consult
  private int lastPlane = 0;

  private double[][] physicalSizes;
  private double[] pinhole, exposureTime;

  private int nextDetector = 0, nextChannel = 0;
  private Vector<Integer> activeChannelIndices = new Vector<Integer>();
  private boolean sequential = false;

  private Vector[] channelNames;
  private Vector[] emWaves;
  private Vector[] exWaves;
  private boolean[][] cutInPopulated;
  private boolean[][] cutOutPopulated;

  // -- Constructor --

  /** Constructs a new Leica reader. */
  public LeicaReader() {
    super("Leica", new String[] {"lei", "tif", "tiff"});
    domains = new String[] {FormatTools.LM_DOMAIN};
  }

  // -- IFormatReader API methods --

  /* @see loci.formats.IFormatReader#isSingleFile(String) */
  public boolean isSingleFile(String id) throws FormatException, IOException {
    // a Leica dataset always spans an .lei file plus companion TIFFs
    return false;
  }

  /* @see loci.formats.IFormatReader#isThisType(String, boolean) */
  public boolean isThisType(String name, boolean open) {
    if (checkSuffix(name, LEI_SUFFIX)) return true;
    if (!checkSuffix(name, TiffReader.TIFF_SUFFIXES)) return false;
    if (!open) return false; // not allowed to touch the file system

    // check that there is an .lei file in the same directory
    String prefix = name;
    if (prefix.indexOf(".") != -1) {
      prefix = prefix.substring(0, prefix.lastIndexOf("."));
    }
    Location lei = new Location(prefix + ".lei");
    if (!lei.exists()) {
      lei = new Location(prefix + ".LEI");
      // strip trailing "_..." segments until a matching .lei/.LEI is found
      while (!lei.exists() && prefix.indexOf("_") != -1) {
        prefix = prefix.substring(0, prefix.lastIndexOf("_"));
        lei = new Location(prefix + ".lei");
        if (!lei.exists()) lei = new Location(prefix + ".LEI");
      }
    }
    return lei.exists();
  }

  /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */
  public boolean isThisType(RandomAccessInputStream stream) throws IOException {
    TiffParser tp = new TiffParser(stream);
    IFD ifd = tp.getFirstIFD();
    if (ifd == null) return
      false;
    // Leica TIFFs are identified by a private TIFF tag
    return ifd.containsKey(new Integer(LEICA_MAGIC_TAG));
  }

  /* @see loci.formats.IFormatReader#get8BitLookupTable() */
  public byte[][] get8BitLookupTable() throws FormatException, IOException {
    FormatTools.assertId(currentId, true, 1);
    // the LUT is read from the TIFF of the most recently opened plane
    try {
      tiff.setId((String) files[series].get(lastPlane));
      return tiff.get8BitLookupTable();
    }
    catch (IOException e) {
      LOGGER.debug("Failed to retrieve lookup table", e);
    }
    return null;
  }

  /* @see loci.formats.IFormatReader#get16BitLookupTable() */
  public short[][] get16BitLookupTable() throws FormatException, IOException {
    FormatTools.assertId(currentId, true, 1);
    try {
      tiff.setId((String) files[series].get(lastPlane));
      return tiff.get16BitLookupTable();
    }
    catch (IOException e) {
      LOGGER.debug("Failed to retrieve lookup table", e);
    }
    return null;
  }

  /* @see loci.formats.IFormatReader#fileGroupOption(String) */
  public int fileGroupOption(String id) throws FormatException, IOException {
    return FormatTools.MUST_GROUP;
  }

  /**
   * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int)
   */
  public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h)
    throws FormatException, IOException
  {
    FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h);
    lastPlane = no;

    // delegate to the helper TIFF reader for the plane's companion file
    if (no < files[series].size()) {
      String filename = (String) files[series].get(no);
      if (new Location(filename).exists()) {
        tiff.setId(filename);
        return tiff.openBytes(0, buf, x, y, w, h);
      }
    }

    // imitate Leica's software and return a blank plane if the
    // appropriate TIFF file is missing
    return buf;
  }

  /* @see loci.formats.IFormatReader#getSeriesUsedFiles(boolean) */
  public String[] getSeriesUsedFiles(boolean noPixels) {
    FormatTools.assertId(currentId, true, 1);
    Vector<String> v = new Vector<String>();
    if (leiFilename != null) v.add(leiFilename);
    if (!noPixels) {
      v.addAll(files[getSeries()]);
    }
    return v.toArray(new String[v.size()]);
  }

  /* @see loci.formats.IFormatReader#close(boolean) */
  public void close(boolean fileOnly) throws IOException {
super.close(fileOnly); if (tiff != null) tiff.close(fileOnly); if (!fileOnly) { leiFilename = null; files = null; ifds = headerIFDs = null; tiff = null; seriesNames = null; numSeries = 0; lastPlane = 0; physicalSizes = null; seriesDescriptions = null; pinhole = exposureTime = null; nextDetector = 0; nextChannel = 0; sequential = false; activeChannelIndices.clear(); channelNames = null; emWaves = null; exWaves = null; cutInPopulated = null; cutOutPopulated = null; } } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { close(); if (checkSuffix(id, TiffReader.TIFF_SUFFIXES) && isGroupFiles()) { // need to find the associated .lei file if (ifds == null) super.initFile(id); in = new RandomAccessInputStream(id); TiffParser tp = new TiffParser(in); in.order(tp.checkHeader().booleanValue()); in.seek(0); LOGGER.info("Finding companion file name"); // open the TIFF file and look for the "Image Description" field ifds = tp.getIFDs(); if (ifds == null) throw new FormatException("No IFDs found"); String descr = ifds.get(0).getComment(); // remove anything of the form "[blah]" descr = descr.replaceAll("\\[.*.\\]\n", ""); // each remaining line in descr is a (key, value) pair, // where '=' separates the key from the value String lei = id.substring(0, id.lastIndexOf(File.separator) + 1); StringTokenizer lines = new StringTokenizer(descr, "\n"); String line = null, key = null, value = null; while (lines.hasMoreTokens()) { line = lines.nextToken(); if (line.indexOf("=") == -1) continue; key = line.substring(0, line.indexOf("=")).trim(); value = line.substring(line.indexOf("=") + 1).trim(); addGlobalMeta(key, value); if (key.startsWith("Series Name")) lei += value; } // now open the LEI file Location l = new Location(lei).getAbsoluteFile(); if (l.exists()) { initFile(lei); return; } else { l = l.getParentFile(); String[] list = l.list(); for (int i=0; i<list.length; 
i++) { if (checkSuffix(list[i], LEI_SUFFIX)) { initFile( new Location(l.getAbsolutePath(), list[i]).getAbsolutePath()); return; } } } throw new FormatException("LEI file not found."); } else if (checkSuffix(id, TiffReader.TIFF_SUFFIXES) && !isGroupFiles()) { super.initFile(id); TiffReader r = new TiffReader(); r.setMetadataStore(getMetadataStore()); r.setId(id); core = r.getCoreMetadata(); metadata = r.getMetadata(); metadataStore = r.getMetadataStore(); r.close(); files = new Vector[] {new Vector()}; files[0].add(id); tiff = new MinimalTiffReader(); return; } // parse the LEI file super.initFile(id); leiFilename = new File(id).exists() ? new Location(id).getAbsolutePath() : id; in = new RandomAccessInputStream(id); seriesNames = new Vector<String>(); byte[] fourBytes = new byte[4]; in.read(fourBytes); core[0].littleEndian = (fourBytes[0] == TiffConstants.LITTLE && fourBytes[1] == TiffConstants.LITTLE && fourBytes[2] == TiffConstants.LITTLE && fourBytes[3] == TiffConstants.LITTLE); in.order(isLittleEndian()); LOGGER.info("Reading metadata blocks"); in.skipBytes(8); int addr = in.readInt(); headerIFDs = new IFDList(); while (addr != 0) { IFD ifd = new IFD(); headerIFDs.add(ifd); in.seek(addr + 4); int tag = in.readInt(); while (tag != 0) { // create the IFD structure int offset = in.readInt(); long pos = in.getFilePointer(); in.seek(offset + 12); int size = in.readInt(); byte[] data = new byte[size]; in.read(data); ifd.putIFDValue(tag, data); in.seek(pos); tag = in.readInt(); } addr = in.readInt(); } numSeries = headerIFDs.size(); core = new CoreMetadata[numSeries]; for (int i=0; i<numSeries; i++) { core[i] = new CoreMetadata(); } files = new Vector[numSeries]; channelNames = new Vector[getSeriesCount()]; emWaves = new Vector[getSeriesCount()]; exWaves = new Vector[getSeriesCount()]; cutInPopulated = new boolean[getSeriesCount()][]; cutOutPopulated = new boolean[getSeriesCount()][]; for (int i=0; i<getSeriesCount(); i++) { channelNames[i] = new Vector(); emWaves[i] 
= new Vector(); exWaves[i] = new Vector(); } // determine the length of a filename int nameLength = 0; int maxPlanes = 0; LOGGER.info("Parsing metadata blocks"); core[0].littleEndian = !isLittleEndian(); int seriesIndex = 0; boolean[] valid = new boolean[numSeries]; for (int i=0; i<headerIFDs.size(); i++) { IFD ifd = headerIFDs.get(i); valid[i] = true; if (ifd.get(SERIES) != null) { byte[] temp = (byte[]) ifd.get(SERIES); nameLength = DataTools.bytesToInt(temp, 8, isLittleEndian()) * 2; } Vector<String> f = new Vector<String>(); byte[] tempData = (byte[]) ifd.get(IMAGES); RandomAccessInputStream data = new RandomAccessInputStream(tempData); data.order(isLittleEndian()); int tempImages = data.readInt(); if (((long) tempImages * nameLength) > data.length()) { data.order(!isLittleEndian()); tempImages = data.readInt(); data.order(isLittleEndian()); } core[i].sizeX = data.readInt(); core[i].sizeY = data.readInt(); data.skipBytes(4); int samplesPerPixel = data.readInt(); core[i].rgb = samplesPerPixel > 1; core[i].sizeC = samplesPerPixel; File dirFile = new File(id).getAbsoluteFile(); String[] listing = null; String dirPrefix = ""; if (dirFile.exists()) { listing = dirFile.getParentFile().list(); dirPrefix = dirFile.getParent(); if (!dirPrefix.endsWith(File.separator)) dirPrefix += File.separator; } else { listing = (String[]) Location.getIdMap().keySet().toArray(new String[0]); } Vector<String> list = new Vector<String>(); for (int k=0; k<listing.length; k++) { if (checkSuffix(listing[k], TiffReader.TIFF_SUFFIXES)) { list.add(listing[k]); } } boolean tiffsExist = false; String prefix = ""; for (int j=0; j<tempImages; j++) { // read in each filename prefix = getString(data, nameLength); f.add(dirPrefix + prefix); // test to make sure the path is valid Location test = new Location(f.get(f.size() - 1)); if (test.exists()) list.remove(prefix); if (!tiffsExist) tiffsExist = test.exists(); } data.close(); tempData = null; // all of the TIFF files were renamed if (!tiffsExist) 
{ // Strategy for handling renamed files: // 1) Assume that files for each series follow a pattern. // 2) Assign each file group to the first series with the correct count. LOGGER.info("Handling renamed TIFF files"); listing = list.toArray(new String[list.size()]); // grab the file patterns Vector<String> filePatterns = new Vector<String>(); for (String q : listing) { Location l = new Location(dirPrefix, q).getAbsoluteFile(); FilePattern pattern = new FilePattern(l); AxisGuesser guess = new AxisGuesser(pattern, "XYZCT", 1, 1, 1, false); String fp = pattern.getPattern(); if (guess.getAxisCountS() >= 1) { String pre = pattern.getPrefix(guess.getAxisCountS()); Vector<String> fileList = new Vector<String>(); for (int n=0; n<listing.length; n++) { Location p = new Location(dirPrefix, listing[n]); if (p.getAbsolutePath().startsWith(pre)) { fileList.add(listing[n]); } } fp = FilePattern.findPattern(l.getAbsolutePath(), dirPrefix, fileList.toArray(new String[fileList.size()])); } if (fp != null && !filePatterns.contains(fp)) { filePatterns.add(fp); } } for (String q : filePatterns) { String[] pattern = new FilePattern(q).getFiles(); if (pattern.length == tempImages) { // make sure that this pattern hasn't already been used boolean validPattern = true; for (int n=0; n<i; n++) { if (files[n] == null) continue; if (files[n].contains(pattern[0])) { validPattern = false; break; } } if (validPattern) { files[i] = new Vector<String>(); files[i].addAll(Arrays.asList(pattern)); } } } } else files[i] = f; if (files[i] == null) valid[i] = false; else { core[i].imageCount = files[i].size(); if (core[i].imageCount > maxPlanes) maxPlanes = core[i].imageCount; } } int invalidCount = 0; for (int i=0; i<valid.length; i++) { if (!valid[i]) invalidCount++; } numSeries -= invalidCount; int[] count = new int[getSeriesCount()]; for (int i=0; i<getSeriesCount(); i++) { count[i] = core[i].imageCount; } Vector[] tempFiles = files; IFDList tempIFDs = headerIFDs; core = new CoreMetadata[numSeries]; 
files = new Vector[numSeries]; headerIFDs = new IFDList(); int index = 0; for (int i=0; i<numSeries; i++) { core[i] = new CoreMetadata(); while (!valid[index]) index++; core[i].imageCount = count[index]; files[i] = tempFiles[index]; Object[] sorted = files[i].toArray(); Arrays.sort(sorted); files[i].clear(); files[i].addAll(Arrays.asList(sorted)); headerIFDs.add(tempIFDs.get(index)); index++; } tiff = new MinimalTiffReader(); LOGGER.info("Populating metadata"); if (headerIFDs == null) headerIFDs = ifds; int fileLength = 0; int resolution = -1; String[][] timestamps = new String[headerIFDs.size()][]; seriesDescriptions = new Vector<String>(); physicalSizes = new double[headerIFDs.size()][5]; pinhole = new double[headerIFDs.size()]; exposureTime = new double[headerIFDs.size()]; for (int i=0; i<headerIFDs.size(); i++) { IFD ifd = headerIFDs.get(i); core[i].littleEndian = isLittleEndian(); setSeries(i); Object[] keys = ifd.keySet().toArray(); Arrays.sort(keys); for (int q=0; q<keys.length; q++) { byte[] tmp = (byte[]) ifd.get(keys[q]); if (tmp == null) continue; RandomAccessInputStream stream = new RandomAccessInputStream(tmp); stream.order(isLittleEndian()); if (keys[q].equals(SERIES)) { addSeriesMeta("Version", stream.readInt()); addSeriesMeta("Number of Series", stream.readInt()); fileLength = stream.readInt(); addSeriesMeta("Length of filename", fileLength); int extLen = stream.readInt(); if (extLen > fileLength) { stream.seek(8); core[0].littleEndian = !isLittleEndian(); stream.order(isLittleEndian()); fileLength = stream.readInt(); extLen = stream.readInt(); } addSeriesMeta("Length of file extension", extLen); addSeriesMeta("Image file extension", getString(stream, extLen)); } else if (keys[q].equals(IMAGES)) { core[i].imageCount = stream.readInt(); core[i].sizeX = stream.readInt(); core[i].sizeY = stream.readInt(); addSeriesMeta("Number of images", getImageCount()); addSeriesMeta("Image width", getSizeX()); addSeriesMeta("Image height", getSizeY()); 
addSeriesMeta("Bits per Sample", stream.readInt()); addSeriesMeta("Samples per pixel", stream.readInt()); String name = getString(stream, fileLength * 2); if (name.indexOf(".") != -1) { name = name.substring(0, name.lastIndexOf(".")); } String[] tokens = name.split("_"); StringBuffer buf = new StringBuffer(); for (int p=1; p<tokens.length; p++) { String lcase = tokens[p].toLowerCase(); if (!lcase.startsWith("ch0") && !lcase.startsWith("c0") && !lcase.startsWith("z0") && !lcase.startsWith("t0")) { if (buf.length() > 0) buf.append("_"); buf.append(tokens[p]); } } seriesNames.add(buf.toString()); } else if (keys[q].equals(DIMDESCR)) { addSeriesMeta("Voxel Version", stream.readInt()); core[i].rgb = stream.readInt() == 20; addSeriesMeta("VoxelType", isRGB() ? "RGB" : "gray"); int bpp = stream.readInt(); addSeriesMeta("Bytes per pixel", bpp); switch (bpp) { case 1: core[i].pixelType = FormatTools.UINT8; break; case 3: core[i].pixelType = FormatTools.UINT8; core[i].sizeC = 3; core[i].rgb = true; break; case 2: core[i].pixelType = FormatTools.UINT16; break; case 6: core[i].pixelType = FormatTools.UINT16; core[i].sizeC = 3; core[i].rgb = true; break; case 4: core[i].pixelType = FormatTools.UINT32; break; default: throw new FormatException("Unsupported bytes per pixel (" + bpp + ")"); } core[i].dimensionOrder = "XY"; resolution = stream.readInt(); core[i].bitsPerPixel = resolution; addSeriesMeta("Real world resolution", resolution); addSeriesMeta("Maximum voxel intensity", getString(stream, true)); addSeriesMeta("Minimum voxel intensity", getString(stream, true)); int len = stream.readInt(); stream.skipBytes(len * 2 + 4); len = stream.readInt(); for (int j=0; j<len; j++) { int dimId = stream.readInt(); String dimType = dimensionNames.get(new Integer(dimId)); if (dimType == null) dimType = ""; int size = stream.readInt(); int distance = stream.readInt(); int strlen = stream.readInt() * 2; String[] sizeData = getString(stream, strlen).split(" "); String physicalSize = 
sizeData[0]; String unit = ""; if (sizeData.length > 1) unit = sizeData[1]; double physical = Double.parseDouble(physicalSize) / size; if (unit.equals("m")) { physical *= 1000000; } if (dimType.equals("x")) { core[i].sizeX = size; physicalSizes[i][0] = physical; } else if (dimType.equals("y")) { core[i].sizeY = size; physicalSizes[i][1] = physical; } else if (dimType.equals("channel")) { if (getSizeC() == 0) core[i].sizeC = 1; core[i].sizeC *= size; if (getDimensionOrder().indexOf("C") == -1) { core[i].dimensionOrder += "C"; } physicalSizes[i][3] = physical; } else if (dimType.equals("z")) { core[i].sizeZ = size; } else { core[i].sizeT = size; if (getDimensionOrder().indexOf("T") == -1) { core[i].dimensionOrder += "T"; } physicalSizes[i][4] = physical; } String dimPrefix = "Dim" + j; addSeriesMeta(dimPrefix + " type", dimType); addSeriesMeta(dimPrefix + " size", size); addSeriesMeta(dimPrefix + " distance between sub-dimensions", distance); addSeriesMeta(dimPrefix + " physical length", physicalSize + " " + unit); addSeriesMeta(dimPrefix + " physical origin", getString(stream, true)); } addSeriesMeta("Series name", getString(stream, false)); String description = getString(stream, false); seriesDescriptions.add(description); addSeriesMeta("Series description", description); } else if (keys[q].equals(TIMEINFO)) { int nDims = stream.readInt(); addSeriesMeta("Number of time-stamped dimensions", nDims); addSeriesMeta("Time-stamped dimension", stream.readInt()); for (int j=0; j<nDims; j++) { String dimPrefix = "Dimension " + j; addSeriesMeta(dimPrefix + " ID", stream.readInt()); addSeriesMeta(dimPrefix + " size", stream.readInt()); addSeriesMeta(dimPrefix + " distance", stream.readInt()); } int numStamps = stream.readInt(); addSeriesMeta("Number of time-stamps", numStamps); timestamps[i] = new String[numStamps]; for (int j=0; j<numStamps; j++) { timestamps[i][j] = getString(stream, 64); addSeriesMeta("Timestamp " + j, timestamps[i][j]); } if (stream.getFilePointer() < 
stream.length()) { int numTMs = stream.readInt(); addSeriesMeta("Number of time-markers", numTMs); for (int j=0; j<numTMs; j++) { int numDims = stream.readInt(); String time = "Time-marker " + j + " Dimension "; for (int k=0; k<numDims; k++) { addSeriesMeta(time + k + " coordinate", stream.readInt()); } addSeriesMeta("Time-marker " + j, getString(stream, 64)); } } } else if (keys[q].equals(EXPERIMENT)) { stream.skipBytes(8); String description = getString(stream, true); addSeriesMeta("Image Description", description); addSeriesMeta("Main file extension", getString(stream, true)); addSeriesMeta("Image format identifier", getString(stream, true)); addSeriesMeta("Single image extension", getString(stream, true)); } else if (keys[q].equals(LUTDESC)) { int nChannels = stream.readInt(); if (nChannels > 0) core[i].indexed = true; addSeriesMeta("Number of LUT channels", nChannels); addSeriesMeta("ID of colored dimension", stream.readInt()); for (int j=0; j<nChannels; j++) { String p = "LUT Channel " + j; addSeriesMeta(p + " version", stream.readInt()); addSeriesMeta(p + " inverted?", stream.read() == 1); addSeriesMeta(p + " description", getString(stream, false)); addSeriesMeta(p + " filename", getString(stream, false)); String lut = getString(stream, false); addSeriesMeta(p + " name", lut); stream.skipBytes(8); } } else if (keys[q].equals(CHANDESC)) { int nBands = stream.readInt(); for (int band=0; band<nBands; band++) { String p = "Band #" + (band + 1) + " "; addSeriesMeta(p + "Lower wavelength", stream.readDouble()); stream.skipBytes(4); addSeriesMeta(p + "Higher wavelength", stream.readDouble()); stream.skipBytes(4); addSeriesMeta(p + "Gain", stream.readDouble()); addSeriesMeta(p + "Offset", stream.readDouble()); } } stream.close(); } core[i].orderCertain = true; core[i].littleEndian = isLittleEndian(); core[i].falseColor = true; core[i].metadataComplete = true; core[i].interleaved = false; } for (int i=0; i<numSeries; i++) { setSeries(i); if (getSizeZ() == 0) 
core[i].sizeZ = 1; if (getSizeT() == 0) core[i].sizeT = 1; if (getSizeC() == 0) core[i].sizeC = 1; if (getImageCount() == 0) core[i].imageCount = 1; if (getImageCount() == 1 && getSizeZ() * getSizeT() > 1) { core[i].sizeZ = 1; core[i].sizeT = 1; } if (getSizeY() == 1) { // XZ or XT scan if (getSizeZ() > 1 && getImageCount() == getSizeC() * getSizeT()) { core[i].sizeY = getSizeZ(); core[i].sizeZ = 1; } else if (getSizeT() > 1 && getImageCount() == getSizeC() * getSizeZ()) { core[i].sizeY = getSizeT(); core[i].sizeT = 1; } } if (isRGB()) core[i].indexed = false; if (getDimensionOrder().indexOf("C") == -1) { core[i].dimensionOrder += "C"; } if (getDimensionOrder().indexOf("Z") == -1) { core[i].dimensionOrder += "Z"; } if (getDimensionOrder().indexOf("T") == -1) { core[i].dimensionOrder += "T"; } } MetadataStore store = new FilterMetadata(getMetadataStore(), isMetadataFiltered()); MetadataTools.populatePixels(store, this, true); for (int i=0; i<numSeries; i++) { IFD ifd = headerIFDs.get(i); long firstPlane = 0; if (i < timestamps.length && timestamps[i] != null && timestamps[i].length > 0) { firstPlane = DateTools.getTime(timestamps[i][0], DATE_FORMAT); store.setImageCreationDate( DateTools.formatDate(timestamps[i][0], DATE_FORMAT), i); } else { MetadataTools.setDefaultCreationDate(store, id, i); } store.setImageName(seriesNames.get(i), i); store.setImageDescription(seriesDescriptions.get(i), i); String instrumentID = MetadataTools.createLSID("Instrument", i); store.setInstrumentID(instrumentID, i); // parse instrument data nextDetector = 0; nextChannel = 0; cutInPopulated[i] = new boolean[core[i].sizeC]; cutOutPopulated[i] = new boolean[core[i].sizeC]; Object[] keys = ifd.keySet().toArray(); Arrays.sort(keys); int nextInstrumentBlock = 1; sequential = DataTools.indexOf(keys, SEQ_SCANNERSET) != -1; for (int q=0; q<keys.length; q++) { if (keys[q].equals(FILTERSET) || keys[q].equals(SCANNERSET) || keys[q].equals(SEQ_SCANNERSET) || keys[q].equals(SEQ_FILTERSET) || 
(((Integer) keys[q]).intValue() > SEQ_SCANNERSET.intValue() && ((Integer) keys[q]).intValue() < SEQ_SCANNERSET_END) || (((Integer) keys[q]).intValue() > SEQ_FILTERSET.intValue() && ((Integer) keys[q]).intValue() < SEQ_FILTERSET_END)) { if (sequential && (keys[q].equals(FILTERSET) || keys[q].equals(SCANNERSET))) { continue; } byte[] tmp = (byte[]) ifd.get(keys[q]); if (tmp == null) continue; RandomAccessInputStream stream = new RandomAccessInputStream(tmp); stream.order(isLittleEndian()); parseInstrumentData(stream, store, i, nextInstrumentBlock++); stream.close(); } } activeChannelIndices.clear(); // link Instrument and Image store.setImageInstrumentRef(instrumentID, i); store.setDimensionsPhysicalSizeX(new Double(physicalSizes[i][0]), i, 0); store.setDimensionsPhysicalSizeY(new Double(physicalSizes[i][1]), i, 0); store.setDimensionsPhysicalSizeZ(new Double(physicalSizes[i][2]), i, 0); if ((int) physicalSizes[i][3] > 0) { store.setDimensionsWaveIncrement( new Integer((int) physicalSizes[i][3]), i, 0); } if ((int) physicalSizes[i][4] > 0) { store.setDimensionsTimeIncrement(new Double(physicalSizes[i][4]), i, 0); } for (int j=0; j<core[i].imageCount; j++) { if (timestamps[i] != null && j < timestamps[i].length) { long time = DateTools.getTime(timestamps[i][j], DATE_FORMAT); double elapsedTime = (double) (time - firstPlane) / 1000; store.setPlaneTimingDeltaT(new Double(elapsedTime), i, 0, j); store.setPlaneTimingExposureTime( new Double(exposureTime[i]), i, 0, j); } } } setSeries(0); } // -- Helper methods -- private void parseInstrumentData(RandomAccessInputStream stream, MetadataStore store, int series, int blockNum) throws IOException { setSeries(series); // read 24 byte SAFEARRAY stream.skipBytes(4); int cbElements = stream.readInt(); stream.skipBytes(8); int nElements = stream.readInt(); stream.skipBytes(4); for (int j=0; j<nElements; j++) { stream.seek(24 + j * cbElements); String contentID = getString(stream, 128); String description = getString(stream, 64); 
String data = getString(stream, 64); int dataType = stream.readShort(); stream.skipBytes(6); // read data switch (dataType) { case 2: data = String.valueOf(stream.readShort()); break; case 3: data = String.valueOf(stream.readInt()); break; case 4: data = String.valueOf(stream.readFloat()); break; case 5: data = String.valueOf(stream.readDouble()); break; case 7: case 11: data = stream.read() == 0 ? "false" : "true"; break; case 17: data = stream.readString(1); break; } String[] tokens = contentID.split("\\|"); if (tokens[0].startsWith("CDetectionUnit")) { // detector information if (tokens[1].startsWith("PMT")) { try { if (tokens[2].equals("VideoOffset")) { store.setDetectorOffset(new Double(data), series, nextDetector); } else if (tokens[2].equals("HighVoltage")) { store.setDetectorVoltage(new Double(data), series, nextDetector); nextDetector++; } else if (tokens[2].equals("State")) { store.setDetectorType("PMT", series, nextDetector); // link Detector to Image, if the detector was actually used if (data.equals("Active")) { String index = tokens[1].substring(tokens[1].indexOf(" ") + 1); int channelIndex = -1; try { channelIndex = Integer.parseInt(index) - 1; } catch (NumberFormatException e) { } if (channelIndex >= 0) { activeChannelIndices.add(new Integer(channelIndex)); } String detectorID = MetadataTools.createLSID("Detector", series, nextDetector); store.setDetectorID(detectorID, series, nextDetector); if (nextDetector == 0) { // link every channel to the first detector in the beginning // if additional detectors are found, the links will be // overwritten for (int c=0; c<getEffectiveSizeC(); c++) { store.setDetectorSettingsDetector(detectorID, series, c); } } if (nextChannel < getEffectiveSizeC()) { store.setDetectorSettingsDetector( detectorID, series, nextChannel++); } } } } catch (NumberFormatException e) { LOGGER.debug("Failed to parse detector metadata", e); } } } else if (tokens[0].startsWith("CTurret")) { // objective information int objective = 
Integer.parseInt(tokens[3]); if (tokens[2].equals("NumericalAperture")) { store.setObjectiveLensNA(new Double(data), series, objective); } else if (tokens[2].equals("Objective")) { String[] objectiveData = data.split(" "); StringBuffer model = new StringBuffer(); String mag = null, na = null; String immersion = null, correction = null; for (int i=0; i<objectiveData.length; i++) { if (objectiveData[i].indexOf("x") != -1 && mag == null && na == null) { int xIndex = objectiveData[i].indexOf("x"); mag = objectiveData[i].substring(0, xIndex).trim(); na = objectiveData[i].substring(xIndex + 1).trim(); } else if (mag == null && na == null) { model.append(objectiveData[i]); model.append(" "); } else if (correction == null) { correction = objectiveData[i]; } else if (immersion == null) { immersion = objectiveData[i]; } } if (immersion == null || immersion.trim().equals("")) { immersion = "Unknown"; } if (correction == null) correction = "Unknown"; store.setObjectiveImmersion(immersion, series, objective); store.setObjectiveCorrection(correction.trim(), series, objective); store.setObjectiveModel(model.toString().trim(), series, objective); store.setObjectiveLensNA(new Double(na), series, objective); store.setObjectiveNominalMagnification( new Integer((int) Double.parseDouble(mag)), series, objective); } else if (tokens[2].equals("OrderNumber")) { store.setObjectiveSerialNumber(data, series, objective); } else if (tokens[2].equals("RefractionIndex")) { store.setObjectiveSettingsRefractiveIndex(new Double(data), series); } // link Objective to Image String objectiveID = MetadataTools.createLSID("Objective", series, objective); store.setObjectiveID(objectiveID, series, objective); if (objective == 0) { store.setObjectiveSettingsObjective(objectiveID, series); } } else if (tokens[0].startsWith("CSpectrophotometerUnit")) { int ndx = tokens[1].lastIndexOf(" "); int channel = Integer.parseInt(tokens[1].substring(ndx + 1)) - 1; if (tokens[2].equals("Wavelength")) { Integer 
wavelength = new Integer((int) Double.parseDouble(data)); store.setFilterModel(tokens[1], series, channel); String filterID = MetadataTools.createLSID("Filter", series, channel); store.setFilterID(filterID, series, channel); int index = activeChannelIndices.indexOf(new Integer(channel)); if (index >= 0 && index < core[series].sizeC) { store.setLogicalChannelSecondaryEmissionFilter(filterID, series, index); if (tokens[3].equals("0") && !cutInPopulated[series][index]) { store.setTransmittanceRangeCutIn(wavelength, series, channel); cutInPopulated[series][index] = true; } else if (tokens[3].equals("1") && !cutOutPopulated[series][index]) { store.setTransmittanceRangeCutOut(wavelength, series, channel); cutOutPopulated[series][index] = true; } } } else if (tokens[2].equals("Stain")) { if (activeChannelIndices.contains(new Integer(channel))) { int nNames = channelNames[series].size(); String prevValue = nNames == 0 ? "" : (String) channelNames[series].get(nNames - 1); if (!prevValue.equals(data)) { channelNames[series].add(data); } } } } else if (tokens[0].startsWith("CXYZStage")) { // NB: there is only one stage position specified for each series if (tokens[2].equals("XPos")) { for (int q=0; q<core[series].imageCount; q++) { store.setStagePositionPositionX(new Double(data), series, 0, q); } } else if (tokens[2].equals("YPos")) { for (int q=0; q<core[series].imageCount; q++) { store.setStagePositionPositionY(new Double(data), series, 0, q); } } else if (tokens[2].equals("ZPos")) { for (int q=0; q<core[series].imageCount; q++) { store.setStagePositionPositionZ(new Double(data), series, 0, q); } } } else if (tokens[0].equals("CScanActuator") && tokens[1].equals("Z Scan Actuator") && tokens[2].equals("Position")) { double pos = Double.parseDouble(data) * 1000000; for (int q=0; q<core[series].imageCount; q++) { store.setStagePositionPositionZ(new Double(pos), series, 0, q); } } if (contentID.equals("dblVoxelX")) { physicalSizes[series][0] = Double.parseDouble(data); } else 
if (contentID.equals("dblVoxelY")) { physicalSizes[series][1] = Double.parseDouble(data); } else if (contentID.equals("dblStepSize")) { physicalSizes[series][2] = Double.parseDouble(data) * 1000000; } else if (contentID.equals("dblPinhole")) { // pinhole is stored in meters pinhole[series] = Double.parseDouble(data) * 1000000; } else if (contentID.startsWith("nDelayTime")) { exposureTime[series] = Double.parseDouble(data); if (contentID.endsWith("_ms")) { exposureTime[series] /= 1000; } } addSeriesMeta("Block " + blockNum + " " + contentID, data); } stream.close(); // populate saved LogicalChannel data for (int i=0; i<getSeriesCount(); i++) { setSeries(i); for (int channel=0; channel<getEffectiveSizeC(); channel++) { if (channel < channelNames[i].size()) { String name = (String) channelNames[i].get(channel); if (name != null && !name.trim().equals("") && !name.equals("None")) { store.setLogicalChannelName(name, i, channel); } } if (channel < emWaves[i].size()) { store.setLogicalChannelEmWave((Integer) emWaves[i].get(channel), i, channel); } if (channel < exWaves[i].size()) { store.setLogicalChannelExWave((Integer) exWaves[i].get(channel), i, channel); } if (i < pinhole.length) { store.setLogicalChannelPinholeSize( new Double(pinhole[i]), i, channel); } } } setSeries(0); } private boolean usedFile(String s) { if (files == null) return false; for (int i=0; i<files.length; i++) { if (files[i] == null) continue; for (int j=0; j<files[i].size(); j++) { if (((String) files[i].get(j)).endsWith(s)) return true; } } return false; } private String getString(RandomAccessInputStream stream, int len) throws IOException { return DataTools.stripString(stream.readString(len)); } private String getString(RandomAccessInputStream stream, boolean doubleLength) throws IOException { int len = stream.readInt(); if (doubleLength) len *= 2; return getString(stream, len); } private static Hashtable<Integer, String> makeDimensionTable() { Hashtable<Integer, String> table = new 
Hashtable<Integer, String>(); table.put(new Integer(0), "undefined"); table.put(new Integer(120), "x"); table.put(new Integer(121), "y"); table.put(new Integer(122), "z"); table.put(new Integer(116), "t"); table.put(new Integer(6815843), "channel"); table.put(new Integer(6357100), "wave length"); table.put(new Integer(7602290), "rotation"); table.put(new Integer(7798904), "x-wide for the motorized xy-stage"); table.put(new Integer(7798905), "y-wide for the motorized xy-stage"); table.put(new Integer(7798906), "z-wide for the z-stage-drive"); table.put(new Integer(4259957), "user1 - unspecified"); table.put(new Integer(4325493), "user2 - unspecified"); table.put(new Integer(4391029), "user3 - unspecified"); table.put(new Integer(6357095), "graylevel"); table.put(new Integer(6422631), "graylevel1"); table.put(new Integer(6488167), "graylevel2"); table.put(new Integer(6553703), "graylevel3"); table.put(new Integer(7864398), "logical x"); table.put(new Integer(7929934), "logical y"); table.put(new Integer(7995470), "logical z"); table.put(new Integer(7602254), "logical t"); table.put(new Integer(7077966), "logical lambda"); table.put(new Integer(7471182), "logical rotation"); table.put(new Integer(5767246), "logical x-wide"); table.put(new Integer(5832782), "logical y-wide"); table.put(new Integer(5898318), "logical z-wide"); return table; } }
// ScanrReader.java
package loci.formats.in;

import java.io.IOException;
import java.util.Vector;

import loci.common.DataTools;
import loci.common.Location;
import loci.common.RandomAccessInputStream;
import loci.common.XMLTools;
import loci.formats.CoreMetadata;
import loci.formats.FormatException;
import loci.formats.FormatReader;
import loci.formats.FormatTools;
import loci.formats.MetadataTools;
import loci.formats.meta.FilterMetadata;
import loci.formats.meta.MetadataStore;
import loci.formats.tiff.IFD;
import loci.formats.tiff.TiffParser;

import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;

/**
 * ScanrReader is the file format reader for Olympus ScanR datasets: a
 * directory containing an XML descriptor ("experiment_descriptor.xml")
 * plus a "data" subdirectory of single-plane TIFF files, one per
 * well/field/Z/T/channel combination.
 */
public class ScanrReader extends FormatReader {

  // -- Constants --

  /** XML metadata file that anchors a ScanR dataset. */
  private static final String XML_FILE = "experiment_descriptor.xml";
  private static final String EXPERIMENT_FILE = "experiment_descriptor.dat";
  private static final String ACQUISITION_FILE = "AcquisitionLog.dat";

  // -- Fields --

  /** Absolute paths of all non-pixel companion files in the dataset. */
  private Vector<String> metadataFiles = new Vector<String>();

  /** Plate dimensions, in wells. */
  private int wellRows, wellColumns;

  /** Field (site) dimensions within each well. */
  private int fieldRows, fieldColumns;

  /** Channel names parsed from the XML descriptor. */
  private Vector<String> channelNames = new Vector<String>();

  /** Well labels (e.g. "A1") parsed from the XML descriptor. */
  private Vector<String> wellLabels = new Vector<String>();

  private String plateName;

  /** One TIFF path per plane, indexed by series * imageCount + plane. */
  private String[] tiffs;

  /** Delegate reader used to decode individual TIFF planes. */
  private MinimalTiffReader reader;

  // -- Constructor --

  /** Constructs a new ScanR reader. */
  public ScanrReader() {
    super("Olympus ScanR", new String[] {"dat", "xml", "tif"});
    domains = new String[] {FormatTools.HCS_DOMAIN};
    // a ".tif"/".dat"/".xml" suffix alone is not enough to claim a file;
    // isThisType must inspect the name or contents
    suffixSufficient = false;
  }

  // -- IFormatReader API methods --

  /* @see loci.formats.IFormatReader#fileGroupOption(String) */
  public int fileGroupOption(String id) throws FormatException, IOException {
    // planes are spread over many TIFFs; they cannot be read in isolation
    return FormatTools.MUST_GROUP;
  }

  /* @see loci.formats.IFormatReader#isThisType(String, boolean) */
  public boolean isThisType(String name, boolean open) {
    String localName = new Location(name).getName();
    if (localName.equals(XML_FILE) || localName.equals(EXPERIMENT_FILE) ||
      localName.equals(ACQUISITION_FILE))
    {
      return true;
    }
    return super.isThisType(name, open);
  }

  /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */
  public boolean isThisType(RandomAccessInputStream stream) throws IOException
  {
    // ScanR TIFFs are written by National Instruments IMAQ;
    // check the Software tag of the first IFD
    TiffParser p = new TiffParser(stream);
    IFD ifd = p.getFirstIFD();
    if (ifd == null) return false;

    Object s = ifd.getIFDValue(IFD.SOFTWARE);
    if (s == null) return false;
    String software = s instanceof String[] ? ((String[]) s)[0] : s.toString();
    return software.trim().equals("National Instruments IMAQ");
  }

  /* @see loci.formats.IFormatReader#getSeriesUsedFiles(boolean) */
  public String[] getSeriesUsedFiles(boolean noPixels) {
    FormatTools.assertId(currentId, true, 1);
    Vector<String> files = new Vector<String>();
    for (String file : metadataFiles) {
      if (file != null) files.add(file);
    }
    if (!noPixels && tiffs != null) {
      // only the TIFFs belonging to the current series
      int offset = getSeries() * getImageCount();
      for (int i=0; i<getImageCount(); i++) {
        if (tiffs[offset + i] != null) {
          files.add(tiffs[offset + i]);
        }
      }
    }
    return files.toArray(new String[files.size()]);
  }

  /* @see loci.formats.IFormatReader#close(boolean) */
  public void close(boolean fileOnly) throws IOException {
    super.close(fileOnly);
    if (!fileOnly) {
      if (reader != null) {
        reader.close();
      }
      reader = null;
      tiffs = null;
      plateName = null;
      channelNames.clear();
      fieldRows = fieldColumns = 0;
      wellRows = wellColumns = 0;
      metadataFiles.clear();
      wellLabels.clear();
    }
  }

  /**
   * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int)
   */
  public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h)
    throws FormatException, IOException
  {
    FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h);

    // each plane lives in its own single-plane TIFF
    int index = getSeries() * getImageCount() + no;
    if (tiffs[index] != null) {
      reader.setId(tiffs[index]);
      reader.openBytes(0, buf, x, y, w, h);
      reader.close();
    }
    return buf;
  }

  // -- Internal FormatReader API methods --

  /* @see loci.formats.FormatReader#initFile(String) */
  protected void initFile(String id) throws FormatException, IOException {
    debug("ScanrReader.initFile(" + id + ")");
    super.initFile(id);

    // make sure we have the .xml file
    if (!checkSuffix(id, "xml") && isGroupFiles()) {
      Location parent = new Location(id).getAbsoluteFile().getParentFile();
      if (checkSuffix(id, "tif")) {
        // TIFFs live in the "data" subdirectory; descriptor is one level up
        parent = parent.getParentFile();
      }
      String[] list = parent.list();
      for (String file : list) {
        if (file.equals(XML_FILE)) {
          id = new Location(parent, file).getAbsolutePath();
          super.initFile(id);
          break;
        }
      }
      if (!checkSuffix(id, "xml")) {
        throw new FormatException("Could not find " + XML_FILE + " in " +
          parent.getAbsolutePath());
      }
    }
    else if (!isGroupFiles() && checkSuffix(id, "tif")) {
      // grouping disabled: treat the single TIFF as the whole dataset
      TiffReader r = new TiffReader();
      r.setMetadataStore(getMetadataStore());
      r.setId(id);
      core = r.getCoreMetadata();
      metadata = r.getMetadata();
      metadataStore = r.getMetadataStore();
      r.close();
      tiffs = new String[] {id};
      reader = new MinimalTiffReader();
      return;
    }

    Location dir = new Location(id).getAbsoluteFile().getParentFile();
    String[] list = dir.list(true);
    for (String file : list) {
      Location f = new Location(dir, file);
      if (!f.isDirectory()) {
        metadataFiles.add(f.getAbsolutePath());
      }
    }

    // parse XML metadata
    String xml = DataTools.readFile(id);
    XMLTools.parseXML(xml, new ScanrHandler());

    // derive plate dimensions from the distinct row letters and column
    // numbers seen in the well labels
    Vector<String> uniqueRows = new Vector<String>();
    Vector<String> uniqueColumns = new Vector<String>();

    for (String well : wellLabels) {
      if (!Character.isLetter(well.charAt(0))) continue;
      String row = well.substring(0, 1).trim();
      String column = well.substring(1).trim();
      if (!uniqueRows.contains(row) && row.length() > 0) uniqueRows.add(row);
      if (!uniqueColumns.contains(column) && column.length() > 0) {
        uniqueColumns.add(column);
      }
    }

    wellRows = uniqueRows.size();
    wellColumns = uniqueColumns.size();
    if (wellRows * wellColumns == 0) {
      // labels were missing or unparseable; fall back to the column count
      // of the smallest standard plate that holds all the wells
      if (wellLabels.size() <= 96) {
        wellColumns = 12;
      }
      else if (wellLabels.size() <= 384) {
        wellColumns = 24;
      }
      else {
        // 1536-well plate; previously unhandled, which left wellColumns at
        // 0 and caused a division by zero below
        wellColumns = 48;
      }
      wellRows = wellLabels.size() / wellColumns;
      if (wellRows * wellColumns < wellLabels.size()) wellRows++;
      // ensure at least one well (and hence one series) is created even
      // if the well list was empty
      if (wellRows == 0) wellRows = 1;
    }

    int nChannels = getSizeC() == 0 ? channelNames.size() : getSizeC();
    if (nChannels == 0) nChannels = 1;
    int nSlices = getSizeZ() == 0 ? 1 : getSizeZ();
    int nTimepoints = getSizeT();
    int nWells = wellRows * wellColumns;
    int nPos = fieldRows * fieldColumns;
    if (nPos == 0) nPos = 1;

    // get list of TIFF files
    dir = new Location(dir, "data");
    list = dir.list(true);
    if (nTimepoints == 0) {
      // infer the timepoint count from the number of files on disk
      nTimepoints = list.length / (nChannels * nWells * nPos * nSlices);
      if (nTimepoints == 0) nTimepoints = 1;
    }

    // match each (well, field, Z, T, channel) tuple to a file whose name
    // contains the corresponding W/P/Z/T blocks and channel name
    tiffs = new String[nChannels * nWells * nPos * nTimepoints * nSlices];
    int next = 0;
    for (int well=0; well<nWells; well++) {
      String wellPos = getBlock(well + 1, "W");
      for (int pos=0; pos<nPos; pos++) {
        String posPos = getBlock(pos + 1, "P");
        for (int z=0; z<nSlices; z++) {
          String zPos = getBlock(z, "Z");
          for (int t=0; t<nTimepoints; t++) {
            String tPos = getBlock(t, "T");
            for (int c=0; c<nChannels; c++) {
              // if no channel names were parsed (nChannels defaulted to 1),
              // match on well/field/Z/T only instead of throwing
              String cName = c < channelNames.size() ?
                channelNames.get(c) : "";
              for (String file : list) {
                if (file.indexOf(wellPos) != -1 && file.indexOf(zPos) != -1 &&
                  file.indexOf(posPos) != -1 && file.indexOf(tPos) != -1 &&
                  file.indexOf(cName) != -1)
                {
                  tiffs[next++] = new Location(dir, file).getAbsolutePath();
                  break;
                }
              }
            }
          }
        }
      }
    }

    // read core dimensions from the first TIFF
    reader = new MinimalTiffReader();
    reader.setId(tiffs[0]);
    int sizeX = reader.getSizeX();
    int sizeY = reader.getSizeY();
    int pixelType = reader.getPixelType();
    // we strongly suspect that ScanR incorrectly records the
    // signedness of the pixels
    switch (pixelType) {
      case FormatTools.INT8:
        pixelType = FormatTools.UINT8;
        break;
      case FormatTools.UINT8:
        pixelType = FormatTools.INT8;
        break;
      case FormatTools.INT16:
        pixelType = FormatTools.UINT16;
        break;
      case FormatTools.UINT16:
        pixelType = FormatTools.INT16;
        break;
    }
    boolean rgb = reader.isRGB();
    boolean interleaved = reader.isInterleaved();
    boolean indexed = reader.isIndexed();
    boolean littleEndian = reader.isLittleEndian();
    reader.close();

    // one series per (well, field) pair
    core = new CoreMetadata[nWells * nPos];
    for (int i=0; i<getSeriesCount(); i++) {
      core[i] = new CoreMetadata();
      core[i].sizeC = nChannels;
      core[i].sizeZ = nSlices;
      core[i].sizeT = nTimepoints;
      core[i].sizeX = sizeX;
      core[i].sizeY = sizeY;
      core[i].pixelType = pixelType;
      core[i].rgb = rgb;
      core[i].interleaved = interleaved;
      core[i].indexed = indexed;
      core[i].littleEndian = littleEndian;
      core[i].dimensionOrder = "XYCTZ";
      core[i].imageCount = nSlices * nTimepoints * nChannels;
    }

    MetadataStore store =
      new FilterMetadata(getMetadataStore(), isMetadataFiltered());
    MetadataTools.populatePixels(store, this);

    // populate LogicalChannel data
    for (int i=0; i<getSeriesCount(); i++) {
      for (int c=0; c<getSizeC(); c++) {
        // guard: sizeC may exceed the number of parsed channel names
        if (c < channelNames.size()) {
          store.setLogicalChannelName(channelNames.get(c), i, c);
        }
      }
    }

    // plates with more than 26 rows cannot use letter-based row names
    if (wellRows > 26) {
      store.setPlateRowNamingConvention("1", 0);
      store.setPlateColumnNamingConvention("A", 0);
    }
    else {
      store.setPlateRowNamingConvention("A", 0);
      store.setPlateColumnNamingConvention("1", 0);
    }
    store.setPlateName(plateName, 0);

    int nFields = fieldRows * fieldColumns;

    for (int i=0; i<getSeriesCount(); i++) {
      MetadataTools.setDefaultCreationDate(store, id, i);

      int field = i % nFields;
      int well = i / nFields;
      int wellRow = well / wellColumns;
      int wellCol = well % wellColumns;

      store.setWellColumn(new Integer(wellCol), 0, well);
      store.setWellRow(new Integer(wellRow), 0, well);
      store.setWellSampleIndex(new Integer(i), 0, well, field);

      String imageID = MetadataTools.createLSID("Image", i);
      store.setWellSampleImageRef(imageID, 0, well, field);
      store.setImageID(imageID, i);

      String row = String.valueOf(wellRows > 26 ?
        wellRow + 1 : (char) ('A' + wellRow));
      String col = String.valueOf(wellRows > 26 ?
        (char) ('A' + wellCol) : wellCol + 1);

      String name = "Well " + row + col + ", Field " + (field + 1) +
        " (Spot " + (i + 1) + ")";
      store.setImageName(name, i);
    }
  }

  // -- Helper class --

  /** SAX handler that extracts dimension and plate info from the XML. */
  class ScanrHandler extends DefaultHandler {
    private String key, value;
    private String qName;

    // -- DefaultHandler API methods --

    public void characters(char[] ch, int start, int length) {
      String v = new String(ch, start, length);
      if (v.trim().length() == 0) return;
      // metadata is stored as <Name>key</Name><Val>value</Val> pairs
      if (qName.equals("Name")) {
        key = v;
      }
      else if (qName.equals("Val")) {
        value = v.trim();
        addGlobalMeta(key, value);

        if (key.equals("columns/well")) {
          fieldColumns = Integer.parseInt(value);
        }
        else if (key.equals("rows/well")) {
          fieldRows = Integer.parseInt(value);
        }
        else if (key.equals("# slices")) {
          core[0].sizeZ = Integer.parseInt(value);
        }
        else if (key.equals("timeloop real")) {
          core[0].sizeT = Integer.parseInt(value);
        }
        else if (key.equals("name")) {
          channelNames.add(value);
        }
        else if (key.equals("plate name")) {
          plateName = value;
        }
        else if (key.equals("idle")) {
          int lastIndex = channelNames.size() - 1;
          // guard: "idle" may appear before any channel "name" element,
          // in which case there is nothing to count or remove
          if (lastIndex >= 0) {
            if (value.equals("0") &&
              !channelNames.get(lastIndex).equals("Autofocus"))
            {
              core[0].sizeC++;
            }
            else channelNames.remove(lastIndex);
          }
        }
        else if (key.equals("well selection table + cDNA")) {
          wellLabels.add(value);
        }
      }
    }

    public void startElement(String uri, String localName, String qName,
      Attributes attributes)
    {
      this.qName = qName;
    }
  }

  // -- Helper methods --

  /** Returns the given index zero-padded to 5 digits, e.g. "W00012". */
  private String getBlock(int index, String axis) {
    String b = String.valueOf(index);
    while (b.length() < 5) b = "0" + b;
    return axis + b;
  }
}
package loci.common; import org.slf4j.LoggerFactory; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.LoggerContext; import ch.qos.logback.classic.encoder.PatternLayoutEncoder; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.Appender; import ch.qos.logback.core.ConsoleAppender; import ch.qos.logback.core.joran.util.ConfigurationWatchListUtil; /** * A utility class with convenience methods for logback. */ public final class LogbackTools { // -- Constructor -- private LogbackTools() { } /** * Checks whether logback has been configured * * @return {@code} true if logging was successfully enabled */ public static synchronized boolean isEnabled() { Logger root = (Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME); LoggerContext loggerContext = root.getLoggerContext(); return (ConfigurationWatchListUtil.getMainWatchURL(loggerContext) != null || (loggerContext.getProperty("caller") == "Bio-Formats")); } /** * Sets the level of the root logger * * @param level A string indicating the desired level * (i.e.: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN). */ public static synchronized void setRootLevel(String level) { Logger root = (Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME); root.setLevel(Level.toLevel(level)); } /** * Attempts to enable SLF4J logging via logback without an external * configuration file. 
* * @return {@code} true if logging was successfully enabled */ public static synchronized boolean enableLogging() { Logger root = (Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME); LoggerContext context = root.getLoggerContext(); if (!root.iteratorForAppenders().hasNext()) { context.reset(); context.putProperty("caller", "Bio-Formats"); PatternLayoutEncoder layout = new PatternLayoutEncoder(); layout.setContext(context); layout.setPattern("%m%n"); layout.start(); ConsoleAppender<ILoggingEvent> appender = new ConsoleAppender<ILoggingEvent>(); appender.setContext(context); appender.setEncoder(layout); appender.start(); root.addAppender(appender); } else { Appender defaultAppender = root.iteratorForAppenders().next(); if (defaultAppender instanceof ConsoleAppender) { context.reset(); context.putProperty("caller", "Bio-Formats"); PatternLayoutEncoder layout = new PatternLayoutEncoder(); layout.setContext(context); layout.setPattern("%m%n"); layout.start(); defaultAppender.setContext(context); ((ConsoleAppender) defaultAppender).setEncoder(layout); defaultAppender.start(); root.addAppender(defaultAppender); } } return true; } public static synchronized void enableIJLogging(boolean debug, Appender<ILoggingEvent> appender) { try { Object logger = LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME); if (!(logger instanceof Logger)) return; Logger root = (Logger) logger; if (debug) { root.setLevel(Level.DEBUG); } else { root.setLevel(Level.INFO); } appender.setContext(root.getLoggerContext()); root.addAppender(appender); } catch (Exception e) { e.printStackTrace(); } } }
package ome.scifio.apng; import java.awt.image.BufferedImage; import java.awt.image.IndexColorModel; import java.awt.image.WritableRaster; import java.io.BufferedInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.zip.CRC32; import java.util.zip.DeflaterOutputStream; import javax.imageio.ImageIO; import net.imglib2.meta.Axes; import net.imglib2.meta.AxisType; import ome.scifio.AbstractChecker; import ome.scifio.AbstractFormat; import ome.scifio.AbstractMetadata; import ome.scifio.AbstractParser; import ome.scifio.AbstractTranslator; import ome.scifio.AbstractWriter; import ome.scifio.ImageMetadata; import ome.scifio.DatasetMetadata; import ome.scifio.CoreTranslator; import ome.scifio.Field; import ome.scifio.FieldPrinter; import ome.scifio.FormatException; import ome.scifio.SCIFIO; import ome.scifio.common.Constants; import ome.scifio.common.DataTools; import ome.scifio.discovery.SCIFIOFormat; import ome.scifio.discovery.SCIFIOTranslator; import ome.scifio.gui.AWTImageTools; import ome.scifio.gui.BIFormatReader; import ome.scifio.io.RandomAccessInputStream; import ome.scifio.io.RandomAccessOutputStream; import ome.scifio.io.StreamTools; import ome.scifio.util.FormatTools; @SCIFIOFormat public class APNGFormat extends AbstractFormat<APNGFormat.Metadata, APNGFormat.Checker, APNGFormat.Parser, APNGFormat.Reader, APNGFormat.Writer> { // -- Constants -- public static final byte[] PNG_SIGNATURE = new byte[] { (byte) 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a}; // -- Constructor -- public APNGFormat() throws FormatException { this(null); } public APNGFormat(final SCIFIO ctx) throws FormatException { super(ctx, "Animated PNG", "png", Metadata.class, Checker.class, Parser.class, Reader.class, Writer.class); } /** * File format SCIFIO Metadata for Animated Portable Network Graphics * (APNG) images. 
* */ public static class Metadata extends AbstractMetadata { // -- Fields -- private List<APNGIDATChunk> idat; private List<APNGfcTLChunk> fctl; private APNGacTLChunk actl; private APNGIHDRChunk ihdr; private APNGPLTEChunk plte; private APNGIENDChunk iend; // true if the default image is not part of the animation private boolean separateDefault; // -- Constructor -- public Metadata() { this(null); } public Metadata(final SCIFIO context) { super(context); fctl = new ArrayList<APNGfcTLChunk>(); idat = new ArrayList<APNGIDATChunk>(); } // -- Getters and Setters -- public List<APNGIDATChunk> getIdat() { return idat; } public void setIdat(final List<APNGIDATChunk> idat) { this.idat = idat; } public void addIdat(final APNGIDATChunk idat) { this.idat.add(idat); } public void setSeparateDefault(final boolean separateDefault) { this.separateDefault = separateDefault; } public boolean isSeparateDefault() { return separateDefault; } public List<APNGfcTLChunk> getFctl() { return fctl; } public void setFctl(final List<APNGfcTLChunk> fctl) { this.fctl = fctl; } public APNGacTLChunk getActl() { return actl; } public void setActl(final APNGacTLChunk actl) { this.actl = actl; } public APNGIHDRChunk getIhdr() { return ihdr; } public void setIhdr(final APNGIHDRChunk ihdr) { this.ihdr = ihdr; } public APNGPLTEChunk getPlte() { return plte; } public void setPlte(final APNGPLTEChunk plte) { this.plte = plte; } public APNGIENDChunk getIend() { return iend; } public void setIend(APNGIENDChunk iend) { this.iend = iend; } // -- Helper Methods -- /* @see Metadata#resetMeta() */ public void reset() { super.reset(getClass()); fctl = new ArrayList<APNGfcTLChunk>(); idat = new ArrayList<APNGIDATChunk>(); } } /** * File format SCIFIO Checker for Animated Portable Network Graphics * (APNG) images. 
* */ public static class Checker extends AbstractChecker<Metadata> { // -- Fields -- // -- Constructor -- /** Constructs a new APNGChecker */ public Checker(final SCIFIO ctx) { super(ctx); suffixNecessary = false; } public Checker() { this(null); } // -- Checker API Methods -- /* @see ome.scifio.Checker#isFormat(RandomAccessInputStream stream) */ @Override public boolean isFormat(final RandomAccessInputStream stream) throws IOException { final int blockLen = 8; if (!StreamTools.validStream(stream, blockLen, false)) return false; final byte[] signature = new byte[blockLen]; stream.read(signature); if (signature[0] != (byte) 0x89 || signature[1] != 0x50 || signature[2] != 0x4e || signature[3] != 0x47 || signature[4] != 0x0d || signature[5] != 0x0a || signature[6] != 0x1a || signature[7] != 0x0a) { return false; } return true; } // -- MetadataHandler API Methods -- /* @see MetadataHandler#getMetadataTypes() */ public Class<Metadata> getMetadataType() { return Metadata.class; } } /** * File format SCIFIO Parser for Animated Portable Network Graphics * (APNG) images. * */ public static class Parser extends AbstractParser<Metadata> { // -- Fields -- // -- Constructor -- /** Constructs a new APNGParser. 
*/ public Parser() { this(null); } public Parser(final SCIFIO ctx) { super(ctx); } // -- Parser API Methods -- /* @see ome.scifio.AbstractParser#parse(RandomAccessInputStream stream) */ @Override public Metadata parse(final RandomAccessInputStream stream) throws IOException, FormatException { super.parse(stream); // check that this is a valid PNG file final byte[] signature = new byte[8]; in.read(signature); if (signature[0] != (byte) 0x89 || signature[1] != 0x50 || signature[2] != 0x4e || signature[3] != 0x47 || signature[4] != 0x0d || signature[5] != 0x0a || signature[6] != 0x1a || signature[7] != 0x0a) { throw new FormatException("Invalid PNG signature."); } // For determining if the first frame is also the default image boolean sawFctl = false; // read data chunks - each chunk consists of the following: // 1) 32 bit length // 2) 4 char type // 3) 'length' bytes of data // 4) 32 bit CRC while (in.getFilePointer() < in.length()) { final int length = in.readInt(); final String type = in.readString(4); final long offset = in.getFilePointer(); APNGChunk chunk = null; if (type.equals("acTL")) { chunk = new APNGacTLChunk(); ((APNGacTLChunk) chunk).setNumFrames(in.readInt()); ((APNGacTLChunk) chunk).setNumPlays(in.readInt()); metadata.setActl(((APNGacTLChunk) chunk)); } else if (type.equals("fcTL")) { sawFctl = true; chunk = new APNGfcTLChunk(); ((APNGfcTLChunk) chunk).setSequenceNumber(in.readInt()); ((APNGfcTLChunk) chunk).setWidth(in.readInt()); ((APNGfcTLChunk) chunk).setHeight(in.readInt()); ((APNGfcTLChunk) chunk).setxOffset(in.readInt()); ((APNGfcTLChunk) chunk).setyOffset(in.readInt()); ((APNGfcTLChunk) chunk).setDelayNum(in.readShort()); ((APNGfcTLChunk) chunk).setDelayDen(in.readShort()); ((APNGfcTLChunk) chunk).setDisposeOp(in.readByte()); ((APNGfcTLChunk) chunk).setBlendOp(in.readByte()); metadata.getFctl().add(((APNGfcTLChunk) chunk)); } else if (type.equals("IDAT")) { metadata.setSeparateDefault(!sawFctl); chunk = new APNGIDATChunk(); 
metadata.addIdat(((APNGIDATChunk) chunk)); in.skipBytes(length); } else if (type.equals("fdAT")) { chunk = new APNGfdATChunk(); ((APNGfdATChunk) chunk).setSequenceNumber(in.readInt()); metadata.getFctl() .get(metadata.getFctl().size() - 1) .addChunk(((APNGfdATChunk) chunk)); in.skipBytes(length - 4); } else if (type.equals("IHDR")) { chunk = new APNGIHDRChunk(); ((APNGIHDRChunk) chunk).setWidth(in.readInt()); ((APNGIHDRChunk) chunk).setHeight(in.readInt()); ((APNGIHDRChunk) chunk).setBitDepth(in.readByte()); ((APNGIHDRChunk) chunk).setColourType(in.readByte()); ((APNGIHDRChunk) chunk).setCompressionMethod(in.readByte()); ((APNGIHDRChunk) chunk).setFilterMethod(in.readByte()); ((APNGIHDRChunk) chunk).setInterlaceMethod(in.readByte()); metadata.setIhdr(((APNGIHDRChunk) chunk)); } else if (type.equals("PLTE")) { chunk = new APNGPLTEChunk(); final byte[] red = new byte[length / 3]; final byte[] blue = new byte[length / 3]; final byte[] green = new byte[length / 3]; for (int i = 0; i < length; i += 3) { red[i] = in.readByte(); green[i] = in.readByte(); blue[i] = in.readByte(); } ((APNGPLTEChunk) chunk).setRed(red); ((APNGPLTEChunk) chunk).setGreen(green); ((APNGPLTEChunk) chunk).setBlue(blue); metadata.setPlte(((APNGPLTEChunk) chunk)); } else if(type.equals("IEND")) { chunk = new APNGIENDChunk(); in.skipBytes((int) (in.length() - in.getFilePointer())); metadata.setIend((APNGIENDChunk) chunk); } else in.skipBytes(length); if (chunk != null) { chunk.setOffset(offset); chunk.setLength(length); } if (in.getFilePointer() < in.length() - 4) { in.skipBytes(4); // skip the CRC } } return metadata; } } /** * File format SCIFIO Reader for Animated Portable Network Graphics (APNG) * images. * */ public static class Reader extends BIFormatReader<Metadata> { // -- Fields -- // Cached copy of the last plane that was returned. private BufferedImage lastPlane; // Plane index of the last plane that was returned. 
private int lastPlaneIndex = -1; // -- Constructor -- /** Constructs a new APNGReader. */ public Reader() { this(null); } public Reader(final SCIFIO ctx) { super(ctx); } // -- Reader API Methods -- /* @see ome.scifio.Reader#openPlane(int, int, int, int, int) */ @Override public Object openPlane(final int imageIndex, final int planeIndex, final int x, final int y, final int w, final int h) throws FormatException, IOException { FormatTools.checkPlaneParameters( this, imageIndex, planeIndex, -1, x, y, w, h); // If the last processed (cached) plane is requested, return it if (planeIndex == lastPlaneIndex && lastPlane != null) { return AWTImageTools.getSubimage( lastPlane, dMeta.isLittleEndian(planeIndex), x, y, w, h); } // The default frame is requested and we can use the standard Java ImageIO to extract it if (planeIndex == 0) { in.seek(0); final DataInputStream dis = new DataInputStream(new BufferedInputStream(in, 4096)); lastPlane = ImageIO.read(dis); lastPlaneIndex = 0; if (x == 0 && y == 0 && w == dMeta.getAxisLength(imageIndex, Axes.X) && h == dMeta.getAxisLength(imageIndex, Axes.Y)) { return lastPlane; } return AWTImageTools.getSubimage( lastPlane, dMeta.isLittleEndian(imageIndex), x, y, w, h); } // For a non-default frame, the appropriate chunks will be used to create a new image, // which will be read with the standard Java ImageIO and pasted onto frame 0. final ByteArrayOutputStream stream = new ByteArrayOutputStream(); stream.write(APNGFormat.PNG_SIGNATURE); final int[] coords = metadata.getFctl().get(planeIndex).getFrameCoordinates(); // process IHDR chunk final APNGIHDRChunk ihdr = metadata.getIhdr(); processChunk( imageIndex, ihdr.getLength(), ihdr.getOffset(), coords, stream, true); // process fcTL and fdAT chunks final APNGfcTLChunk fctl = metadata.getFctl().get( metadata.isSeparateDefault() ? 
planeIndex - 1 : planeIndex); // fdAT chunks are converted to IDAT chunks, as we are essentially building a standalone single-frame image for (final APNGfdATChunk fdat : fctl.getFdatChunks()) { in.seek(fdat.getOffset() + 4); byte[] b = new byte[fdat.getLength() + 8]; DataTools.unpackBytes( fdat.getLength() - 4, b, 0, 4, dMeta.isLittleEndian(imageIndex)); b[4] = 'I'; b[5] = 'D'; b[6] = 'A'; b[7] = 'T'; in.read(b, 8, b.length - 12); final int crc = (int) computeCRC(b, b.length - 4); DataTools.unpackBytes( crc, b, b.length - 4, 4, dMeta.isLittleEndian(imageIndex)); stream.write(b); b = null; } // process PLTE chunks final APNGPLTEChunk plte = metadata.getPlte(); if (plte != null) { processChunk( imageIndex, plte.getLength(), plte.getOffset(), coords, stream, false); } final RandomAccessInputStream s = new RandomAccessInputStream(stream.toByteArray()); final DataInputStream dis = new DataInputStream(new BufferedInputStream(s, 4096)); final BufferedImage bi = ImageIO.read(dis); dis.close(); // Recover first plane lastPlane = null; openPlane( imageIndex, 0, 0, 0, dMeta.getAxisLength(imageIndex, Axes.X), dMeta.getAxisLength(imageIndex, Axes.Y)); // paste current image onto first plane final WritableRaster firstRaster = lastPlane.getRaster(); final WritableRaster currentRaster = bi.getRaster(); firstRaster.setDataElements(coords[0], coords[1], currentRaster); lastPlane = new BufferedImage(lastPlane.getColorModel(), firstRaster, false, null); lastPlaneIndex = planeIndex; return lastPlane; } // -- Helper methods -- private long computeCRC(final byte[] buf, final int len) { final CRC32 crc = new CRC32(); crc.update(buf, 0, len); return crc.getValue(); } private void processChunk(final int imageIndex, final int length, final long offset, final int[] coords, final ByteArrayOutputStream stream, final boolean isIHDR) throws IOException { byte[] b = new byte[length + 12]; DataTools.unpackBytes(length, b, 0, 4, dMeta.isLittleEndian(imageIndex)); final byte[] typeBytes = (isIHDR ? 
"IHDR".getBytes(Constants.ENCODING) : "PLTE".getBytes(Constants.ENCODING)); System.arraycopy(typeBytes, 0, b, 4, 4); in.seek(offset); in.read(b, 8, b.length - 12); if (isIHDR) { DataTools.unpackBytes( coords[2], b, 8, 4, dMeta.isLittleEndian(imageIndex)); DataTools.unpackBytes( coords[3], b, 12, 4, dMeta.isLittleEndian(imageIndex)); } final int crc = (int) computeCRC(b, b.length - 4); DataTools.unpackBytes( crc, b, b.length - 4, 4, dMeta.isLittleEndian(imageIndex)); stream.write(b); b = null; } } /** * The SCIFIO file format writer for PNG and APNG files. * */ public static class Writer extends AbstractWriter<Metadata> { // -- Fields -- // Number of frames written private int numFrames = 0; // Pointer to position in acTL chunk to write the number of frames in this // image private long numFramesPointer = 0; // Current sequence number, shared by fcTL and fdAT frames to indicate // ordering private int nextSequenceNumber; private boolean littleEndian; // -- Constructor -- public Writer() { this(null); } public Writer(final SCIFIO ctx) { super(ctx); } // -- Writer API Methods -- /** * @see ome.scifio.Writer#saveBytes(int, byte[], int, int, int, int) */ public void saveBytes(final int imageIndex, final int planeIndex, final byte[] buf, final int x, final int y, final int w, final int h) throws FormatException, IOException { checkParams(imageIndex, planeIndex, buf, x, y, w, h); if (!isFullPlane(imageIndex, x, y, w, h)) { throw new FormatException( "APNGWriter does not yet support saving image tiles."); } final int width = dMeta.getAxisLength(imageIndex, Axes.X); final int height = dMeta.getAxisLength(imageIndex, Axes.Y); if (!initialized[imageIndex][planeIndex]) { if (numFrames == 0) { if (!metadata.isSeparateDefault()) { // first frame is default image writeFCTL(width, height, planeIndex); } writePLTE(); } initialized[imageIndex][planeIndex] = true; } // write the data for this frame if (numFrames == 0) { // This is the first frame, and also the default image 
writePixels(imageIndex, "IDAT", buf, x, y, w, h); } else { writeFCTL(width, height, planeIndex); writePixels(imageIndex, "fdAT", buf, x, y, w, h); } numFrames++; } /* @see ome.scifio.Writer#canDoStacks() */ public boolean canDoStacks() { return true; } /* @see ome.scifio.Writer#getPixelTypes(String) */ public int[] getPixelTypes(final String codec) { return new int[] { FormatTools.INT8, FormatTools.UINT8, FormatTools.INT16, FormatTools.UINT16 }; } // -- APNGWriter Methods -- /* @see ome.scifio.Writer#close() */ public void close() throws IOException { if (out != null) { writeFooter(); } super.close(); numFrames = 0; numFramesPointer = 0; nextSequenceNumber = 0; littleEndian = false; } /* @see ome.scifio.Writer#setDest(RandomAccessOutputStream, int) */ public void setDest(final RandomAccessOutputStream out, final int imageIndex) throws FormatException, IOException { super.setDest(out, imageIndex); initialize(imageIndex); } // -- Helper Methods -- private void initialize(final int imageIndex) throws FormatException, IOException { if (out.length() == 0) { final int width = dMeta.getAxisLength(imageIndex, Axes.X); final int height = dMeta.getAxisLength(imageIndex, Axes.Y); final int bytesPerPixel = FormatTools.getBytesPerPixel(dMeta .getPixelType(imageIndex)); final int nChannels = dMeta.getAxisLength(imageIndex, Axes.CHANNEL); final boolean indexed = getColorModel() != null && (getColorModel() instanceof IndexColorModel); littleEndian = dMeta.isLittleEndian(imageIndex); // write 8-byte PNG signature out.write(APNGFormat.PNG_SIGNATURE); // write IHDR chunk out.writeInt(13); final byte[] b = new byte[17]; b[0] = 'I'; b[1] = 'H'; b[2] = 'D'; b[3] = 'R'; DataTools.unpackBytes(width, b, 4, 4, false); DataTools.unpackBytes(height, b, 8, 4, false); b[12] = (byte) (bytesPerPixel * 8); if (indexed) b[13] = (byte) 3; else if (nChannels == 1) b[13] = (byte) 0; else if (nChannels == 2) b[13] = (byte) 4; else if (nChannels == 3) b[13] = (byte) 2; else if (nChannels == 4) b[13] = 
(byte) 6; b[14] = metadata.getIhdr().getCompressionMethod(); b[15] = metadata.getIhdr().getFilterMethod(); b[16] = metadata.getIhdr().getInterlaceMethod(); out.write(b); out.writeInt(crc(b)); // write acTL chunk final APNGacTLChunk actl = metadata.getActl(); out.writeInt(8); out.writeBytes("acTL"); numFramesPointer = out.getFilePointer(); out.writeInt(actl == null ? 0 : actl.getNumFrames()); out.writeInt(actl == null ? 0 : actl.getNumPlays()); out.writeInt(0); // save a place for the CRC } } private int crc(final byte[] buf) { return crc(buf, 0, buf.length); } private int crc(final byte[] buf, final int off, final int len) { final CRC32 crc = new CRC32(); crc.update(buf, off, len); return (int) crc.getValue(); } private void writeFCTL(final int width, final int height, final int planeIndex) throws IOException { out.writeInt(26); final APNGfcTLChunk fctl = metadata.getFctl().get( metadata.isSeparateDefault() ? planeIndex - 1 : planeIndex); final byte[] b = new byte[30]; DataTools.unpackBytes(22, b, 0, 4, false); b[0] = 'f'; b[1] = 'c'; b[2] = 'T'; b[3] = 'L'; DataTools.unpackBytes(nextSequenceNumber++, b, 4, 4, false); DataTools.unpackBytes(fctl.getWidth(), b, 8, 4, false); DataTools.unpackBytes(fctl.getHeight(), b, 12, 4, false); DataTools.unpackBytes(fctl.getxOffset(), b, 16, 4, false); DataTools.unpackBytes(fctl.getyOffset(), b, 20, 4, false); DataTools.unpackBytes(fctl.getDelayNum(), b, 24, 2, false); DataTools.unpackBytes(fctl.getDelayDen(), b, 26, 2, false); b[28] = fctl.getDisposeOp(); b[29] = fctl.getBlendOp(); out.write(b); out.writeInt(crc(b)); } private void writePLTE() throws IOException { if (!(getColorModel() instanceof IndexColorModel)) return; final IndexColorModel model = (IndexColorModel) getColorModel(); final byte[][] lut = new byte[3][256]; model.getReds(lut[0]); model.getGreens(lut[1]); model.getBlues(lut[2]); out.writeInt(768); final byte[] b = new byte[772]; b[0] = 'P'; b[1] = 'L'; b[2] = 'T'; b[3] = 'E'; for (int i = 0; i < lut[0].length; 
i++) { for (int j = 0; j < lut.length; j++) { b[i * lut.length + j + 4] = lut[j][i]; } } out.write(b); out.writeInt(crc(b)); } private void writePixels(final int imageIndex, final String chunk, final byte[] stream, final int x, final int y, final int width, final int height) throws FormatException, IOException { final int sizeC = dMeta.getAxisLength(imageIndex, Axes.CHANNEL); final int pixelType = dMeta.getPixelType(imageIndex); final boolean signed = FormatTools.isSigned(pixelType); if (!isFullPlane(imageIndex, x, y, width, height)) { throw new FormatException( "APNGWriter does not support writing tiles."); } final ByteArrayOutputStream s = new ByteArrayOutputStream(); s.write(chunk.getBytes(Constants.ENCODING)); if (chunk.equals("fdAT")) { s.write(DataTools.intToBytes(nextSequenceNumber++, false)); } final DeflaterOutputStream deflater = new DeflaterOutputStream(s); final int planeSize = stream.length / sizeC; final int rowLen = stream.length / height; final int bytesPerPixel = stream.length / (width * height * sizeC); final byte[] rowBuf = new byte[rowLen]; for (int i = 0; i < height; i++) { deflater.write(0); if (interleaved) { if (littleEndian) { for (int col = 0; col < width * sizeC; col++) { final int offset = (i * sizeC * width + col) * bytesPerPixel; final int pixel = DataTools.bytesToInt(stream, offset, bytesPerPixel, littleEndian); DataTools.unpackBytes(pixel, rowBuf, col * bytesPerPixel, bytesPerPixel, false); } } else System.arraycopy(stream, i * rowLen, rowBuf, 0, rowLen); } else { final int max = (int) Math.pow(2, bytesPerPixel * 8 - 1); for (int col = 0; col < width; col++) { for (int c = 0; c < sizeC; c++) { final int offset = c * planeSize + (i * width + col) * bytesPerPixel; int pixel = DataTools.bytesToInt(stream, offset, bytesPerPixel, littleEndian); if (signed) { if (pixel < max) pixel += max; else pixel -= max; } final int output = (col * sizeC + c) * bytesPerPixel; DataTools.unpackBytes(pixel, rowBuf, output, bytesPerPixel, false); } } } 
deflater.write(rowBuf); } deflater.finish(); final byte[] b = s.toByteArray(); // write chunk length out.writeInt(b.length - 4); out.write(b); // write checksum out.writeInt(crc(b)); } private void writeFooter() throws IOException { // write IEND chunk out.writeInt(0); out.writeBytes("IEND"); out.writeInt(crc("IEND".getBytes(Constants.ENCODING))); // update frame count out.seek(numFramesPointer); out.writeInt(numFrames); out.skipBytes(4); final byte[] b = new byte[12]; b[0] = 'a'; b[1] = 'c'; b[2] = 'T'; b[3] = 'L'; DataTools.unpackBytes(numFrames, b, 4, 4, false); DataTools.unpackBytes(metadata.getActl() == null ? 0 : metadata.getActl().getNumPlays(), b, 8, 4, false); out.writeInt(crc(b)); } } /** * This class can be used for translating Metadata in the Core SCIFIO format * to Metadata for writing Animated Portable Network Graphics (APNG) * files. * * Note that Metadata translated from Core is only write-safe. * * If trying to read, there should already exist an originally-parsed APNG * Metadata object which can be used. * * Note also that any APNG image written must be reparsed, as the Metadata used * to write it can not be guaranteed valid. * */ @SCIFIOTranslator(metaIn = DatasetMetadata.class, metaOut = Metadata.class) public static class CoreAPNGTranslator extends AbstractTranslator<DatasetMetadata, Metadata> { // -- Constructors -- public CoreAPNGTranslator() { this(null); } public CoreAPNGTranslator(SCIFIO ctx) { super(ctx); } // -- Translator API Methods -- @Override public void translate(final DatasetMetadata source, final Metadata dest) { super.translate(source, dest); final APNGIHDRChunk ihdr = dest.getIhdr() == null ? new APNGIHDRChunk() : dest.getIhdr(); final APNGPLTEChunk plte = dest.getPlte() == null ? new APNGPLTEChunk() : dest.getPlte(); final APNGacTLChunk actl = dest.getActl() == null ? 
new APNGacTLChunk() : dest.getActl(); final List<APNGfcTLChunk> fctl = new ArrayList<APNGfcTLChunk>(); dest.setIhdr(ihdr); dest.setPlte(plte); dest.setActl(actl); dest.setFctl(fctl); ihdr.setWidth(source.getAxisLength(0, Axes.X)); ihdr.setHeight(source.getAxisLength(0, Axes.Y)); ihdr.setBitDepth((byte) source.getBitsPerPixel(0)); ihdr.setFilterMethod((byte) 0); ihdr.setCompressionMethod((byte) 0); ihdr.setInterlaceMethod((byte) 0); final int sizec = source.getAxisLength(0, Axes.CHANNEL); final boolean rgb = source.isRGB(0); final boolean indexed = source.isIndexed(0); if (indexed) { ihdr.setColourType((byte) 0x2); byte[][] lut = null; try { lut = source.get8BitLookupTable(0); plte.setRed(lut[0]); plte.setGreen(lut[1]); plte.setBlue(lut[2]); } catch (final FormatException e) { LOGGER.error("Format error when finding 8bit lookup table", e); } catch (final IOException e) { LOGGER.error("IO error when finding 8bit lookup table", e); } } else if (sizec == 2) { ihdr.setColourType((byte) 0x4); } else if (sizec == 4) { ihdr.setColourType((byte) 0x6); } else if (!rgb) { ihdr.setColourType((byte) 0x0); } else { ihdr.setColourType((byte) 0x3); } actl.setNumFrames(source.getAxisLength(0, Axes.TIME)); for (int i = 0; i < actl.getNumFrames(); i++) { final APNGfcTLChunk frame = new APNGfcTLChunk(); frame.setHeight(ihdr.getHeight()); frame.setWidth(ihdr.getWidth()); frame.setxOffset(0); frame.setyOffset(0); frame.setSequenceNumber(i); frame.setDelayDen((short) 0); frame.setDelayNum((short) 0); frame.setBlendOp((byte) 0); frame.setDisposeOp((byte) 0); fctl.add(frame); } dest.setSeparateDefault(true); } } /** * File format SCIFIO Translator for Animated Portable Network Graphics * (APNG) images to the Core SCIFIO image type. 
* */ @SCIFIOTranslator(metaIn = Metadata.class, metaOut = DatasetMetadata.class) public static class APNGCoreTranslator extends AbstractTranslator<Metadata, DatasetMetadata> implements CoreTranslator { // -- Constructors -- public APNGCoreTranslator() { this(null); } public APNGCoreTranslator(final SCIFIO ctx) { super(ctx); } // -- Translator API Methods -- @Override public void translate(final Metadata source, final DatasetMetadata dest) { super.translate(source, dest); final ImageMetadata imageMeta = new ImageMetadata(); dest.add(imageMeta); imageMeta.setInterleaved(false); imageMeta.setOrderCertain(true); imageMeta.setFalseColor(true); imageMeta.setIndexed(false); boolean indexed = false; boolean rgb = true; int sizec = 1; switch (source.getIhdr().getColourType()) { case 0x0: rgb = false; break; case 0x2: indexed = true; sizec = 3; break; case 0x3: break; case 0x4: rgb = false; sizec = 2; break; case 0x6: sizec = 4; break; } if (indexed) { final byte[][] lut = new byte[3][0]; lut[0] = source.getPlte().getRed(); lut[1] = source.getPlte().getGreen(); lut[2] = source.getPlte().getBlue(); imageMeta.setLut(lut); } final APNGacTLChunk actl = source.getActl(); final int planeCount = actl == null ? 
1 : actl.getNumFrames(); imageMeta.setAxisTypes(new AxisType[] { Axes.X, Axes.Y, Axes.CHANNEL, Axes.TIME, Axes.Z}); imageMeta.setAxisLengths(new int[] { source.getIhdr().getWidth(), source.getIhdr().getHeight(), sizec, planeCount, 1}); final int bpp = source.getIhdr().getBitDepth(); imageMeta.setBitsPerPixel(bpp); try { imageMeta.setPixelType(FormatTools.pixelTypeFromBytes( bpp / 8, false, false)); } catch (final FormatException e) { LOGGER.error("Failed to find pixel type from bytes: " + (bpp/8), e); } imageMeta.setRgb(rgb); imageMeta.setIndexed(indexed); imageMeta.setPlaneCount(planeCount); imageMeta.setLittleEndian(false); // Some anciliary chunks may not have been parsed imageMeta.setMetadataComplete(false); imageMeta.setThumbnail(false); //coreMeta.setThumbSizeX(source.thumbSizeX); //coreMeta.setThumbSizeY(source.thumbSizeY); //coreMeta.setcLengths(source.cLengths); //coreMeta.setcTypes(source.cTypes); //TODO could generate this via fields? //coreMeta.setImageMetadata(source.imageMetadata); } } /** * A parent class for all APNG Chunk classes. * * Provides a length and offset (in the overall file stream) * field. * * Each chunk should instantiate and define its own CHUNK_SIGNATURE. * */ public static class APNGChunk { // -- Fields -- // Offset in the file data stream. Points to the start of the // data of the chunk, which comes after an entry for the length // and the chunk's signature. private long offset; // Length of the chunk private int length; // Unique chunk type signature (e.g. 
"IHDR") protected byte[] CHUNK_SIGNATURE; // -- Methods -- public byte[] getCHUNK_SIGNATURE() { return CHUNK_SIGNATURE; } public int[] getFrameCoordinates() { return new int[0]; } public void setOffset(final long offset) { this.offset = offset; } public long getOffset() { return offset; } public void setLength(final int length) { this.length = length; } public int getLength() { return length; } @Override public String toString() { return new FieldPrinter(this).toString(); } } /** * Represents the IHDR chunk of the APNG image format. * * The IHDR chunk is a critical chunk for all APNG * and PNG images. It contains basic information * about the image. * * The IHDR is always the first chunk of a correct * PNG or APNG image file. * */ public static class APNGIHDRChunk extends APNGChunk { // -- Constructor -- public APNGIHDRChunk() { CHUNK_SIGNATURE = new byte[] {(byte) 0x49, 0x48, 0x44, 0x52}; } // -- Fields -- @Field(label = "Width") private int width; @Field(label = "height") private int height; @Field(label = "Bit depth") private byte bitDepth; @Field(label = "Colour type") private byte colourType; @Field(label = "Compression Method") private byte compressionMethod; @Field(label = "Filter method") private byte filterMethod; @Field(label = "Interlace method") private byte interlaceMethod; // -- Methods -- public int getWidth() { return width; } public void setWidth(final int width) { this.width = width; } public int getHeight() { return height; } public void setHeight(final int height) { this.height = height; } public byte getBitDepth() { return bitDepth; } public void setBitDepth(final byte bitDepth) { this.bitDepth = bitDepth; } public byte getColourType() { return colourType; } public void setColourType(final byte colourType) { this.colourType = colourType; } public byte getCompressionMethod() { return compressionMethod; } public void setCompressionMethod(final byte compressionMethod) { this.compressionMethod = compressionMethod; } public byte getFilterMethod() { 
return filterMethod; } public void setFilterMethod(final byte filterMethod) { this.filterMethod = filterMethod; } public byte getInterlaceMethod() { return interlaceMethod; } public void setInterlaceMethod(final byte interlaceMethod) { this.interlaceMethod = interlaceMethod; } } /** * Represents the PLTE chunk of the APNG image format. * * The PLTE chunk contains color palette data for the current * image and is only present in certain ARGB color formats. * */ public static class APNGPLTEChunk extends APNGChunk { // -- Constructor -- public APNGPLTEChunk() { CHUNK_SIGNATURE = new byte[] {(byte) 0x50, 0x4C, 0x54, 0x45}; } // -- Fields -- // Red palette entries private byte[] red; // Green palette entries private byte[] green; // Blue palette entries private byte[] blue; // -- Methods -- public byte[] getRed() { return red; } public void setRed(final byte[] red) { this.red = red; } public byte[] getGreen() { return green; } public void setGreen(final byte[] green) { this.green = green; } public byte[] getBlue() { return blue; } public void setBlue(final byte[] blue) { this.blue = blue; } } /** * Represents the fcTL chunk of the APNG image format. * * The fcTL chunk contains metadata for a matching fdAT * chunk, or IDAT chunk (if the default image is also * the first frame of the animation). 
* */ public static class APNGfcTLChunk extends APNGChunk { // -- Fields -- /** Sequence number of the animation chunk, starting from 0 */ @Field(label = "sequence_number") private int sequenceNumber; /** Width of the following frame */ @Field(label = "width") private int width; /** Height of the following frame */ @Field(label = "height") private int height; /** X position at which to render the following frame */ @Field(label = "x_offset") private int xOffset; /** Y position at which to render the following frame */ @Field(label = "y_offset") private int yOffset; /** Frame delay fraction numerator */ @Field(label = "delay_num") private short delayNum; /** Frame delay fraction denominator */ @Field(label = "delay_den") private short delayDen; /** Type of frame area disposal to be done after rendering this frame */ @Field(label = "dispose_op") private byte disposeOp; /** Type of frame area rendering for this frame */ @Field(label = "blend_op") private byte blendOp; private final List<APNGfdATChunk> fdatChunks; // -- Constructor -- public APNGfcTLChunk() { fdatChunks = new ArrayList<APNGfdATChunk>(); CHUNK_SIGNATURE = new byte[] {(byte) 0x66, 0x63, 0x54, 0x4C}; } // -- Methods -- public void addChunk(final APNGfdATChunk chunk) { fdatChunks.add(chunk); } public int getSequenceNumber() { return sequenceNumber; } public void setSequenceNumber(final int sequenceNumber) { this.sequenceNumber = sequenceNumber; } public int getWidth() { return width; } public void setWidth(final int width) { this.width = width; } public int getHeight() { return height; } public void setHeight(final int height) { this.height = height; } public int getxOffset() { return xOffset; } public void setxOffset(final int xOffset) { this.xOffset = xOffset; } public int getyOffset() { return yOffset; } public void setyOffset(final int yOffset) { this.yOffset = yOffset; } public short getDelayNum() { return delayNum; } public void setDelayNum(final short delayNum) { this.delayNum = delayNum; } public 
short getDelayDen() { return delayDen; } public void setDelayDen(final short delayDen) { this.delayDen = delayDen; } public byte getDisposeOp() { return disposeOp; } public void setDisposeOp(final byte disposeOp) { this.disposeOp = disposeOp; } public byte getBlendOp() { return blendOp; } public void setBlendOp(final byte blendOp) { this.blendOp = blendOp; } public List<APNGfdATChunk> getFdatChunks() { return fdatChunks; } // -- Helper Method -- @Override public int[] getFrameCoordinates() { return new int[] {xOffset, yOffset, width, height}; } } /** * Represents the IDAT chunk of the APNG image format. * * The IDAT chunk is simply a dump of compressed image * data for a single plane (the default image for the file). * */ public static class APNGIDATChunk extends APNGChunk { // -- Constructor -- public APNGIDATChunk() { CHUNK_SIGNATURE = new byte[] {(byte) 0x49, 0x44, 0x41, 0x54}; } } /** * Represents the acTL chunk of the APNG image format. * * There is one acTL chunk per APNG image, and is not * present in PNG files. * * The acTL chunk contains metadata describing the number * of frames in the image, and how many times the animation * sequence should be played. 
* */ public static class APNGacTLChunk extends APNGChunk { // -- Constructor -- public APNGacTLChunk() { CHUNK_SIGNATURE = new byte[] {(byte) 0x61, 0x63, 0x54, 0x4C}; } // -- Fields -- /** Sequence number of the animation chunk, starting from 0 */ @Field(label = "sequence_number") private int sequenceNumber; /** Number of frames in this APNG file */ @Field(label = "num_frames") private int numFrames; /** Times to play the animation sequence */ @Field(label = "num_plays") private int numPlays; // -- Methods -- public int getNumFrames() { return numFrames; } public void setNumFrames(final int numFrames) { this.numFrames = numFrames; } public int getNumPlays() { return numPlays; } public void setNumPlays(final int numPlays) { this.numPlays = numPlays; } public int getSequenceNumber() { return sequenceNumber; } public void setSequenceNumber(final int sequenceNumber) { this.sequenceNumber = sequenceNumber; } } /** * Represents the fdAT chunk of the APNG image format. * * The fdAT chunk is identical in concept to the IDAT chunk: * a container for compressed image data for a single frame. * * In the case of fdAT chunks, the image is of a non-default * frame. * * Each fdAT chunk is paired with an fcTL chunk. * */ public static class APNGfdATChunk extends APNGChunk { // -- Constructor -- public APNGfdATChunk() { CHUNK_SIGNATURE = new byte[] {(byte) 0x66, 0x64, 0x41, 0x54}; } // -- Fields -- /** Sequence number of the animation chunk, starting from 0 */ @Field(label = "sequence_number") private int sequenceNumber; // -- Methods -- public int getSequenceNumber() { return sequenceNumber; } public void setSequenceNumber(final int sequenceNumber) { this.sequenceNumber = sequenceNumber; } } /** * This class represents the critical IEND chunk that signifies * the end of a PNG stream. * * @author Mark Hiner * */ public static class APNGIENDChunk extends APNGChunk { // -- Constructor -- public APNGIENDChunk() { CHUNK_SIGNATURE = new byte[] {(byte) 0x49, 0x45, 0x4E, 0x44}; } } }
package ibis.util; /** * Class for doing some recurring statistical calculations. */ public final class Stats { /** * Prevent anyone from creating a <code>Stats</code> object. */ private Stats() { } /** * Calculates the mean of an array of numbers. * @param data the numbers to calculate the mean of. * @return the mean. */ public static double mean(double[] data) { return mean(data, 0, data.length); } /** * Calculates the mean of a subset of an array of numbers. * * @param data the numbers to calculate the mean of * @param off offset * @param len length * @return the mean. */ public static double mean(double[] data, int off, int len) { double total = 0; for (int i = off; i < (off+len); i++) { total += data[i]; } if (len == 0) return 0.0; return total / len; } public static double stdDev(double[] data) { return stdDev(data, 0, data.length); } public static double stdDev(double[] data, int off, int len) { double mean = mean(data, off, len); double sum = 0; for (int i = off; i < (off+len); i++) { double v = data[i] - mean; sum += v * v; } if (len <= 1) return 0.0; return Math.sqrt(sum / (len - 1)); } /** * Calculates a speed in Mb/s given the size in bytes and the time * in milliseconds. The result is rounded to one hundreth of an integer * @param bytes size of the data in bytes * @param millis the time of the measurement in milliseconds * @return the speed. */ public static double mbs(double bytes, double millis) { return round(((bytes / millis) * 1000.0) / (1024.0 * 1024.0)); } /** * Calculates a speed in Mb/s given the size in bytes and a number of * times in milliseconds. The result is rounded to one hundreth of an * integer. * * @param bytes size of the data in bytes * @param millis a number of time measurements in milliseconds * @param off the first measurement used in the calculation * @param len the number of measurements used in the calculation * @return the speed. 
*/ public static double mbs(double bytes, double[] millis, int off, int len) { double mean = mean(millis, off, len); return mbs(bytes, mean); } /** * Rounds up a double. Rounds to one hundreth of an integer. * @param v the value to be rounded. * @return the rounded value. */ public static double round(double v) { return (Math.ceil(v*100.0)/100.0); } /** * Returns the standard deviation as a percentage of the mean. * The result is rounded up to a hundredth of an integer. * * @param data array with input data (numbers) * @param off offset in the data at which to start calculating * @param len number of array elements to use for the calculation * @return the standard deviation as a percentage. */ public static double stdDevError(double[] data, int off, int len) { double mean = mean(data, off, len); double stdDev = stdDev(data, off, len); return round((stdDev / mean) * 100.0); } }
package imageFX; public class DYColor { /** * This class contains all the major color HEX code. * * Colors are stored in the format ARGB * A = Alpha * R = Red * G = Green * B = Blue * * By Default Alpha is set to 255 i.e. FF (in HEX). * So all color starts with 0xFF. */ /////////////////////////////// DYColor //////////////////////////////////// public static final int DYColor1 = 0xFF786786; public static final int DYColor_1to6 = 0xFF123456; public static final int DYColor_ABCDEF = 0xFFABCDEF; public static final int DYColor_Dawood = 0xFF141295; public static final int DYColor_DY = 0xFF044590; public static final int DYColor_JB007 = 0xFF007007; public static final int DYColor_MAC = 0xFF4D4143; public static final int DYColor_Palindrome123 = 0xFF123321; public static final int DYColor_PalindromeABC = 0xFFABCCBA; public static final int DYColor_Yusuf = 0xFF211090; /////////////////////////////// Color ////////////////////////////////////// public static final int Acid_green = 0xFFA8BB19; public static final int Aero = 0xFF7CB9E8; public static final int Aero_blue = 0xFFC9FFE5; public static final int African_violet = 0xFFB284BE; public static final int Air_Force_blue_RAF = 0xFF5D8AA8; public static final int Air_Force_blue_USAF = 0xFF00308F; public static final int Air_superiority_blue = 0xFF72A0C1; public static final int Alabama_Crimson = 0xFFAF002A; public static final int Alice_blue = 0xFFF0F8FF; public static final int Alizarin_crimson = 0xFFE32636; public static final int Alloy_orange = 0xFFC46210; public static final int Almond = 0xFFEFDECD; public static final int Amaranth = 0xFFE52B50; public static final int Amaranth_pink = 0xFFF19CBB; public static final int Amaranth_purple = 0xFFAB274F; public static final int Amazon = 0xFF3B7A57; public static final int Amber = 0xFFFFBF00; public static final int SAE_ECE_Amber_color = 0xFFFF7E00; public static final int Amethyst = 0xFF9966CC; public static final int Android_green = 0xFFA4C639; public static final int 
Anti_flash_white = 0xFFF2F3F4; public static final int Antique_brass = 0xFFCD9575; public static final int Antique_fuchsia = 0xFF665D1E; public static final int Antique_ruby = 0xFF915C83; public static final int Antique_white = 0xFFFAEBD7; public static final int Ao_English = 0xFF008000; public static final int Apple_green = 0xFF8DB600; public static final int Apricot = 0xFFFBCEB1; public static final int Aqua = 0xFF00FFFF; public static final int Aquamarine = 0xFF7FFFD4; public static final int Army_green = 0xFF4B5320; public static final int Arsenic = 0xFF3B444B; public static final int Artichoke = 0xFF8F9779; public static final int Arylide_yellow = 0xFFE9D66B; public static final int Ash_grey = 0xFFB2BEB5; public static final int Asparagus = 0xFF87A96B; public static final int Atomic_tangerine = 0xFFFF9966; public static final int Auburn = 0xFFA52A2A; public static final int Aureolin = 0xFFFDEE00; public static final int AuroMetalSaurus = 0xFF6E7F80; public static final int Avocado = 0xFF568203; public static final int Azure = 0xFF007FFF; public static final int Azure_mist_web = 0xFFF0FFFF; public static final int Baby_blue = 0xFF89CFF0; public static final int Baby_blue_eyes = 0xFFA1CAF1; public static final int Baby_pink = 0xFFF4C2C2; public static final int Baby_powder = 0xFFFEFEFA; public static final int Baker_Miller_pink = 0xFFFF91AF; public static final int Ball_blue = 0xFF21ABCD; public static final int Banana_Mania = 0xFFFAE7B5; public static final int Banana_yellow = 0xFFFFE135; public static final int Bangladesh_green = 0xFF006A4E; public static final int Barbie_pink = 0xFFE0218A; public static final int Barn_red = 0xFF7C0A02; public static final int Battleship_grey = 0xFF848482; public static final int Bazaar = 0xFF98777B; public static final int Beau_blue = 0xFFBCD4E6; public static final int Beaver = 0xFF9F8170; public static final int Beige = 0xFFF5F5DC; public static final int B_dazzled_blue = 0xFF2E5894; public static final int Big_dip_o_ruby = 
0xFF9C2542; public static final int Bisque = 0xFFFFE4C4; public static final int Bistre = 0xFF3D2B1F; public static final int Bistre_brown = 0xFF967117; public static final int Bitter_lemon = 0xFFCAE00D; public static final int Bitter_lime = 0xFFBFFF00; public static final int Bittersweet = 0xFFFE6F5E; public static final int Bittersweet_shimmer = 0xFFBF4F51; public static final int Black = 0xFF000000; public static final int Black_bean = 0xFF3D0C02; public static final int Black_leather_jacket = 0xFF253529; public static final int Black_olive = 0xFF3B3C36; public static final int Blanched_almond = 0xFFFFEBCD; public static final int Blast_off_bronze = 0xFFA57164; public static final int Bleu_de_France = 0xFF318CE7; public static final int Blizzard_Blue = 0xFFACE5EE; public static final int Blue = 0xFFFAF0BE; public static final int Blue_Crayola = 0xFF0000FF; public static final int Blue_NCS = 0xFF0093AF; public static final int Blue_Pantone = 0xFF0087BD; public static final int Blue_RYB = 0xFF333399; public static final int Blue_Bell = 0xFF0247FE; public static final int Blue_green = 0xFF6699CC; public static final int Blue_sapphire = 0xFF0D98BA; public static final int Blue_violet = 0xFF126180; public static final int Blue_yonder = 0xFF8A2BE2; public static final int Blueberry = 0xFF5072A7; public static final int Bluebonnet = 0xFF4F86F7; public static final int Blush = 0xFF1C1CF0; public static final int Bole = 0xFFDE5D83; public static final int Bondi_blue = 0xFF79443B; public static final int Bone = 0xFF0095B6; public static final int Boston_University_Red = 0xFFE3DAC9; public static final int Bottle_green = 0xFFCC0000; public static final int Boysenberry = 0xFF006A4E; public static final int Brandeis_blue = 0xFF873260; public static final int Brass = 0xFF0070FF; public static final int Brick_red = 0xFFB5A642; public static final int Bright_cerulean = 0xFFCB4154; public static final int Bright_green = 0xFF1DACD6; public static final int Bright_lavender = 
0xFF66FF00; public static final int Bright_lilac = 0xFFBF94E4; public static final int Bright_maroon = 0xFFD891EF; public static final int Bright_navy_blue = 0xFFC32148; public static final int Bright_pink = 0xFF1974D2; public static final int Bright_turquoise = 0xFFFF007F; public static final int Bright_ube = 0xFF08E8DE; public static final int Brilliant_lavender = 0xFFD19FE8; public static final int Brilliant_rose = 0xFFF4BBFF; public static final int Brink_pink = 0xFFFF55A3; public static final int British_racing_green = 0xFFFB607F; public static final int Bronze = 0xFF004225; public static final int Bronze_Yellow = 0xFFCD7F32; public static final int Brown_traditional = 0xFF737000; public static final int Brown_web = 0xFF964B00; public static final int Brown_nose = 0xFFA52A2A; public static final int Brunswick_green = 0xFF6B4423; public static final int Bubble_gum = 0xFF1B4D3E; public static final int Bubbles = 0xFFFFC1CC; public static final int Buff = 0xFFE7FEFF; public static final int Bud_green = 0xFFF0DC82; public static final int Bulgarian_rose = 0xFF7BB661; public static final int Burgundy = 0xFF480607; public static final int Burlywood = 0xFF800020; public static final int Burnt_orange = 0xFFDEB887; public static final int Burnt_sienna = 0xFFCC5500; public static final int Burnt_umber = 0xFFE97451; public static final int Byzantine = 0xFF8A3324; public static final int Byzantium = 0xFFBD33A4; public static final int Cadet = 0xFF702963; public static final int Cadet_blue = 0xFF536872; public static final int Cadet_grey = 0xFF5F9EA0; public static final int Cadmium_green = 0xFF91A3B0; public static final int Cadmium_orange = 0xFF006B3C; public static final int Cadmium_red = 0xFFED872D; public static final int Cadmium_yellow = 0xFFE30022; public static final int Caf_au_lait = 0xFFFFF600; public static final int Caf_noir = 0xFFA67B5B; public static final int Cal_Poly_Pomona_green = 0xFF4B3621; public static final int Cambridge_Blue = 0xFF1E4D2B; public 
static final int Camel = 0xFFA3C1AD; public static final int Cameo_pink = 0xFFC19A6B; public static final int Camouflage_green = 0xFFEFBBCC; public static final int Canary_yellow = 0xFF78866B; public static final int Candy_apple_red = 0xFFFFEF00; public static final int Candy_pink = 0xFFFF0800; public static final int Capri = 0xFFE4717A; public static final int Caput_mortuum = 0xFF00BFFF; public static final int Cardinal = 0xFF592720; public static final int Caribbean_green = 0xFFC41E3A; public static final int Carmine = 0xFF00CC99; public static final int Carmine_M_amp_P = 0xFF960018; public static final int Carmine_pink = 0xFFD70040; public static final int Carmine_red = 0xFFEB4C42; public static final int Carnation_pink = 0xFFFF0038; public static final int Carnelian = 0xFFFFA6C9; public static final int Carolina_blue = 0xFFB31B1B; public static final int Carrot_orange = 0xFF56A0D3; public static final int Castleton_green = 0xFFED9121; public static final int Catalina_blue = 0xFF00563F; public static final int Catawba = 0xFF062A78; public static final int Cedar_Chest = 0xFF703642; public static final int Ceil = 0xFFC95A49; public static final int Celadon = 0xFF92A1CF; public static final int Celadon_blue = 0xFFACE1AF; public static final int Celadon_green = 0xFF007BA7; public static final int Celeste = 0xFF2F847C; public static final int Celestial_blue = 0xFFB2FFFF; public static final int Cerise = 0xFF4997D0; public static final int Cerise_pink = 0xFFDE3163; public static final int Cerulean = 0xFFEC3B83; public static final int Cerulean_blue = 0xFF007BA7; public static final int Cerulean_frost = 0xFF2A52BE; public static final int CG_Blue = 0xFF6D9BC3; public static final int CG_Red = 0xFF007AA5; public static final int Chamoisee = 0xFFE03C31; public static final int Champagne = 0xFFA0785A; public static final int Charcoal = 0xFFF7E7CE; public static final int Charleston_green = 0xFF36454F; public static final int Charm_pink = 0xFF232B2B; public static final 
int Chartreuse_traditional = 0xFFE68FAC; public static final int Chartreuse_web = 0xFFDFFF00; public static final int Cherry = 0xFF7FFF00; public static final int Cherry_blossom_pink = 0xFFDE3163; public static final int Chestnut = 0xFFFFB7C5; public static final int China_pink = 0xFF954535; public static final int China_rose = 0xFFDE6FA1; public static final int Chinese_red = 0xFFA8516E; public static final int Chinese_violet = 0xFFAA381E; public static final int Chocolate_traditional = 0xFF856088; public static final int Chocolate_web = 0xFF7B3F00; public static final int Chrome_yellow = 0xFFD2691E; public static final int Cinereous = 0xFFFFA700; public static final int Cinnabar = 0xFF98817B; public static final int Cinnamon = 0xFFE34234; public static final int Citrine = 0xFFD2691E; public static final int Citron = 0xFFE4D00A; public static final int Claret = 0xFF9EA91F; public static final int Classic_rose = 0xFF7F1734; public static final int Cobalt = 0xFFFBCCE7; public static final int Cocoa_brown = 0xFF0047AB; public static final int Coconut = 0xFFD2691E; public static final int Coffee = 0xFF965A3E; public static final int Columbia_blue = 0xFF6F4E37; public static final int Congo_pink = 0xFFC4D8E2; public static final int Cool_grey = 0xFFF88379; public static final int Copper = 0xFF8C92AC; public static final int Copper_Crayola = 0xFFB87333; public static final int Copper_penny = 0xFFDA8A67; public static final int Copper_red = 0xFFAD6F69; public static final int Copper_rose = 0xFFCB6D51; public static final int Coquelicot = 0xFF996666; public static final int Coral = 0xFFFF3800; public static final int Coral_pink = 0xFFFF7F50; public static final int Coral_red = 0xFFF88379; public static final int Cordovan = 0xFFFF4040; public static final int Corn = 0xFF893F45; public static final int Cornell_Red = 0xFFFBEC5D; public static final int Cornflower_blue = 0xFFB31B1B; public static final int Cornsilk = 0xFF6495ED; public static final int Cosmic_latte = 
0xFFFFF8DC; public static final int Cotton_candy = 0xFFFFF8E7; public static final int Cream = 0xFFFFBCD9; public static final int Crimson = 0xFFFFFDD0; public static final int Crimson_glory = 0xFFDC143C; public static final int Cyan = 0xFFBE0032; public static final int Cyan_process = 0xFF00FFFF; public static final int Cyber_grape = 0xFF00B7EB; public static final int Cyber_yellow = 0xFF58427C; public static final int Daffodil = 0xFFFFD300; public static final int Dandelion = 0xFFFFFF31; public static final int Dark_blue = 0xFFF0E130; public static final int Dark_blue_gray = 0xFF00008B; public static final int Dark_brown = 0xFF666699; public static final int Dark_byzantium = 0xFF654321; public static final int Dark_candy_apple_red = 0xFF5D3954; public static final int Dark_cerulean = 0xFFA40000; public static final int Dark_chestnut = 0xFF08457E; public static final int Dark_coral = 0xFF986960; public static final int Dark_cyan = 0xFFCD5B45; public static final int Dark_electric_blue = 0xFF008B8B; public static final int Dark_goldenrod = 0xFF536878; public static final int Dark_gray_X11 = 0xFFB8860B; public static final int Dark_green = 0xFFA9A9A9; public static final int Dark_green_X11 = 0xFF013220; public static final int Dark_imperial_blue = 0xFF006400; public static final int Dark_jungle_green = 0xFF00416A; public static final int Dark_khaki = 0xFF1A2421; public static final int Dark_lava = 0xFFBDB76B; public static final int Dark_lavender = 0xFF483C32; public static final int Dark_liver = 0xFF734F96; public static final int Dark_liver_horses = 0xFF534B4F; public static final int Dark_magenta = 0xFF543D37; public static final int Dark_medium_gray = 0xFF8B008B; public static final int Dark_midnight_blue = 0xFFA9A9A9; public static final int Dark_moss_green = 0xFF003366; public static final int Dark_olive_green = 0xFF4A5D23; public static final int Dark_orange = 0xFF556B2F; public static final int Dark_orchid = 0xFFFF8C00; public static final int 
Dark_pastel_blue = 0xFF9932CC; public static final int Dark_pastel_green = 0xFF779ECB; public static final int Dark_pastel_purple = 0xFF03C03C; public static final int Dark_pastel_red = 0xFF966FD6; public static final int Dark_pink = 0xFFC23B22; public static final int Dark_powder_blue = 0xFFE75480; public static final int Dark_puce = 0xFF003399; public static final int Dark_raspberry = 0xFF4F3A3C; public static final int Dark_red = 0xFF872657; public static final int Dark_salmon = 0xFF8B0000; public static final int Dark_scarlet = 0xFFE9967A; public static final int Dark_sea_green = 0xFF560319; public static final int Dark_sienna = 0xFF8FBC8F; public static final int Dark_sky_blue = 0xFF3C1414; public static final int Dark_slate_blue = 0xFF8CBED6; public static final int Dark_slate_gray = 0xFF483D8B; public static final int Dark_spring_green = 0xFF2F4F4F; public static final int Dark_tan = 0xFF177245; public static final int Dark_tangerine = 0xFF918151; public static final int Dark_taupe = 0xFFFFA812; public static final int Dark_terra_cotta = 0xFF483C32; public static final int Dark_turquoise = 0xFFCC4E5C; public static final int Dark_vanilla = 0xFF00CED1; public static final int Dark_violet = 0xFFD1BEA8; public static final int Dark_yellow = 0xFF9400D3; public static final int Dartmouth_green = 0xFF9B870C; public static final int Davy_s_grey = 0xFF00703C; public static final int Debian_red = 0xFF555555; public static final int Deep_carmine = 0xFFD70A53; public static final int Deep_carmine_pink = 0xFFA9203E; public static final int Deep_carrot_orange = 0xFFEF3038; public static final int Deep_cerise = 0xFFE9692C; public static final int Deep_champagne = 0xFFDA3287; public static final int Deep_chestnut = 0xFFFAD6A5; public static final int Deep_coffee = 0xFFB94E48; public static final int Deep_fuchsia = 0xFF704241; public static final int Deep_jungle_green = 0xFFC154C1; public static final int Deep_lemon = 0xFF004B49; public static final int Deep_lilac = 
0xFFF5C71A; public static final int Deep_magenta = 0xFF9955BB; public static final int Deep_mauve = 0xFFCC00CC; public static final int Deep_moss_green = 0xFFD473D4; public static final int Deep_peach = 0xFF355E3B; public static final int Deep_pink = 0xFFFFCBA4; public static final int Deep_puce = 0xFFFF1493; public static final int Deep_ruby = 0xFFA95C68; public static final int Deep_saffron = 0xFF843F5B; public static final int Deep_sky_blue = 0xFFFF9933; public static final int Deep_Space_Sparkle = 0xFF00BFFF; public static final int Deep_Taupe = 0xFF4A646C; public static final int Deep_Tuscan_red = 0xFF7E5E60; public static final int Deer = 0xFF66424D; public static final int Denim = 0xFFBA8759; public static final int Desert = 0xFF1560BD; public static final int Desert_sand = 0xFFC19A6B; public static final int Desire = 0xFFEDC9AF; public static final int Diamond = 0xFFEA3C53; public static final int Dim_gray = 0xFFB9F2FF; public static final int Dirt = 0xFF696969; public static final int Dodger_blue = 0xFF9B7653; public static final int Dogwood_rose = 0xFF1E90FF; public static final int Dollar_bill = 0xFFD71868; public static final int Donkey_brown = 0xFF85BB65; public static final int Drab = 0xFF664C28; public static final int Duke_blue = 0xFF967117; public static final int Dust_storm = 0xFF00009C; public static final int Dutch_white = 0xFFE5CCC9; public static final int Earth_yellow = 0xFFEFDFBB; public static final int Ebony = 0xFFE1A95F; public static final int Ecru = 0xFF555D50; public static final int Eerie_black = 0xFFC2B280; public static final int Eggplant = 0xFF1B1B1B; public static final int Eggshell = 0xFF614051; public static final int Egyptian_blue = 0xFFF0EAD6; public static final int Electric_blue = 0xFF1034A6; public static final int Electric_crimson = 0xFF7DF9FF; public static final int Electric_cyan = 0xFFFF003F; public static final int Electric_green = 0xFF00FFFF; public static final int Electric_indigo = 0xFF00FF00; public static final 
int Electric_lavender = 0xFF6F00FF; public static final int Electric_lime = 0xFFF4BBFF; public static final int Electric_purple = 0xFFCCFF00; public static final int Electric_ultramarine = 0xFFBF00FF; public static final int Electric_violet = 0xFF3F00FF; public static final int Electric_yellow = 0xFF8F00FF; public static final int Emerald = 0xFFFFFF33; public static final int Eminence = 0xFF50C878; public static final int English_green = 0xFF6C3082; public static final int English_lavender = 0xFF1B4D3E; public static final int English_red = 0xFFB48395; public static final int English_violet = 0xFFAB4B52; public static final int Eton_blue = 0xFF563C5C; public static final int Eucalyptus = 0xFF96C8A2; public static final int Fallow = 0xFF44D7A8; public static final int Falu_red = 0xFFC19A6B; public static final int Fandango = 0xFF801818; public static final int Fandango_pink = 0xFFB53389; public static final int Fashion_fuchsia = 0xFFDE5285; public static final int Fawn = 0xFFF400A1; public static final int Feldgrau = 0xFFE5AA70; public static final int Feldspar = 0xFF4D5D53; public static final int Fern_green = 0xFFFDD5B1; public static final int Ferrari_Red = 0xFF4F7942; public static final int Field_drab = 0xFFFF2800; public static final int Firebrick = 0xFF6C541E; public static final int Fire_engine_red = 0xFFB22222; public static final int Flame = 0xFFCE2029; public static final int Flamingo_pink = 0xFFE25822; public static final int Flattery = 0xFFFC8EAC; public static final int Flavescent = 0xFF6B4423; public static final int Flax = 0xFFF7E98E; public static final int Flirt = 0xFFEEDC82; public static final int Floral_white = 0xFFA2006D; public static final int Fluorescent_orange = 0xFFFFFAF0; public static final int Fluorescent_pink = 0xFFFFBF00; public static final int Fluorescent_yellow = 0xFFFF1493; public static final int Folly = 0xFFCCFF00; public static final int Forest_green_traditional = 0xFFFF004F; public static final int Forest_green_web = 
0xFF014421; public static final int French_beige = 0xFF228B22; public static final int French_bistre = 0xFFA67B5B; public static final int French_blue = 0xFF856D4D; public static final int French_fuchsia = 0xFF0072BB; public static final int French_lilac = 0xFFFD3F92; public static final int French_lime = 0xFF86608E; public static final int French_mauve = 0xFF9EFD38; public static final int French_pink = 0xFFD473D4; public static final int French_plum = 0xFFFD6C9E; public static final int French_puce = 0xFF811453; public static final int French_raspberry = 0xFF4E1609; public static final int French_rose = 0xFFC72C48; public static final int French_sky_blue = 0xFFF64A8A; public static final int French_violet = 0xFF77B5FE; public static final int French_wine = 0xFF8806CE; public static final int Fresh_Air = 0xFFAC1E44; public static final int Fuchsia = 0xFFA6E7FF; public static final int Fuchsia_Crayola = 0xFFFF00FF; public static final int Fuchsia_pink = 0xFFC154C1; public static final int Fuchsia_purple = 0xFFFF77FF; public static final int Fuchsia_rose = 0xFFCC397B; public static final int Fulvous = 0xFFC74375; public static final int Fuzzy_Wuzzy = 0xFFE48400; public static final int Gainsboro = 0xFFCC6666; public static final int Gamboge = 0xFFDCDCDC; public static final int Generic_viridian = 0xFFE49B0F; public static final int Ghost_white = 0xFF007F66; public static final int Giants_orange = 0xFFF8F8FF; public static final int Ginger = 0xFFFE5A1D; public static final int Glaucous = 0xFFB06500; public static final int Glitter = 0xFF6082B6; public static final int GO_green = 0xFFE6E8FA; public static final int Gold_metallic = 0xFF00AB66; public static final int Gold_web_Golden = 0xFFD4AF37; public static final int Gold_Fusion = 0xFFFFD700; public static final int Golden_brown = 0xFF85754E; public static final int Golden_poppy = 0xFF996515; public static final int Golden_yellow = 0xFFFCC200; public static final int Goldenrod = 0xFFFFDF00; public static final int 
Granny_Smith_Apple = 0xFFDAA520; public static final int Grape = 0xFFA8E4A0; public static final int Gray = 0xFF6F2DA8; public static final int Gray_HTML_CSS_gray = 0xFF808080; public static final int Gray_X11_gray = 0xFF808080; public static final int Gray_asparagus = 0xFFBEBEBE; public static final int Gray_blue = 0xFF465945; public static final int Green_Color_Wheel_X11_green = 0xFF8C92AC; public static final int Green_Crayola = 0xFF00FF00; public static final int Green_HTML_CSS_color = 0xFF1CAC78; public static final int Green_Munsell = 0xFF008000; public static final int Green_NCS = 0xFF00A877; public static final int Green_Pantone = 0xFF009F6B; public static final int Green_pigment = 0xFF00AD43; public static final int Green_RYB = 0xFF00A550; public static final int Green_yellow = 0xFF66B032; public static final int Grullo = 0xFFADFF2F; public static final int Guppie_green = 0xFFA99A86; public static final int Halay_be = 0xFF00FF7F; public static final int Han_blue = 0xFF663754; public static final int Han_purple = 0xFF446CCF; public static final int Hansa_yellow = 0xFF5218FA; public static final int Harlequin = 0xFFE9D66B; public static final int Harvard_crimson = 0xFF3FFF00; public static final int Harvest_gold = 0xFFC90016; public static final int Heart_Gold = 0xFFDA9100; public static final int Heliotrope = 0xFF808000; public static final int Heliotrope_gray = 0xFFDF73FF; public static final int Hollywood_cerise = 0xFFAA98A8; public static final int Honeydew = 0xFFF400A1; public static final int Honolulu_blue = 0xFFF0FFF0; public static final int Hooker_s_green = 0xFF006DB0; public static final int Hot_magenta = 0xFF49796B; public static final int Hot_pink = 0xFFFF1DCE; public static final int Hunter_green = 0xFFFF69B4; public static final int Iceberg = 0xFF355E3B; public static final int Icterine = 0xFF71A6D2; public static final int Illuminating_Emerald = 0xFFFCF75E; public static final int Imperial = 0xFF319177; public static final int Imperial_blue = 
0xFF602F6B; public static final int Imperial_purple = 0xFF002395; public static final int Imperial_red = 0xFF66023C; public static final int Inchworm = 0xFFED2939; public static final int Independence = 0xFFB2EC5D; public static final int India_green = 0xFF4C516D; public static final int Indian_red = 0xFF138808; public static final int Indian_yellow = 0xFFCD5C5C; public static final int Indigo = 0xFFE3A857; public static final int Indigo_dye = 0xFF6F00FF; public static final int Indigo_web = 0xFF091F92; public static final int International_Klein_Blue = 0xFF4B0082; public static final int International_orange_aerospace = 0xFF002FA7; public static final int International_orange_engineering = 0xFFFF4F00; public static final int International_orange_Golden_Gate_Bridge = 0xFFBA160C; public static final int Iris = 0xFFC0362C; public static final int Irresistible = 0xFF5A4FCF; public static final int Isabelline = 0xFFB3446C; public static final int Islamic_green = 0xFFF4F0EC; public static final int Italian_sky_blue = 0xFF009000; public static final int Ivory = 0xFFB2FFFF; public static final int Jade = 0xFFFFFFF0; public static final int Japanese_carmine = 0xFF00A86B; public static final int Japanese_indigo = 0xFF9D2933; public static final int Japanese_violet = 0xFF264348; public static final int Jasmine = 0xFF5B3256; public static final int Jasper = 0xFFF8DE7E; public static final int Jazzberry_jam = 0xFFD73B3E; public static final int Jelly_Bean = 0xFFA50B5E; public static final int Jet = 0xFFDA614E; public static final int Jonquil = 0xFF343434; public static final int Jordy_blue = 0xFFF4CA16; public static final int June_bud = 0xFF8AB9F1; public static final int Jungle_green = 0xFFBDDA57; public static final int Kelly_green = 0xFF29AB87; public static final int Kenyan_copper = 0xFF4CBB17; public static final int Keppel = 0xFF7C1C05; public static final int Khaki_HTML_CSS_Khaki = 0xFF3AB09E; public static final int Khaki_X11_Light_khaki = 0xFFC3B091; public static 
final int Kobe = 0xFFF0E68C; public static final int Kobi = 0xFF882D17; public static final int Kombu_green = 0xFFE79FC4; public static final int KU_Crimson = 0xFF354230; public static final int La_Salle_Green = 0xFFE8000D; public static final int Languid_lavender = 0xFF087830; public static final int Lapis_lazuli = 0xFFD6CADD; public static final int Laser_Lemon = 0xFF26619C; public static final int Laurel_green = 0xFFFFFF66; public static final int Lava = 0xFFA9BA9D; public static final int Lavender_floral = 0xFFCF1020; public static final int Lavender_web = 0xFFB57EDC; public static final int Lavender_blue = 0xFFE6E6FA; public static final int Lavender_blush = 0xFFCCCCFF; public static final int Lavender_gray = 0xFFFFF0F5; public static final int Lavender_indigo = 0xFFC4C3D0; public static final int Lavender_magenta = 0xFF9457EB; public static final int Lavender_mist = 0xFFEE82EE; public static final int Lavender_pink = 0xFFE6E6FA; public static final int Lavender_purple = 0xFFFBAED2; public static final int Lavender_rose = 0xFF967BB6; public static final int Lawn_green = 0xFFFBA0E3; public static final int Lemon = 0xFF7CFC00; public static final int Lemon_chiffon = 0xFFFFF700; public static final int Lemon_curry = 0xFFFFFACD; public static final int Lemon_glacier = 0xFFCCA01D; public static final int Lemon_lime = 0xFFFDFF00; public static final int Lemon_meringue = 0xFFE3FF00; public static final int Lemon_yellow = 0xFFF6EABE; public static final int Licorice = 0xFFFFF44F; public static final int Liberty = 0xFF1A1110; public static final int Light_apricot = 0xFF545AA7; public static final int Light_blue = 0xFFFDD5B1; public static final int Light_brown = 0xFFADD8E6; public static final int Light_carmine_pink = 0xFFB5651D; public static final int Light_coral = 0xFFE66771; public static final int Light_cornflower_blue = 0xFFF08080; public static final int Light_crimson = 0xFF93CCEA; public static final int Light_cyan = 0xFFF56991; public static final int 
Light_deep_pink = 0xFFE0FFFF; public static final int Light_French_beige = 0xFFFF5CCD; public static final int Light_fuchsia_pink = 0xFFC8AD7F; public static final int Light_goldenrod_yellow = 0xFFF984EF; public static final int Light_gray = 0xFFFAFAD2; public static final int Light_green = 0xFFD3D3D3; public static final int Light_hot_pink = 0xFF90EE90; public static final int Light_khaki = 0xFFFFB3DE; public static final int Light_medium_orchid = 0xFFF0E68C; public static final int Light_moss_green = 0xFFD39BCB; public static final int Light_orchid = 0xFFADDFAD; public static final int Light_pastel_purple = 0xFFE6A8D7; public static final int Light_pink = 0xFFB19CD9; public static final int Light_red_ochre = 0xFFFFB6C1; public static final int Light_salmon = 0xFFE97451; public static final int Light_salmon_pink = 0xFFFFA07A; public static final int Light_sea_green = 0xFFFF9999; public static final int Light_sky_blue = 0xFF20B2AA; public static final int Light_slate_gray = 0xFF87CEFA; public static final int Light_steel_blue = 0xFF778899; public static final int Light_taupe = 0xFFB0C4DE; public static final int Light_Thulian_pink = 0xFFB38B6D; public static final int Light_yellow = 0xFFE68FAC; public static final int Lilac = 0xFFFFFFE0; public static final int Lime_color_wheel = 0xFFC8A2C8; public static final int Lime_web_X11_green = 0xFFBFFF00; public static final int Lime_green = 0xFF00FF00; public static final int Limerick = 0xFF32CD32; public static final int Lincoln_green = 0xFF9DC209; public static final int Linen = 0xFF195905; public static final int Lion = 0xFFFAF0E6; public static final int Liseran_Purple = 0xFFC19A6B; public static final int Little_boy_blue = 0xFFDE6FA1; public static final int Liver = 0xFF6CA0DC; public static final int Liver_dogs = 0xFF674C47; public static final int Liver_organ = 0xFFB86D29; public static final int Liver_chestnut = 0xFF6C2E1F; public static final int Livid = 0xFF987456; public static final int Lumber = 0xFF6699CC; 
public static final int Lust = 0xFFFFE4CD; public static final int Magenta = 0xFFE62020; public static final int Magenta_Crayola = 0xFFFF00FF; public static final int Magenta_dye = 0xFFFF55A3; public static final int Magenta_Pantone = 0xFFCA1F7B; public static final int Magenta_process = 0xFFD0417E; public static final int Magenta_haze = 0xFFFF0090; public static final int Magic_mint = 0xFF9F4576; public static final int Magnolia = 0xFFAAF0D1; public static final int Mahogany = 0xFFF8F4FF; public static final int Maize = 0xFFC04000; public static final int Majorelle_Blue = 0xFFFBEC5D; public static final int Malachite = 0xFF6050DC; public static final int Manatee = 0xFF0BDA51; public static final int Mango_Tango = 0xFF979AAA; public static final int Mantis = 0xFFFF8243; public static final int Mardi_Gras = 0xFF74C365; public static final int Maroon_Crayola = 0xFF880085; public static final int Maroon_HTML_CSS = 0xFFC32148; public static final int Maroon_X11 = 0xFF800000; public static final int Mauve = 0xFFB03060; public static final int Mauve_taupe = 0xFFE0B0FF; public static final int Mauvelous = 0xFF915F6D; public static final int May_green = 0xFFEF98AA; public static final int Maya_blue = 0xFF4C9141; public static final int Meat_brown = 0xFF73C2FB; public static final int Medium_aquamarine = 0xFFE5B73B; public static final int Medium_blue = 0xFF66DDAA; public static final int Medium_candy_apple_red = 0xFF0000CD; public static final int Medium_carmine = 0xFFE2062C; public static final int Medium_champagne = 0xFFAF4035; public static final int Medium_electric_blue = 0xFFF3E5AB; public static final int Medium_jungle_green = 0xFF035096; public static final int Medium_lavender_magenta = 0xFF1C352D; public static final int Medium_orchid = 0xFFDDA0DD; public static final int Medium_Persian_blue = 0xFFBA55D3; public static final int Medium_purple = 0xFF0067A5; public static final int Medium_red_violet = 0xFF9370DB; public static final int Medium_ruby = 0xFFBB3385; 
public static final int Medium_sea_green = 0xFFAA4069; public static final int Medium_sky_blue = 0xFF3CB371; public static final int Medium_slate_blue = 0xFF80DAEB; public static final int Medium_spring_bud = 0xFF7B68EE; public static final int Medium_spring_green = 0xFFC9DC87; public static final int Medium_taupe = 0xFF00FA9A; public static final int Medium_turquoise = 0xFF674C47; public static final int Medium_Tuscan_red = 0xFF48D1CC; public static final int Medium_vermilion = 0xFF79443B; public static final int Medium_violet_red = 0xFFD9603B; public static final int Mellow_apricot = 0xFFC71585; public static final int Mellow_yellow = 0xFFF8B878; public static final int Melon = 0xFFF8DE7E; public static final int Metallic_Seaweed = 0xFFFDBCB4; public static final int Metallic_Sunburst = 0xFF0A7E8C; public static final int Mexican_pink = 0xFF9C7C38; public static final int Midnight_blue = 0xFFE4007C; public static final int Midnight_green_eagle_green = 0xFF191970; public static final int Mikado_yellow = 0xFF004953; public static final int Mindaro = 0xFFFFC40C; public static final int Mint = 0xFFE3F988; public static final int Mint_cream = 0xFF3EB489; public static final int Mint_green = 0xFFF5FFFA; public static final int Misty_rose = 0xFF98FF98; public static final int Moccasin = 0xFFFFE4E1; public static final int Mode_beige = 0xFFFAEBD7; public static final int Moonstone_blue = 0xFF967117; public static final int Mordant_red_19 = 0xFF73A9C2; public static final int Moss_green = 0xFFAE0C00; public static final int Mountain_Meadow = 0xFF8A9A5B; public static final int Mountbatten_pink = 0xFF30BA8F; public static final int MSU_Green = 0xFF997A8D; public static final int Mughal_green = 0xFF18453B; public static final int Mulberry = 0xFF306030; public static final int Mustard = 0xFFC54B8C; public static final int Myrtle_green = 0xFFFFDB58; public static final int Nadeshiko_pink = 0xFF317873; public static final int Napier_green = 0xFFF6ADC6; public static final int 
Naples_yellow = 0xFF2A8000; public static final int Navajo_white = 0xFFFADA5E; public static final int Navy = 0xFFFFDEAD; public static final int Navy_purple = 0xFF000080; public static final int Neon_Carrot = 0xFF9457EB; public static final int Neon_fuchsia = 0xFFFFA343; public static final int Neon_green = 0xFFFE4164; public static final int New_Car = 0xFF39FF14; public static final int New_York_pink = 0xFF214FC6; public static final int Non_photo_blue = 0xFFD7837F; public static final int North_Texas_Green = 0xFFA4DDED; public static final int Nyanza = 0xFF059033; public static final int Ocean_Boat_Blue = 0xFFE9FFDB; public static final int Ochre = 0xFF0077BE; public static final int Office_green = 0xFFCC7722; public static final int Old_burgundy = 0xFF008000; public static final int Old_gold = 0xFF43302E; public static final int Old_heliotrope = 0xFFCFB53B; public static final int Old_lace = 0xFF563C5C; public static final int Old_lavender = 0xFFFDF5E6; public static final int Old_mauve = 0xFF796878; public static final int Old_moss_green = 0xFF673147; public static final int Old_rose = 0xFF867E36; public static final int Old_silver = 0xFFC08081; public static final int Olive = 0xFF848482; public static final int Olive_Drab_3 = 0xFF808000; public static final int Olive_Drab_7 = 0xFF6B8E23; public static final int Olivine = 0xFF3C341F; public static final int Onyx = 0xFF9AB973; public static final int Opera_mauve = 0xFF353839; public static final int Orange_color_wheel = 0xFFB784A7; public static final int Orange_Crayola = 0xFFFF7F00; public static final int Orange_Pantone = 0xFFFF7538; public static final int Orange_RYB = 0xFFFF5800; public static final int Orange_web = 0xFFFB9902; public static final int Orange_peel = 0xFFFFA500; public static final int Orange_red = 0xFFFF9F00; public static final int Orchid = 0xFFFF4500; public static final int Orchid_pink = 0xFFDA70D6; public static final int Orioles_orange = 0xFFF2BDCD; public static final int Otter_brown = 
0xFFFB4F14; public static final int Outer_Space = 0xFF654321; public static final int Outrageous_Orange = 0xFF414A4C; public static final int Oxford_Blue = 0xFFFF6E4A; public static final int OU_Crimson_Red = 0xFF002147; public static final int Pakistan_green = 0xFF990000; public static final int Palatinate_blue = 0xFF006600; public static final int Palatinate_purple = 0xFF273BE2; public static final int Pale_aqua = 0xFF682860; public static final int Pale_blue = 0xFFBCD4E6; public static final int Pale_brown = 0xFFAFEEEE; public static final int Pale_carmine = 0xFF987654; public static final int Pale_cerulean = 0xFFAF4035; public static final int Pale_chestnut = 0xFF9BC4E2; public static final int Pale_copper = 0xFFDDADAF; public static final int Pale_cornflower_blue = 0xFFDA8A67; public static final int Pale_gold = 0xFFABCDEF; public static final int Pale_goldenrod = 0xFFE6BE8A; public static final int Pale_green = 0xFFEEE8AA; public static final int Pale_lavender = 0xFF98FB98; public static final int Pale_magenta = 0xFFDCD0FF; public static final int Pale_pink = 0xFFF984E5; public static final int Pale_plum = 0xFFFADADD; public static final int Pale_red_violet = 0xFFDDA0DD; public static final int Pale_robin_egg_blue = 0xFFDB7093; public static final int Pale_silver = 0xFF96DED1; public static final int Pale_spring_bud = 0xFFC9C0BB; public static final int Pale_taupe = 0xFFECEBBD; public static final int Pale_turquoise = 0xFFBC987E; public static final int Pale_violet_red = 0xFFAFEEEE; public static final int Pansy_purple = 0xFFDB7093; public static final int Paolo_Veronese_green = 0xFF78184A; public static final int Papaya_whip = 0xFF009B7D; public static final int Paradise_pink = 0xFFFFEFD5; public static final int Paris_Green = 0xFFE63E62; public static final int Pastel_blue = 0xFF50C878; public static final int Pastel_brown = 0xFFAEC6CF; public static final int Pastel_gray = 0xFF826953; public static final int Pastel_green = 0xFFCFCFC4; public static final 
int Pastel_magenta = 0xFF77DD77; public static final int Pastel_orange = 0xFFF49AC2; public static final int Pastel_pink = 0xFFFFB347; public static final int Pastel_purple = 0xFFDEA5A4; public static final int Pastel_red = 0xFFB39EB5; public static final int Pastel_violet = 0xFFFF6961; public static final int Pastel_yellow = 0xFFCB99C9; public static final int Patriarch = 0xFFFDFD96; public static final int Payne_s_grey = 0xFF800080; public static final int Peach = 0xFF536878; public static final int Peach2 = 0xFFFFE5B4; public static final int Peach_orange = 0xFFFFCBA4; public static final int Peach_puff = 0xFFFFCC99; public static final int Peach_yellow = 0xFFFFDAB9; public static final int Pear = 0xFFFADFAD; public static final int Pearl = 0xFFD1E231; public static final int Pearl_Aqua = 0xFFEAE0C8; public static final int Pearly_purple = 0xFF88D8C0; public static final int Peridot = 0xFFB768A2; public static final int Periwinkle = 0xFFE6E200; public static final int Persian_blue = 0xFFCCCCFF; public static final int Persian_green = 0xFF1C39BB; public static final int Persian_indigo = 0xFF00A693; public static final int Persian_orange = 0xFF32127A; public static final int Persian_pink = 0xFFD99058; public static final int Persian_plum = 0xFFF77FBE; public static final int Persian_red = 0xFF701C1C; public static final int Persian_rose = 0xFFCC3333; public static final int Persimmon = 0xFFFE28A2; public static final int Peru = 0xFFEC5800; public static final int Phlox = 0xFFCD853F; public static final int Phthalo_blue = 0xFFDF00FF; public static final int Phthalo_green = 0xFF000F89; public static final int Picton_blue = 0xFF123524; public static final int Pictorial_carmine = 0xFF45B1E8; public static final int Piggy_pink = 0xFFC30B4E; public static final int Pine_green = 0xFFFDDDE6; public static final int Pineapple = 0xFF01796F; public static final int Pink = 0xFF563C0D; public static final int Pink_Pantone = 0xFFFFC0CB; public static final int Pink_lace = 
0xFFD74894; public static final int Pink_lavender = 0xFFFFDDF4; public static final int Pink_orange = 0xFFD8B2D1; public static final int Pink_pearl = 0xFFFF9966; public static final int Pink_Sherbet = 0xFFE7ACCF; public static final int Pistachio = 0xFFF78FA7; public static final int Platinum = 0xFF93C572; public static final int Plum = 0xFFE5E4E2; public static final int Plum_web = 0xFF8E4585; public static final int Pomp_and_Power = 0xFFDDA0DD; public static final int Popstar = 0xFF86608E; public static final int Portland_Orange = 0xFFBE4F62; public static final int Powder_blue = 0xFFFF5A36; public static final int Princeton_orange = 0xFFB0E0E6; public static final int Prune = 0xFFF58025; public static final int Prussian_blue = 0xFF701C1C; public static final int Psychedelic_purple = 0xFF003153; public static final int Puce = 0xFFDF00FF; public static final int Puce_red = 0xFFCC8899; public static final int Pullman_Brown_UPS_Brown = 0xFF722F37; public static final int Pumpkin = 0xFF644117; public static final int Purple_HTML = 0xFFFF7518; public static final int Purple_Munsell = 0xFF800080; public static final int Purple_X11 = 0xFF9F00C5; public static final int Purple_Heart = 0xFFA020F0; public static final int Purple_mountain_majesty = 0xFF69359C; public static final int Purple_navy = 0xFF9678B6; public static final int Purple_pizzazz = 0xFF4E5180; public static final int Purple_taupe = 0xFFFE4EDA; public static final int Purpureus = 0xFF50404D; public static final int Quartz = 0xFF9A4EAE; public static final int Queen_blue = 0xFF51484F; public static final int Queen_pink = 0xFF436B95; public static final int Quinacridone_magenta = 0xFFE8CCD7; public static final int Rackley = 0xFF8E3A59; public static final int Radical_Red = 0xFF5D8AA8; public static final int Rajah = 0xFFFF355E; public static final int Raspberry = 0xFFFBAB60; public static final int Raspberry_glace = 0xFFE30B5C; public static final int Raspberry_pink = 0xFF915F6D; public static final int 
Raspberry_rose = 0xFFE25098; public static final int Raw_umber = 0xFFB3446C; public static final int Razzle_dazzle_rose = 0xFF826644; public static final int Razzmatazz = 0xFFFF33CC; public static final int Razzmic_Berry = 0xFFE3256B; public static final int Red = 0xFF8D4E85; public static final int Red_Crayola = 0xFFFF0000; public static final int Red_Munsell = 0xFFEE204D; public static final int Red_NCS = 0xFFF2003C; public static final int Red_Pantone = 0xFFC40233; public static final int Red_pigment = 0xFFED2939; public static final int Red_RYB = 0xFFED1C24; public static final int Red_brown = 0xFFFE2712; public static final int Red_devil = 0xFFA52A2A; public static final int Red_orange = 0xFF860111; public static final int Red_purple = 0xFFFF5349; public static final int Red_violet = 0xFFE40078; public static final int Redwood = 0xFFC71585; public static final int Regalia = 0xFFA45A52; public static final int Resolution_blue = 0xFF522D80; public static final int Rhythm = 0xFF002387; public static final int Rich_black = 0xFF777696; public static final int Rich_brilliant_lavender = 0xFF004040; public static final int Rich_carmine = 0xFFF1A7FE; public static final int Rich_electric_blue = 0xFFD70040; public static final int Rich_lavender = 0xFF0892D0; public static final int Rich_lilac = 0xFFA76BCF; public static final int Rich_maroon = 0xFFB666D2; public static final int Rifle_green = 0xFFB03060; public static final int Roast_coffee = 0xFF444C38; public static final int Robin_egg_blue = 0xFF704241; public static final int Rocket_metallic = 0xFF00CCCC; public static final int Roman_silver = 0xFF8A7F80; public static final int Rose = 0xFF838996; public static final int Rose_bonbon = 0xFFFF007F; public static final int Rose_ebony = 0xFFF9429E; public static final int Rose_gold = 0xFF674846; public static final int Rose_madder = 0xFFB76E79; public static final int Rose_pink = 0xFFE32636; public static final int Rose_quartz = 0xFFFF66CC; public static final int 
Rose_red = 0xFFAA98A9; public static final int Rose_taupe = 0xFFC21E56; public static final int Rose_vale = 0xFF905D5D; public static final int Rosewood = 0xFFAB4E52; public static final int Rosso_corsa = 0xFF65000B; public static final int Rosy_brown = 0xFFD40000; public static final int Royal_azure = 0xFFBC8F8F; public static final int Royal_blue = 0xFF0038A8; public static final int Royal_blue2 = 0xFF002366; public static final int Royal_fuchsia = 0xFF4169E1; public static final int Royal_purple = 0xFFCA2C92; public static final int Royal_yellow = 0xFF7851A9; public static final int Ruber = 0xFFFADA5E; public static final int Rubine_red = 0xFFCE4676; public static final int Ruby = 0xFFD10056; public static final int Ruby_red = 0xFFE0115F; public static final int Ruddy = 0xFF9B111E; public static final int Ruddy_brown = 0xFFFF0028; public static final int Ruddy_pink = 0xFFBB6528; public static final int Rufous = 0xFFE18E96; public static final int Russet = 0xFFA81C07; public static final int Russian_green = 0xFF80461B; public static final int Russian_violet = 0xFF679267; public static final int Rust = 0xFF32174D; public static final int Rusty_red = 0xFFB7410E; public static final int Sacramento_State_green = 0xFFDA2C43; public static final int Saddle_brown = 0xFF00563F; public static final int Safety_orange_blaze_orange = 0xFF8B4513; public static final int Safety_yellow = 0xFFFF6700; public static final int Saffron = 0xFFEED202; public static final int Sage = 0xFFF4C430; public static final int St_Patrick_s_blue = 0xFFBCB88A; public static final int Salmon = 0xFF23297A; public static final int Salmon_pink = 0xFFFA8072; public static final int Sand = 0xFFFF91A4; public static final int Sand_dune = 0xFFC2B280; public static final int Sandstorm = 0xFF967117; public static final int Sandy_brown = 0xFFECD540; public static final int Sandy_taupe = 0xFFF4A460; public static final int Sangria = 0xFF967117; public static final int Sap_green = 0xFF92000A; public static 
final int Sapphire = 0xFF507D2A; public static final int Sapphire_blue = 0xFF0F52BA; public static final int Satin_sheen_gold = 0xFF0067A5; public static final int Scarlet = 0xFFCBA135; public static final int Scarlet2 = 0xFFFF2400; public static final int Schauss_pink = 0xFFFD0E35; public static final int School_bus_yellow = 0xFFFF91AF; public static final int Screamin_Green = 0xFFFFD800; public static final int Sea_blue = 0xFF76FF7A; public static final int Sea_green = 0xFF006994; public static final int Seal_brown = 0xFF2E8B57; public static final int Seashell = 0xFF321414; public static final int Selective_yellow = 0xFFFFF5EE; public static final int Sepia = 0xFFFFBA00; public static final int Shadow = 0xFF704214; public static final int Shadow_blue = 0xFF8A795D; public static final int Shampoo = 0xFF778BA5; public static final int Shamrock_green = 0xFFFFCFF1; public static final int Sheen_Green = 0xFF009E60; public static final int Shimmering_Blush = 0xFF8FD400; public static final int Shocking_pink = 0xFFD98695; public static final int Shocking_pink_Crayola = 0xFFFC0FC0; public static final int Sienna = 0xFFFF6FFF; public static final int Silver = 0xFF882D17; public static final int Silver_chalice = 0xFFC0C0C0; public static final int Silver_Lake_blue = 0xFFACACAC; public static final int Silver_pink = 0xFF5D89BA; public static final int Silver_sand = 0xFFC4AEAD; public static final int Sinopia = 0xFFBFC1C2; public static final int Skobeloff = 0xFFCB410B; public static final int Sky_blue = 0xFF007474; public static final int Sky_magenta = 0xFF87CEEB; public static final int Slate_blue = 0xFFCF71AF; public static final int Slate_gray = 0xFF6A5ACD; public static final int Smalt_Dark_powder_blue = 0xFF708090; public static final int Smitten = 0xFF003399; public static final int Smoke = 0xFFC84186; public static final int Smokey_topaz = 0xFF738276; public static final int Smoky_black = 0xFF933D41; public static final int Snow = 0xFF100C08; public static final int 
Soap = 0xFFFFFAFA; public static final int Solid_pink = 0xFFCEC8EF; public static final int Sonic_silver = 0xFF893843; public static final int Spartan_Crimson = 0xFF757575; public static final int Space_cadet = 0xFF9E1316; public static final int Spanish_bistre = 0xFF1D2951; public static final int Spanish_blue = 0xFF807532; public static final int Spanish_carmine = 0xFF0070B8; public static final int Spanish_crimson = 0xFFD10047; public static final int Spanish_gray = 0xFFE51A4C; public static final int Spanish_green = 0xFF989898; public static final int Spanish_orange = 0xFF009150; public static final int Spanish_pink = 0xFFE86100; public static final int Spanish_red = 0xFFF7BFBE; public static final int Spanish_sky_blue = 0xFFE60026; public static final int Spanish_violet = 0xFF00FFFF; public static final int Spanish_viridian = 0xFF4C2882; public static final int Spiro_Disco_Ball = 0xFF007F5C; public static final int Spring_bud = 0xFF0FC0FC; public static final int Spring_green = 0xFFA7FC00; public static final int Star_command_blue = 0xFF00FF7F; public static final int Steel_blue = 0xFF007BB8; public static final int Steel_pink = 0xFF4682B4; public static final int Stil_de_grain_yellow = 0xFFCC33CC; public static final int Stizza = 0xFFFADA5E; public static final int Stormcloud = 0xFF990000; public static final int Straw = 0xFF4F666A; public static final int Strawberry = 0xFFE4D96F; public static final int Sunglow = 0xFFFC5A8D; public static final int Sunray = 0xFFFFCC33; public static final int Sunset = 0xFFE3AB57; public static final int Sunset_orange = 0xFFFAD6A5; public static final int Super_pink = 0xFFFD5E53; public static final int Tan = 0xFFCF6BA9; public static final int Tangelo = 0xFFD2B48C; public static final int Tangerine = 0xFFF94D00; public static final int Tangerine_yellow = 0xFFF28500; public static final int Tango_pink = 0xFFFFCC00; public static final int Taupe = 0xFFE4717A; public static final int Taupe_gray = 0xFF483C32; public static final 
int Tea_green = 0xFF8B8589; public static final int Tea_rose = 0xFFD0F0C0; public static final int Tea_rose2 = 0xFFF88379; public static final int Teal = 0xFFF4C2C2; public static final int Teal_blue = 0xFF008080; public static final int Teal_deer = 0xFF367588; public static final int Teal_green = 0xFF99E6B3; public static final int Telemagenta = 0xFF00827F; public static final int Tenn = 0xFFCF3476; public static final int Terra_cotta = 0xFFCD5700; public static final int Thistle = 0xFFE2725B; public static final int Thulian_pink = 0xFFD8BFD8; public static final int Tickle_Me_Pink = 0xFFDE6FA1; public static final int Tiffany_Blue = 0xFFFC89AC; public static final int Tiger_s_eye = 0xFF0ABAB5; public static final int Timberwolf = 0xFFE08D3C; public static final int Titanium_yellow = 0xFFDBD7D2; public static final int Tomato = 0xFFEEE600; public static final int Toolbox = 0xFFFF6347; public static final int Topaz = 0xFF746CC0; public static final int Tractor_red = 0xFFFFC87C; public static final int Trolley_Grey = 0xFFFD0E35; public static final int Tropical_rain_forest = 0xFF808080; public static final int True_Blue = 0xFF00755E; public static final int Tufts_Blue = 0xFF0073CF; public static final int Tulip = 0xFF417DC1; public static final int Tumbleweed = 0xFFFF878D; public static final int Turkish_rose = 0xFFDEAA88; public static final int Turquoise = 0xFFB57281; public static final int Turquoise_blue = 0xFF40E0D0; public static final int Turquoise_green = 0xFF00FFEF; public static final int Tuscan = 0xFFA0D6B4; public static final int Tuscan_brown = 0xFFFAD6A5; public static final int Tuscan_red = 0xFF6F4E37; public static final int Tuscan_tan = 0xFF7C4848; public static final int Tuscany = 0xFFA67B5B; public static final int Twilight_lavender = 0xFFC09999; public static final int Tyrian_purple = 0xFF8A496B; public static final int UA_blue = 0xFF66023C; public static final int UA_red = 0xFF0033AA; public static final int Ube = 0xFFD9004C; public static final 
int UCLA_Blue = 0xFF8878C3; public static final int UCLA_Gold = 0xFF536895; public static final int UFO_Green = 0xFFFFB300; public static final int Ultramarine = 0xFF3CD070; public static final int Ultramarine_blue = 0xFF120A8F; public static final int Ultra_pink = 0xFF4166F5; public static final int Ultra_red = 0xFFFF6FFF; public static final int Umber = 0xFFFC6C85; public static final int Unbleached_silk = 0xFF635147; public static final int United_Nations_blue = 0xFFFFDDCA; public static final int University_of_California_Gold = 0xFF5B92E5; public static final int Unmellow_yellow = 0xFFB78727; public static final int UP_Forest_green = 0xFFFFFF66; public static final int UP_Maroon = 0xFF014421; public static final int Upsdell_red = 0xFF7B1113; public static final int Urobilin = 0xFFAE2029; public static final int USAFA_blue = 0xFFE1AD21; public static final int USC_Cardinal = 0xFF004F98; public static final int USC_Gold = 0xFF990000; public static final int University_of_Tennessee_Orange = 0xFFFFCC00; public static final int Utah_Crimson = 0xFFF77F00; public static final int Vanilla = 0xFFD3003F; public static final int Vanilla_ice = 0xFFF3E5AB; public static final int Vegas_gold = 0xFFF38FA9; public static final int Venetian_red = 0xFFC5B358; public static final int Verdigris = 0xFFC80815; public static final int Vermilion = 0xFF43B3AE; public static final int Vermilion2 = 0xFFE34234; public static final int Veronica = 0xFFD9381E; public static final int Violet = 0xFFA020F0; public static final int Violet_color_wheel = 0xFF8F00FF; public static final int Violet_RYB = 0xFF7F00FF; public static final int Violet_web = 0xFF8601AF; public static final int Violet_blue = 0xFFEE82EE; public static final int Violet_red = 0xFF324AB2; public static final int Viridian = 0xFFF75394; public static final int Viridian_green = 0xFF40826D; public static final int Vista_blue = 0xFF009698; public static final int Vivid_auburn = 0xFF7C9ED9; public static final int Vivid_burgundy = 
0xFF922724; public static final int Vivid_cerise = 0xFF9F1D35; public static final int Vivid_orchid = 0xFFDA1D81; public static final int Vivid_sky_blue = 0xFFCC00FF; public static final int Vivid_tangerine = 0xFF00CCFF; public static final int Vivid_violet = 0xFFFFA089; public static final int Warm_black = 0xFF9F00FF; public static final int Waterspout = 0xFF004242; public static final int Wenge = 0xFFA4F4F9; public static final int Wheat = 0xFF645452; public static final int White = 0xFFF5DEB3; public static final int White_smoke = 0xFFFFFFFF; public static final int Wild_blue_yonder = 0xFFF5F5F5; public static final int Wild_orchid = 0xFFA2ADD0; public static final int Wild_Strawberry = 0xFFD470A2; public static final int Wild_watermelon = 0xFFFF43A4; public static final int Willpower_orange = 0xFFFC6C85; public static final int Windsor_tan = 0xFFFD5800; public static final int Wine = 0xFFA75502; public static final int Wine_dregs = 0xFF722F37; public static final int Wisteria = 0xFF673147; public static final int Wood_brown = 0xFFC9A0DC; public static final int Xanadu = 0xFFC19A6B; public static final int Yale_Blue = 0xFF738678; public static final int Yankees_blue = 0xFF0F4D92; public static final int Yellow = 0xFF1C2841; public static final int Yellow_Crayola = 0xFFFFFF00; public static final int Yellow_Munsell = 0xFFFCE883; public static final int Yellow_NCS = 0xFFEFCC00; public static final int Yellow_Pantone = 0xFFFFD300; public static final int Yellow_process = 0xFFFEDF00; public static final int Yellow_RYB = 0xFFFFEF00; public static final int Yellow_green = 0xFFFEFE33; public static final int Yellow_Orange = 0xFF9ACD32; public static final int Yellow_rose = 0xFFFFAE42; public static final int Zaffre = 0xFFFFF000; public static final int Zinnwaldite_brown = 0xFF0014A8; public static final int Zomp = 0xFF2C1608; }//class DYColor ends here
package com.teamname.goaton.components; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.physics.box2d.Contact; import com.badlogic.gdx.physics.box2d.Fixture; import com.teamname.goaton.*; public class EnemyComponent extends Component { public int health; public boolean canBeDamaged = true; public int initialHealth; public EnemyComponent(int health) { this.health = health; this.initialHealth = health; } @Override protected void update(float dt) { if(health <= 0) { if((gameObject.getComponent("DemonPhysicsComponent")) != null){ ((DemonPhysicsComponent)(gameObject.getComponent("DemonPhysicsComponent"))).setMaskBits((short)0); } } } @Override protected void onCollisionEnter(Contact collision, GameObject other) { if (collision.getFixtureA().isSensor() || collision.getFixtureB().isSensor()) { return; } if(other.tags.contains("goat") && canBeDamaged) { if(gameObject.tags.contains("demonboss") && ((DemonBossPhysicsComponent)gameObject.getComponent("DemonBossPhysicsComponent")).isHitboxOn()) { //don't damage demon, push that goat back } else { health -= 1; gameObject.send(new Message("damaged")); other.send(new Message("destroy")); GoatonWorld.numGoats GoatonWorld.Destroy(other); } } if (other.tags.contains("player")) { // Damage player // other.getBody().applyForce(100f, 100f, 0f, 0f, false); if (!((PlayerMovementComponent) other.getComponent("PlayerMovementComponent")).hit) { GoatonWorld.sendGlobalMessage(new Message("player_hit")); } ((PlayerMovementComponent) other.getComponent("PlayerMovementComponent")).hit = true; if(gameObject.tags.contains("demonboss")) { other.getBody().setLinearVelocity(new Vector2(0,-7.50f)); other.getBody().applyLinearImpulse(new Vector2(0, -2.5f), new Vector2(0,-4.0f), false); } else { Vector2 dir = other.getPosition().sub(this.gameObject.getPosition()).nor().scl(10.0f); other.getBody().setLinearVelocity(dir); } } if(health <= 0 && canBeDamaged) { gameObject.send(new Message("destroy")); if (gameObject.tags.contains("demon")) { 
GoatonWorld.numDemons ((AnimatedSpriteRenderComponent)(gameObject.getComponent("DemonAnimatedSpriteComponent"))).setVisible(false); //demon is destroyed when particle effect is finished; check ParticleCompnoent } } } public int getHealth() { return health; } @Override public String getID() { return "EnemyComponent"; } @Override public Component cloneComponent() { return new EnemyComponent(initialHealth); } }
package io.bigio.core.codec;

import io.bigio.core.Envelope;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufInputStream;
import java.io.IOException;
import org.msgpack.core.MessageFormat;
import org.msgpack.core.MessagePack;
import org.msgpack.core.MessageUnpacker;

/**
 * Static utility for decoding envelope messages from their MsgPack wire form.
 *
 * @author Andy Trimble
 */
public class EnvelopeDecoder {

    private static final MessagePack msgPack = new MessagePack();

    // Pure utility class; never instantiated.
    private EnvelopeDecoder() {
    }

    /**
     * Decode a message envelope from a Netty buffer.
     *
     * @param bytes the raw message.
     * @return the decoded message.
     * @throws IOException in case of a decode error.
     */
    public static Envelope decode(ByteBuf bytes) throws IOException {
        return decode(msgPack.newUnpacker(new ByteBufInputStream(bytes)));
    }

    /**
     * Decode a message envelope from a byte array.
     *
     * @param bytes the raw message.
     * @return the decoded message.
     * @throws IOException in case of a decode error.
     */
    public static Envelope decode(byte[] bytes) throws IOException {
        return decode(msgPack.newUnpacker(bytes));
    }

    /**
     * Decode a message envelope from an unpacker positioned at the start of a message.
     *
     * @param unpacker a MsgPack unpacker containing the raw message.
     * @return the decoded message.
     * @throws IOException in case of a decode error.
     */
    private static Envelope decode(MessageUnpacker unpacker) throws IOException {
        Envelope envelope = new Envelope();

        // The sender key is serialized as six ints, rendered "a.b.c.d:e:f".
        StringBuilder senderKey = new StringBuilder();
        senderKey.append(unpacker.unpackInt());
        senderKey.append('.');
        senderKey.append(unpacker.unpackInt());
        senderKey.append('.');
        senderKey.append(unpacker.unpackInt());
        senderKey.append('.');
        senderKey.append(unpacker.unpackInt());
        senderKey.append(':');
        senderKey.append(unpacker.unpackInt());
        senderKey.append(':');
        senderKey.append(unpacker.unpackInt());
        envelope.setSenderKey(senderKey.toString());

        envelope.setEncrypted(unpacker.unpackBoolean());
        if (envelope.isEncrypted()) {
            // Encrypted messages carry their key serialized as an array of bytes.
            int keyLength = unpacker.unpackArrayHeader();
            byte[] key = new byte[keyLength];
            for (int i = 0; i < keyLength; ++i) {
                key[i] = unpacker.unpackByte();
            }
            envelope.setKey(key);
        }

        envelope.setExecuteTime(unpacker.unpackInt());
        envelope.setMillisecondsSinceMidnight(unpacker.unpackInt());
        envelope.setTopic(unpacker.unpackString());
        envelope.setPartition(unpacker.unpackString());
        envelope.setClassName(unpacker.unpackString());

        // The payload may appear either as a binary blob or as an array of bytes.
        // getNextFormat() peeks without consuming, so one call covers all three checks.
        MessageFormat next = unpacker.getNextFormat();
        if (next == MessageFormat.BIN8
                || next == MessageFormat.BIN16
                || next == MessageFormat.BIN32) {
            byte[] payload = new byte[unpacker.unpackBinaryHeader()];
            unpacker.readPayload(payload);
            envelope.setPayload(payload);
        } else {
            int payloadLength = unpacker.unpackArrayHeader();
            byte[] payload = new byte[payloadLength];
            for (int i = 0; i < payloadLength; ++i) {
                payload[i] = unpacker.unpackByte();
            }
            envelope.setPayload(payload);
        }

        return envelope;
    }
}
// Triple Play - utilities for use in PlayN-based games package tripleplay.ui.layout; import playn.core.Asserts; import pythagoras.f.Dimension; import pythagoras.f.IDimension; import pythagoras.f.IPoint; import pythagoras.f.Point; import pythagoras.f.Rectangle; import tripleplay.ui.Element; import tripleplay.ui.Elements; import tripleplay.ui.Layout; /** * A layout that positions elements at absolute coordinates (at either their preferred size or at a * manually specified size). Constraints are specified like so: * <pre>{@code * Group group = new Group(new AbsoluteLayout()).add( * AbsoluteLayout.at(new Label("+50+50"), 50, 50), * AbsoluteLayout.at(new Button("100x50+25+25"), 25, 25, 100, 50) * ); * }</pre> */ public class AbsoluteLayout extends Layout { /** Defines absolute layout constraints. */ public static final class Constraint extends Layout.Constraint { public final IPoint position; public final IDimension size; public final boolean center; public Constraint (IPoint position, IDimension size, boolean center) { this.position = position; this.size = size; this.center = center; } public IDimension psize (AbsoluteLayout layout, Element<?> elem) { return size == ZERO ? layout.preferredSize(elem, size.width(), size.height()) : size; } public IPoint pos (IDimension psize) { return center ? position.subtract(psize.width()/2, psize.height()/2) : position; } } /** * Positions {@code elem} at the specified position, in its preferred size. */ public static <T extends Element<?>> T at (T elem, float x, float y) { return at(elem, new Point(x, y)); } /** * Positions {@code elem} at the specified position, in its preferred size. */ public static <T extends Element<?>> T at (T elem, IPoint position) { return at(elem, position, ZERO); } /** * Constrains {@code elem} to the specified position and size. 
*/ public static <T extends Element<?>> T at (T elem, float x, float y, float width, float height) { return at(elem, new Point(x, y), new Dimension(width, height)); } /** * Constrains {@code elem} to the specified position and size. */ public static <T extends Element<?>> T at (T elem, IPoint position, IDimension size) { elem.setConstraint(new Constraint(position, size, false)); return elem; } /** * Centers {@code elem} on the specified position, in its preferred size. */ public static <T extends Element<?>> T centerAt (T elem, float x, float y) { return centerAt(elem, new Point(x, y)); } /** * Centers {@code elem} on the specified position, in its preferred size. */ public static <T extends Element<?>> T centerAt (T elem, IPoint position) { elem.setConstraint(new Constraint(position, ZERO, true)); return elem; } @Override public Dimension computeSize (Elements<?> elems, float hintX, float hintY) { // report a size large enough to contain all of our elements Rectangle bounds = new Rectangle(); for (Element<?> elem : elems) { if (!elem.isVisible()) continue; Constraint c = constraint(elem); IDimension psize = c.psize(this, elem); bounds.add(new Rectangle(c.pos(psize), psize)); } return new Dimension(bounds.width, bounds.height); } @Override public void layout (Elements<?> elems, float left, float top, float width, float height) { for (Element<?> elem : elems) { if (!elem.isVisible()) continue; Constraint c = constraint(elem); IDimension psize = c.psize(this, elem); // this should return a cached size IPoint pos = c.pos(psize); setBounds(elem, left + pos.x(), top + pos.y(), psize.width(), psize.height()); } } protected static Constraint constraint (Element<?> elem) { return (Constraint)Asserts.checkNotNull( elem.constraint(), "Elements in AbsoluteLayout must have a constraint."); } protected static final Dimension ZERO = new Dimension(0, 0); }
package leetcode; public class NO_029 { public static void main(String[] args) { //System.out.println(1 << 30); System.out.println(new NO_029().divide(1010369383, -2147483648)); //System.out.println(new NO_029().recurse(3, 2, 2, 1)); //System.out.println(Integer.toBinaryString(Integer.MIN_VALUE)); //System.out.println(~Integer.MIN_VALUE); //System.out.println(new NO_029().recurse(Integer.MAX_VALUE, 1, 1, 1)); //System.out.println(new NO_029().divid(10, 2, 1)); } public int divide(int a, int b) { if (b == 0) return Integer.MAX_VALUE; //Integer.MINVALUE()intlong long dividend = (long)a; long divisor = (long)b; long sign = (dividend >> 31) + (divisor >> 31); dividend = (dividend >> 31) == 0 ? dividend : (~dividend + 1); divisor = (divisor >> 31) == 0 ? divisor : (~divisor + 1); if(divisor > dividend) return 0; long quotient = recurse(dividend, divisor, divisor, 1); quotient = (sign == -1) ? (~quotient + 1) : quotient; //a=Integer.MINVALUE,b=-1 if(quotient >= Integer.MIN_VALUE && quotient <= Integer.MAX_VALUE) return (int)quotient; else return Integer.MAX_VALUE; } /* * * (2) * * (7,2,2,1)->(7,4,2,2)->(7,8,2,4)->(3>2)->(3,2,2,1)+2->(3,4,2,1)+2->(1<2)->return 1+2=3; */ public long recurse(long dividend, long divisor, long init, long quotient){ if(dividend > divisor){ return recurse(dividend, divisor << 1, init, quotient << 1); } else if(dividend == divisor){ return quotient; } else{ if(dividend-(divisor >> 1) < init) return quotient>>1; else return recurse(dividend - (divisor >> 1), init, init, 1) + (quotient >> 1); } } }
package gcm.gui.modelview.movie;

import gcm.parser.GCMFile;
import gcm.util.GlobalConstants;
import gcm.gui.modelview.movie.SerializableScheme;

import java.awt.Color;
import java.awt.GradientPaint;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;

/**
 * this class contains all of the information related to a movie appearance scheme
 * that affects all cells on the graph (ie, species or components, etc.)
 *
 * this combines/re-works a lot of the stuff that tyler had spread out among myriad classes
 *
 * @author jason
 */
public class MovieScheme {

    //CLASS VARIABLES

    //contains the appearance data for each species
    //these have the full name, including a component or grid location prefix
    private HashMap<String, Scheme> speciesSchemes;

    //cached list of every known species ID; used to skip schemes for species
    //that don't exist (set lazily via createOrUpdateSpeciesScheme/populate)
    private ArrayList<String> allSpecies;

    //CLASS METHODS

    /**
     * constructor
     */
    public MovieScheme() {

        speciesSchemes = new HashMap<String, Scheme>();
    }

    //SCHEME METHODS

    /**
     * adds a species color scheme
     * it's a wrapper for the applySpeciesColorScheme function
     *
     * @param speciesID
     * @param colorGradient
     * @param min
     * @param max
     * @param applyTo
     * @param gcm
     * @param cellType
     */
    public void addSpeciesColorScheme(String speciesID, GradientPaint colorGradient,
            int min, int max, String applyTo, GCMFile gcm, String cellType) {

        //captured by the anonymous callback below, so it must be final
        final GradientPaint gradient = colorGradient;

        SchemeApplyFunction schemeApply = new SchemeApplyFunction() {
            public void apply(String speciesID) {
                speciesSchemes.get(speciesID).setColorGradient(gradient);
            }
        };

        applySpeciesSchemeElement(schemeApply, speciesID, min, max, applyTo, gcm, cellType, false);
    }

    /**
     * removes a color scheme for the given species
     * it's a wrapper for the applySpeciesColorScheme function
     *
     * @param speciesID
     */
    public void removeSpeciesColorScheme(String speciesID, String cellType, String applyTo, GCMFile gcm) {

        SchemeApplyFunction schemeApply = new SchemeApplyFunction() {
            public void apply(String speciesID) {
                //null-guard: the scheme may never have been created for this species
                if (speciesSchemes.get(speciesID) != null)
                    speciesSchemes.get(speciesID).setColorGradient(null);
            }
        };

        applySpeciesSchemeElement(schemeApply, speciesID, 0, 0, applyTo, gcm, cellType, true);
    }

    /**
     * adds an opacity scheme to the overall species scheme
     *
     * @param speciesID
     * @param min
     * @param max
     * @param applyTo
     * @param gcm
     * @param cellType
     */
    public void addSpeciesOpacityScheme(String speciesID, int min, int max,
            String applyTo, GCMFile gcm, String cellType) {

        SchemeApplyFunction schemeApply = new SchemeApplyFunction() {
            public void apply(String speciesID) {
                speciesSchemes.get(speciesID).setOpacityState(true);
            }
        };

        applySpeciesSchemeElement(schemeApply, speciesID, min, max, applyTo, gcm, cellType, false);
    }

    /**
     * removes an opacity scheme from the overall species scheme
     * @param speciesID
     * @param cellType
     * @param applyTo
     */
    public void removeSpeciesOpacityScheme(String speciesID, String cellType, String applyTo, GCMFile gcm) {

        SchemeApplyFunction schemeApply = new SchemeApplyFunction() {
            public void apply(String speciesID) {
                if (speciesSchemes.get(speciesID) != null)
                    speciesSchemes.get(speciesID).setOpacityState(false);
            }
        };

        applySpeciesSchemeElement(schemeApply, speciesID, 0, 0, applyTo, gcm, cellType, true);
    }

    /**
     * adds a size scheme to the overall species scheme
     *
     * @param speciesID
     * @param min
     * @param max
     * @param applyTo
     * @param gcm
     * @param cellType
     */
    public void addSpeciesSizeScheme(String speciesID, int min, int max,
            String applyTo, GCMFile gcm, String cellType) {

        SchemeApplyFunction schemeApply = new SchemeApplyFunction() {
            public void apply(String speciesID) {
                speciesSchemes.get(speciesID).setSizeState(true);
            }
        };

        applySpeciesSchemeElement(schemeApply, speciesID, min, max, applyTo, gcm, cellType, false);
    }

    /**
     * removes a size scheme from the overall species scheme
     *
     * @param speciesID
     * @param cellType
     * @param applyTo
     * @param gcm
     */
    public void removeSpeciesSizeScheme(String speciesID, String cellType, String applyTo, GCMFile gcm) {

        SchemeApplyFunction schemeApply = new SchemeApplyFunction() {
            public void apply(String speciesID) {
                if (speciesSchemes.get(speciesID) != null)
                    speciesSchemes.get(speciesID).setSizeState(false);
            }
        };

        applySpeciesSchemeElement(schemeApply, speciesID, 0, 0, applyTo, gcm, cellType, true);
    }

    /**
     * uses anonymous functions to update the scheme object for the particular species scheme
     * given the species ID passed in
     *
     * @param schemeApply function that applies the scheme
     * @param speciesID
     * @param min
     * @param max
     * @param applyTo
     * @param gcm
     * @param cellType
     * @param remove boolean representing addition or removal of the scheme
     */
    public void applySpeciesSchemeElement(SchemeApplyFunction schemeApply, String speciesID,
            int min, int max, String applyTo, GCMFile gcm, String cellType, boolean remove) {

        //single-target case: only this one species/component/location is touched
        if (applyTo.equals("this component only")
                || applyTo.equals("this location only")
                || applyTo.equals("this species only")) {

            if (remove) {
                schemeApply.apply(speciesID);
            }
            else {
                schemeApply.apply(speciesID);
                speciesSchemes.get(speciesID).setMin(min);
                speciesSchemes.get(speciesID).setMax(max);
            }
        }
        //if applyTo is for all cells of that type or model or whatever
        else {

            //take off the speciesID's component prefix
            //this is done so that other component or grid location prefixes can be added
            String[] speciesParts = speciesID.split("__");
            String speciesIDNoPrefix = speciesID.replace(new String(speciesParts[0] + "__"), "");
            String compID = speciesParts[0];

            //if the user selected to change components
            if (cellType.equals(GlobalConstants.COMPONENT)) {

                for (Map.Entry<String, Properties> component : gcm.getComponents().entrySet()) {

                    if (component != null) {

                        //if the component has the same GCM as the component whose appearance
                        //was just altered via the scheme chooser panel
                        if (component.getValue().getProperty("gcm")
                                .equals(gcm.getComponents().get(compID).getProperty("gcm"))) {

                            speciesID = new String(component.getKey() + "__" + speciesIDNoPrefix);

                            //NOTE(review): unlike the grid branch below, the allSpecies
                            //existence check is commented out here — verify intentional
                            //if (!allSpecies.contains(speciesID)) continue;

                            if (remove) {
                                schemeApply.apply(speciesID);
                            }
                            else {
                                //add a scheme with this other species that's part of the same GCM
                                //as the component
                                this.createOrUpdateSpeciesScheme(speciesID, null);
                                schemeApply.apply(speciesID);
                                speciesSchemes.get(speciesID).setMin(min);
                                speciesSchemes.get(speciesID).setMax(max);
                            }
                        }
                    }
                }
            }
            //if the user selected to change grid rectangles
            else if (cellType.equals(GlobalConstants.GRID_RECTANGLE)) {

                //loop through every grid location
                //add a new scheme for each grid rectangle
                for (int row = 0; row < gcm.getGrid().getNumRows(); ++row) {
                    for (int col = 0; col < gcm.getGrid().getNumCols(); ++col) {

                        String gridPrefix = "ROW" + row + "_COL" + col;
                        speciesID = new String(gridPrefix + "__" + speciesIDNoPrefix);

                        //skip species that don't actually exist at this location
                        if (allSpecies != null && !allSpecies.contains(speciesID)) continue;

                        if (remove) {
                            schemeApply.apply(speciesID);
                        }
                        else {
                            //add a scheme with this other species at another grid location
                            this.createOrUpdateSpeciesScheme(speciesID, null);
                            schemeApply.apply(speciesID);
                            speciesSchemes.get(speciesID).setMin(min);
                            speciesSchemes.get(speciesID).setMax(max);
                        }
                    }
                }
            }
            //if the user selected to change species
            else if (cellType.equals(GlobalConstants.SPECIES)) {

                //loop through every species in the gcm
                //add/remove a scheme for that species
                for (String specID : gcm.getSpecies().keySet()) {

                    if (remove) {
                        schemeApply.apply(specID);
                    }
                    else {
                        this.createOrUpdateSpeciesScheme(specID, null);
                        schemeApply.apply(specID);
                        speciesSchemes.get(specID).setMin(min);
                        speciesSchemes.get(specID).setMax(max);
                    }
                }
            }
        }
    }

    /**
     * returns the scheme corresponding to speciesID
     * @param speciesID
     * @return
     */
    public Scheme getSpeciesScheme(String speciesID) {

        return speciesSchemes.get(speciesID);
    }

    /**
     * creates a new scheme if one doesn't exist
     *
     * @param speciesID
     */
    public void createOrUpdateSpeciesScheme(String speciesID, ArrayList<String> allSpecies) {

        //refresh the cached species list when the caller supplies one
        if (allSpecies != null)
            this.allSpecies = allSpecies;

        if (speciesSchemes.get(speciesID) == null)
            speciesSchemes.put(speciesID, new Scheme());
    }

    /**
     * returns the hash map of species schemes
     * @return the hashmap of species schemes
     */
    public SerializableScheme[] getAllSpeciesSchemes() {

        SerializableScheme[] schemes = new SerializableScheme[speciesSchemes.size()];

        Iterator<Map.Entry<String, Scheme>> speciesSchemesIter = speciesSchemes.entrySet().iterator();

        //flatten each live Scheme into its serializable counterpart
        for (int index = 0; index < speciesSchemes.size(); ++index) {

            Map.Entry<String, Scheme> entry = speciesSchemesIter.next();

            schemes[index] = new SerializableScheme();

            //0/0 is the sentinel meaning "no color gradient" (see populate below)
            if (entry.getValue().getColorGradient() == null) {
                schemes[index].startColor = 0;
                schemes[index].endColor = 0;
            }
            else {
                schemes[index].startColor = entry.getValue().getColorGradient().getColor1().getRGB();
                schemes[index].endColor = entry.getValue().getColorGradient().getColor2().getRGB();
            }

            schemes[index].min = entry.getValue().getMin();
            schemes[index].max = entry.getValue().getMax();
            schemes[index].opacityState = entry.getValue().getOpacityState();
            schemes[index].sizeState = entry.getValue().getSizeState();
            schemes[index].name = entry.getKey();
        }

        return schemes;
    }

    /**
     * adds schemes to the movie scheme
     * this data comes from a saved file
     *
     * @param schemes
     */
    public void populate(SerializableScheme[] schemes, ArrayList<String> allSpecies) {

        for (SerializableScheme scheme : schemes) {

            //make sure not to load schemes for species that no longer exist
            if (allSpecies.contains(scheme.name) == false) continue;

            GradientPaint gradient = null;

            //startColor == endColor == 0 is the "no gradient" sentinel
            if (!(scheme.startColor == 0 && scheme.endColor == 0)) {

                gradient = new GradientPaint(0.0f, 0.0f, new Color(scheme.startColor),
                        0.0f, 50.0f, new Color(scheme.endColor));
            }

            Scheme speciesScheme =
                    new Scheme(gradient, scheme.opacityState, scheme.sizeState, scheme.min, scheme.max);

            speciesSchemes.put(scheme.name, speciesScheme);
        }
    }

    //APPEARANCE METHODS

    /**
     * returns a MovieAppearance object which has the data that the graph needs
     * in order to change the appearance of the cell during the movie
     *
     * @param ID
     * @param cellType
     * @return the appearance at
     */
    public MovieAppearance getAppearance(String cellID, String cellType,
            int frameIndex, HashMap<String, ArrayList<Double>> speciesTSData) {

        HashMap<String, Scheme> cellSchemes = getSchemesWithinCell(cellID, cellType);

        if (cellSchemes.size() <= 0) return null;
        else if (cellSchemes.size() == 1) {

            //if there's just one scheme, use the frame index and the scheme
            //to create an appearance to send back
            Map.Entry<String, Scheme> cellScheme = cellSchemes.entrySet().iterator().next();

            String speciesID = cellScheme.getKey();
            int min = cellScheme.getValue().getMin();
            int max = cellScheme.getValue().getMax();
            GradientPaint colorGradient = cellScheme.getValue().getColorGradient();
            boolean opacityState = cellScheme.getValue().getOpacityState();
            boolean sizeState = cellScheme.getValue().getSizeState();

            if (speciesTSData.get(speciesID) == null) return null;

            //number of molecules at this time instance
            double speciesValue = speciesTSData.get(speciesID).get(frameIndex);

            //how far along this value is on the gradient spectrum of min to max
            //NOTE(review): if max == min this divides by zero (NaN/Infinity) —
            //verify callers guarantee max > min
            double gradientValue = (double)((speciesValue - min) / (max - min));

            //now calculate the correct appearance along the gradient to use
            return getIntermediateAppearance(colorGradient, gradientValue, opacityState, sizeState, cellType);
        }
        else if (cellSchemes.size() > 1) {

            //if there's more than one scheme the colors need to be added together
            //this may change to something else in time to make the colors separable
            MovieAppearance cellAppearance = new MovieAppearance();

            //loop through every scheme in the cell and add them together
            //to get the final cell appearance
            for (Map.Entry<String, Scheme> cellScheme : cellSchemes.entrySet()) {

                String speciesID = cellScheme.getKey();
                int min = cellScheme.getValue().getMin();
                int max = cellScheme.getValue().getMax();
                GradientPaint colorGradient = cellScheme.getValue().getColorGradient();
                boolean opacityState = cellScheme.getValue().getOpacityState();
                boolean sizeState = cellScheme.getValue().getSizeState();

                if (speciesTSData.get(speciesID) == null) continue;

                //number of molecules at this time instance
                double speciesValue = speciesTSData.get(speciesID).get(frameIndex);

                //how far along this value is on the gradient spectrum of min to max
                double gradientValue = (double)((speciesValue - min) / (max - min));

                //now calculate the correct appearance along the gradient to use
                cellAppearance.add(getIntermediateAppearance(
                        colorGradient, gradientValue, opacityState, sizeState, cellType));
            }

            return cellAppearance;
        }

        return null;
    }

    /**
     * returns a hashmap of all of the schemes that apply to the cell passed in
     *
     * @param cellID
     * @param cellType
     * @return
     */
    private HashMap<String, Scheme> getSchemesWithinCell(String cellID, String cellType) {

        //this will store all species schemes that exist within this component
        HashMap<String, Scheme> cellSchemes = new HashMap<String, Scheme>();

        //loop through the species schemes
        //if any of these apply to the cell with cellID
        //then add that scheme to the map of cell schemes
        for (Map.Entry<String, Scheme> speciesScheme : speciesSchemes.entrySet()) {

            String compID = "";

            //plain species schemes are keyed by the species ID itself
            if (cellType.equals(GlobalConstants.SPECIES))
                compID = speciesScheme.getKey();
            else {

                //take off the speciesID's component prefix
                //this is done so that other component or grid location prefixes can be added
                String[] speciesParts = speciesScheme.getKey().split("__");

                //continue if there's no component prefix
                //or if the prefix is inconsistent with the cell type
                if (speciesParts.length < 2
                        || (cellType.equals(GlobalConstants.COMPONENT) && speciesParts[0].contains("ROW")))
                    continue;

                compID = speciesParts[0];
            }

            //if these are equal then this scheme is for this cell
            if (compID.equals(cellID))
                cellSchemes.put(speciesScheme.getKey(), speciesScheme.getValue());
        }

        return cellSchemes;
    }

    /**
     * calculates a color along the gradient
     * this is essentially a re-located function that tyler wrote
     *
     * @param colorGradient the color gradient
     * @param gradientValue the location along the gradient (on 0 to 1)
     * @return the intermediate color
     */
    private MovieAppearance getIntermediateAppearance(
            GradientPaint colorGradient, double gradientValue,
            Boolean opacityState, Boolean sizeState, String cellType) {

        Color startColor = null, endColor = null;

        if (colorGradient != null) {
            startColor = colorGradient.getColor1();
            endColor = colorGradient.getColor2();
        }

        //fixed endpoints for the opacity and size interpolations
        Double startOpacity = 0.025;
        Double endOpacity = 0.75;
        Double startSize = 1.0;
        Double endSize = GlobalConstants.DEFAULT_COMPONENT_WIDTH + 20.0;

        if (cellType.equals(GlobalConstants.SPECIES)) {
            endSize = GlobalConstants.DEFAULT_SPECIES_WIDTH + 20.0;
        }

        MovieAppearance newAppearance = new MovieAppearance();

        //clamp below the gradient's start...
        if (gradientValue <= 0.0) {

            newAppearance.color = startColor;
            if (opacityState == true) newAppearance.opacity = startOpacity;
            if (sizeState == true) newAppearance.size = startSize;
        }
        //...and above its end
        else if (gradientValue >= 1.0) {

            newAppearance.color = endColor;
            if (opacityState == true) newAppearance.opacity = endOpacity;
            if (sizeState == true) newAppearance.size = endSize;
        }
        //linear interpolation in between
        else {

            float oneMinusRatio = (float)1.0 - (float)gradientValue;

            //COLOR
            if (startColor != null && endColor != null) {

                int newRed = (int)Math.round(
                        startColor.getRed() * oneMinusRatio + endColor.getRed() * gradientValue);
                if (newRed > 255) newRed = 255;

                int newGreen = (int)Math.round(
                        startColor.getGreen() * oneMinusRatio + endColor.getGreen() * gradientValue);
                if (newGreen > 255) newGreen = 255;

                int newBlue = (int)Math.round(
                        startColor.getBlue() * oneMinusRatio + endColor.getBlue() * gradientValue);
                if (newBlue > 255) newBlue = 255;

                newAppearance.color = new Color(newRed, newGreen, newBlue);
            }
            else newAppearance.color = endColor;

            //OPACITY
            if (startOpacity != null && endOpacity != null && opacityState == true)
                newAppearance.opacity = startOpacity * oneMinusRatio + endOpacity * gradientValue;

            //SIZE
            if (startSize != null && endSize != null && sizeState == true)
                newAppearance.size = startSize * oneMinusRatio + endSize * gradientValue;
        }

        return newAppearance;
    }

    /**
     * empties the scheme hashmap
     */
    public void clearAppearances() {

        speciesSchemes = new HashMap<String, Scheme>();
    }

    //SCHEME CLASS

    /**
     * per-species appearance settings: an optional color gradient plus
     * opacity/size toggles and the molecule-count range they map over
     */
    public class Scheme {

        GradientPaint colorGradient;
        boolean opacityState, sizeState;
        int min, max;

        /**
         * constructor
         */
        public Scheme() {

            min = 0;
            max = 20;
            colorGradient = null;
            opacityState = false;
            sizeState = false;
        }

        /**
         * constructor
         * @param colorGradient
         * @param min
         * @param max
         */
        public Scheme(GradientPaint colorGradient, Boolean opacityState, Boolean sizeState, int min, int max) {

            this();
            this.min = min;
            this.max = max;

            //null arguments leave the corresponding default from this() untouched
            if (colorGradient != null)
                this.colorGradient = colorGradient;

            if (opacityState != null)
                this.opacityState = opacityState;

            if (sizeState != null)
                this.sizeState = sizeState;
        }

        //BORING GET/SET METHODS

        /**
         * @param colorGradient color gradient to be set
         */
        public void setColorGradient(GradientPaint colorGradient) {
            this.colorGradient = colorGradient;
        }

        /**
         * returns the scheme's color gradient
         * @return the scheme's color gradient
         */
        public GradientPaint getColorGradient() {
            return colorGradient;
        }

        /**
         * set whether the opacity changes or not
         * @param opacityState
         */
        public void setOpacityState(boolean opacityState) {
            this.opacityState = opacityState;
        }

        /**
         * @return whether opacity changes or not
         */
        public boolean getOpacityState() {
            return opacityState;
        }

        /**
         * set whether the size changes or not
         * @param sizeState
         */
        public void setSizeState(boolean sizeState) {
            this.sizeState = sizeState;
        }

        /**
         * @return whether size changes or not
         */
        public boolean getSizeState() {
            return sizeState;
        }

        /**
         * @param min the minimum visible number of molecules wrt the color gradient
         */
        public void setMin(int min) {
            this.min = min;
        }

        /**
         * @return the minimum visible number of molecules
         */
        public int getMin() {
            return min;
        }

        /**
         * @param max the saturating number of molecules wrt the color gradient
         */
        public void setMax(int max) {
            this.max = max;
        }

        /**
         * @return the saturating number of molecules
         */
        public int getMax() {
            return max;
        }
    }

    //SCHEME APPLY FUNCTION INTERFACE

    //callback used by applySpeciesSchemeElement to apply one scheme mutation
    //to each targeted species ID
    interface SchemeApplyFunction {
        void apply(String speciesID);
    }
}
package org.judal.hbase; import java.io.IOException; import java.sql.Types; import java.util.Iterator; import java.util.HashSet; import java.util.Set; import javax.jdo.FetchGroup; import javax.jdo.FetchPlan; import javax.jdo.JDOException; import javax.jdo.JDOUnsupportedOptionException; import javax.jdo.PersistenceManager; import javax.jdo.metadata.PrimaryKeyMetadata; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import com.knowgate.debug.DebugFile; import org.judal.metadata.ColumnDef; import org.judal.metadata.TableDef; import org.judal.serialization.BytesConverter; import org.judal.storage.TableDataSource; import org.judal.storage.ArrayListRecordSet; import org.judal.storage.Param; import org.judal.storage.ReadOnlyBucket; import org.judal.storage.Record; import org.judal.storage.StorageObjectFactory; import org.judal.storage.RecordSet; import org.judal.storage.Stored; import org.judal.storage.Table; public class HBTable implements Table { private String sTsc; private HTable oTbl; private HBTableDataSource oCfg; private Class<? extends Record> oCls; private HashSet<HBIterator> oItr; public HBTable(HBTableDataSource hCfg, HTable hTbl, Class<? 
extends Record> cRecordClass) { oCfg = hCfg; oTbl = hTbl; sTsc = null; oItr = null; oCls = cRecordClass; } @Override public String name() { try { return (String) BytesConverter.fromBytes(oTbl.getTableName(), Types.VARCHAR); } catch (IOException e) { return null; } } public HTable getTable() { return oTbl; } @Override public ColumnDef[] columns() { ColumnDef[] oLst; try { if (null==getDataSource().getMetaData()) return null; oLst = getDataSource().getMetaData().getColumns(name()); } catch (Exception xcpt) { if (DebugFile.trace) DebugFile.writeln("HBTable.columns() "+xcpt.getClass().getName()+" "+xcpt.getMessage()); oLst = null; } return oLst; } public TableDataSource getDataSource() { return oCfg; } @Override public void close() throws JDOException { try { oTbl.close(); oCfg.openedTables().remove(oTbl); } catch (IOException ioe) { throw new JDOException(ioe.getMessage(),ioe); } } @Override public boolean exists(Object key) throws NullPointerException, IllegalArgumentException, JDOException { Object value; if (key==null) throw new NullPointerException("HBTable.exists() Key cannot be null"); if (key instanceof Param) value = ((Param) key).getValue(); else value = key; if (value==null) throw new NullPointerException("HBTable.exists() Key value cannot be null"); try { return oTbl.exists(new Get(BytesConverter.toBytes(key))); } catch (IOException ioe) { throw new JDOException(ioe.getMessage(),ioe); } } @Override public boolean exists(Param... 
keys) throws JDOException { if (keys.length>1) throw new JDOUnsupportedOptionException("HBase can only use a single column as index at a time"); if (keys[0].getValue()==null) throw new NullPointerException("HBTable.exists() Key value cannot be null"); try { return oTbl.exists(new Get(BytesConverter.toBytes(keys[0]))); } catch (IOException ioe) { throw new JDOException(ioe.getMessage(),ioe); } } @Override public boolean load(Object key, Stored target) throws JDOException { if (DebugFile.trace) { DebugFile.writeln("Begin HBTable.load("+key+")"); DebugFile.incIdent(); } boolean retval = true; Record oRow = (Record) target; Get oGet = new Get(BytesConverter.toBytes(key)); try { Result oRes = oTbl.get(oGet); for (ColumnDef oCol : columns()) { KeyValue oKvl = oRes.getColumnLatest(BytesConverter.toBytes(oCol.getFamily()), BytesConverter.toBytes(oCol.getName())); if (oKvl!=null) if (oKvl.getValue()!=null) oRow.put(oCol.getName(), BytesConverter.fromBytes(oKvl.getValue(), oCol.getType())); if (DebugFile.trace) { DebugFile.writeln("KeyValue == "+oKvl); if (oKvl==null) DebugFile.writeln("Result.getColumnLatest("+oCol.getFamily()+","+oCol.getName()+") == null"); else if (oKvl.getValue()==null) DebugFile.writeln("Result.getColumnLatest("+oCol.getFamily()+","+oCol.getName()+").getValue() is null"); else DebugFile.writeln("Result.getColumnLatest("+oCol.getFamily()+","+oCol.getName()+").getValue() == "+BytesConverter.fromBytes(oKvl.getValue(), oCol.getType())); } } if (DebugFile.trace) { DebugFile.decIdent(); DebugFile.writeln("End HBTable.load()"); } } catch (IOException ioe) { if (DebugFile.trace) { DebugFile.decIdent(); DebugFile.writeln("IOException "+ioe.getMessage()); } throw new JDOException(ioe.getMessage(),ioe); } return retval; } @Override public void store(Stored oStored) throws JDOException { Record oRow = (Record) oStored; if (DebugFile.trace) { DebugFile.writeln("Begin HBTable.store("+oRow.getTableName()+"."+oRow.getKey()+")"); DebugFile.incIdent(); } // 
oRow.checkConstraints(getDataSource()); final byte[] byPK = BytesConverter.toBytes(oRow.getKey()); Put oPut = new Put(byPK); try { for (ColumnDef oCol : columns()) { Object oObj = oRow.apply(oCol.getName()); if (oObj!=null) { if (DebugFile.trace) { DebugFile.writeln("Put.add("+oCol.getFamily()+","+oCol.getName()+",toBytes("+oRow.getClass().getName()+".apply("+oCol.getName()+"),"+oCol.getType()+"))"); DebugFile.writeln(oCol.getName()+"="+oObj.toString()); } oPut.add(BytesConverter.toBytes(oCol.getFamily()), BytesConverter.toBytes(oCol.getName()), BytesConverter.toBytes(oObj, oCol.getType())); } else { if (DebugFile.trace) DebugFile.writeln(oRow.getClass().getName()+".apply("+oCol.getName()+") == null"); } } oTbl.put(oPut); } catch (IOException ioe) { if (DebugFile.trace) { DebugFile.writeln("IOException "+ioe.getMessage()); DebugFile.decIdent(); } throw new JDOException(ioe.getMessage(),ioe); } if (DebugFile.trace) { DebugFile.decIdent(); DebugFile.writeln("End HBTable.store()"); } } @Override public void insert(Param... 
aParams) throws JDOException {
    if (DebugFile.trace) {
        DebugFile.writeln("Begin HBTable.insert(Param...)");
        DebugFile.incIdent();
    }
    // oRow.checkConstraints(getDataSource());
    // Locate the primary-key parameter; its value becomes the HBase row key.
    byte[] byPK = null;
    for (Param oPar : aParams) {
        if (oPar.isPrimaryKey()) {
            byPK = BytesConverter.toBytes(oPar.getValue());
            break;
        }
    }
    if (null==byPK)
        throw new JDOException("No value supplied for primary key among insert parameters");
    Put oPut = new Put(byPK);
    try {
        // Add one cell (family:qualifier -> value) per non-null parameter.
        // NOTE(review): null-valued parameters are silently skipped here,
        // whereas update() below writes an empty byte[] for them — confirm
        // that asymmetry is intended.
        for (Param oPar : aParams) {
            Object oObj = oPar.getValue();
            if (oObj!=null) {
                oPut.add(BytesConverter.toBytes(oPar.getFamily()),
                         BytesConverter.toBytes(oPar.getName()),
                         BytesConverter.toBytes(oObj, oPar.getType()));
            }
        }
        oTbl.put(oPut);
    } catch (IOException ioe) {
        if (DebugFile.trace) {
            DebugFile.writeln("IOException "+ioe.getMessage());
            DebugFile.decIdent();
        }
        throw new JDOException(ioe.getMessage(),ioe);
    }
    if (DebugFile.trace) {
        DebugFile.decIdent();
        DebugFile.writeln("End HBTable.insert()");
    }
}

/**
 * Delete the row whose key matches the given value.
 *
 * @param oKey the row key; may be a raw value or a Param whose value is used
 * @throws NullPointerException if the key (or its value) is null
 * @throws JDOException wrapping any IOException raised by the HBase client
 */
@Override
public void delete(Object oKey) throws NullPointerException, IllegalArgumentException, JDOException {
    if (oKey==null)
        throw new NullPointerException("HBTable.delete() Key cannot be null");
    Object oVal;
    if (oKey instanceof Param)
        oVal = ((Param) oKey).getValue();
    else
        oVal = oKey;
    if (oVal==null)
        throw new NullPointerException("HBTable.delete() Key value cannot be null");
    Delete oDel = new Delete(BytesConverter.toBytes(oVal));
    try {
        oTbl.delete(oDel);
    } catch (IOException ioe) {
        throw new JDOException(ioe.getMessage(),ioe);
    }
}

/**
 * Fetch by primary key with no row limit (delegates to the bounded overload).
 */
@Override
public <R extends Record> RecordSet<R> fetch(FetchGroup fetchGroup, String indexColumnName, Object valueSearched)
    throws JDOException {
    return fetch(fetchGroup, indexColumnName, valueSearched, Integer.MAX_VALUE, 0);
}

/**
 * Fetch a single row by primary key, reading only the columns named in the
 * fetch group. HBase only supports lookups on the primary key column.
 *
 * @param fetchGroup      columns to read (must be non-null and non-empty)
 * @param indexColumnName must match the primary key column (case-insensitive)
 * @param valueSearched   the row key value
 * @param maxrows         unused for a single-key Get
 * @param offset          unused for a single-key Get
 * @return a RecordSet containing zero or one record
 */
@Override
public <R extends Record> RecordSet<R> fetch(FetchGroup fetchGroup, String indexColumnName, Object valueSearched, int maxrows, int offset)
    throws JDOException {
    if (!indexColumnName.equalsIgnoreCase(getPrimaryKey().getColumn()))
        throw new JDOUnsupportedOptionException("HBase only supports queries by primary key");
    if (valueSearched==null)
        throw new NullPointerException("HBTable.fetch("+indexColumnName+") index value cannot be null");
    if (fetchGroup==null)
        throw new NullPointerException("HBTable.fetch("+indexColumnName+") columns list cannot be null");
    else if (fetchGroup.getMembers().size()==0)
        throw new NullPointerException("HBTable.fetch("+indexColumnName+") columns list cannot be empty");
    TableDef oDef = getDataSource().getMetaData().getTable(name());
    R oRow;
    try {
        oRow = (R) StorageObjectFactory.newRecord(oCls, oDef);
    } catch (NoSuchMethodException nsme) {
        throw new JDOException(nsme.getMessage(), nsme);
    }
    Set<String> members;
    // NOTE(review): fetchGroup==null is unreachable here — it was already
    // rejected above — so the record's default fetch group branch is dead code.
    if (fetchGroup==null)
        members = oRow.fetchGroup().getMembers();
    else
        members = fetchGroup.getMembers();
    if (DebugFile.trace) {
        String sColList = "";
        for (String sCol : members) sColList+=","+sCol;
        DebugFile.writeln("Begin HBTable.fetch("+indexColumnName+","+valueSearched+",["+sColList.substring(1)+"])");
        DebugFile.incIdent();
    }
    ArrayListRecordSet<R> oRst = new ArrayListRecordSet<R>((Class<R>) oCls);
    Get oGet = new Get(BytesConverter.toBytes(valueSearched));
    try {
        Result oRes = oTbl.get(oGet);
        if (oRes!=null) {
            if (!oRes.isEmpty()) {
                // Copy the latest cell of each requested column into the record.
                for (String sColName : members) {
                    ColumnDef oCol = getColumnByName(sColName);
                    KeyValue oKvl = oRes.getColumnLatest(BytesConverter.toBytes(oCol.getFamily()),
                                                         BytesConverter.toBytes(oCol.getName()));
                    if (oKvl!=null) {
                        if (oKvl.getValue()!=null) {
                            oRow.put(oCol.getName(), BytesConverter.fromBytes(oKvl.getValue(), oCol.getType()));
                        } else {
                            if (DebugFile.trace) DebugFile.writeln("Value is null");
                        }
                    } else {
                        if (DebugFile.trace) DebugFile.writeln("KeyValue is null");
                    }
                }
                oRst.add(oRow);
            } else {
                if (DebugFile.trace) DebugFile.writeln("Result is empty");
            }
        } else {
            if (DebugFile.trace) DebugFile.writeln("Result is null");
        }
    } catch (IOException ioe) {
        if (DebugFile.trace) DebugFile.decIdent();
        throw new JDOException(ioe.getMessage(),ioe);
    }
    if (DebugFile.trace) {
        DebugFile.decIdent();
        DebugFile.writeln("End HBTable.fetch() : "+String.valueOf(oRst.size()));
    }
    return oRst;
}

/**
 * Range-scan rows whose keys start with the given prefix, by padding the
 * prefix with "0"s and "9"s to form the scan bounds.
 * NOTE(review): despite its name and parameters, maxrows/offset are ignored
 * and ReadOnlyBucket.MAX_ROWS/0 are passed instead — confirm intended.
 */
public <R extends Record> RecordSet<R> last(FetchGroup cols, int maxrows, int offset, String orderByValue)
    throws JDOException {
    return fetch(cols, getPrimaryKey().getColumn(),
                 orderByValue+"00000000000000000000000000000000",
                 orderByValue+"99999999999999999999999999999999",
                 ReadOnlyBucket.MAX_ROWS, 0);
}

/** Set the Record subclass that fetches will instantiate. */
@Override
public void setClass(Class<? extends Stored> candidateClass) {
    oCls = (Class<? extends Record>) candidateClass;
}

/** Close one iterator previously obtained from iterator() and forget it. */
@Override
public void close(Iterator<Stored> iterator) {
    ((HBIterator) iterator).close();
    oItr.remove(iterator);
}

/** Close and forget every open iterator. */
@Override
public void closeAll() {
    for (HBIterator oHbi : oItr) oHbi.close();
    oItr.clear();
}

@SuppressWarnings("unchecked")
@Override
public Class getCandidateClass() {
    return oCls;
}

/** Not supported; always returns null. */
@Override
public FetchPlan getFetchPlan() {
    return null;
}

/** Not supported; always returns null. */
@Override
public PersistenceManager getPersistenceManager() {
    return null;
}

@Override
public boolean hasSubclasses() {
    return false;
}

/** Open a new table iterator and keep track of it so closeAll() can reach it. */
@Override
public Iterator<Stored> iterator() {
    HBIterator oHbi = new HBIterator(this);
    oItr.add(oHbi);
    return oHbi;
}

@Override
public int columnsCount() {
    return getDataSource().getMetaData().getTable(name()).getNumberOfColumns();
}

@Override
public ColumnDef getColumnByName(String columnName) {
    return getDataSource().getMetaData().getTable(name()).getColumnByName(columnName);
}

@Override
public int getColumnIndex(String columnName) {
    return getDataSource().getMetaData().getTable(name()).getColumnIndex(columnName);
}

@Override
public Class<? extends Record> getResultClass() {
    return oCls;
}

@Override
public String getTimestampColumnName() {
    return sTsc;
}

@Override
public void setTimestampColumnName(String columnName) throws IllegalArgumentException {
    sTsc = columnName;
}

@Override
public PrimaryKeyMetadata getPrimaryKey() {
    return getDataSource().getMetaData().getTable(name()).getPrimaryKeyMetadata();
}

/**
 * Count rows matching a key value. Since HBase only supports primary-key
 * lookups this is at most 1.
 */
@Override
public int count(String indexColumnName, Object valueSearched) throws JDOException {
    if (!indexColumnName.equalsIgnoreCase(getPrimaryKey().getColumn()))
        throw new JDOUnsupportedOptionException("HBase only supports queries by primary key");
    return exists(valueSearched) ? 1 : 0;
}

/** Range fetch with default limits (delegates to the bounded overload). */
@Override
public <R extends Record> RecordSet<R> fetch(FetchGroup cols, String indexColumnName, Object valueFrom, Object valueTo)
    throws JDOException, IllegalArgumentException {
    return fetch(cols, indexColumnName, valueFrom, valueTo, ReadOnlyBucket.MAX_ROWS, 0);
}

/**
 * Scan rows with keys in [valueFrom, valueTo), reading only the requested
 * columns, skipping the first {@code offset} rows and returning at most
 * {@code maxrows} of them.
 */
@Override
public <R extends Record> RecordSet<R> fetch(FetchGroup cols, String indexColumnName, Object valueFrom, Object valueTo, int maxrows, int offset)
    throws JDOException, IllegalArgumentException {
    if (!indexColumnName.equalsIgnoreCase(getPrimaryKey().getColumn()))
        throw new JDOUnsupportedOptionException("HBase only supports queries by primary key");
    ArrayListRecordSet<R> rst = new ArrayListRecordSet<R>((Class<R>) oCls);
    TableDef tdef = getDataSource().getMetaData().getTable(name());
    ColumnDef[] fetchCols = new ColumnDef[cols.getMembers().size()];
    Scan scn = new Scan();
    scn.setStartRow(BytesConverter.toBytes(valueFrom));
    scn.setStopRow (BytesConverter.toBytes(valueTo));
    int c = 0;
    // Restrict the scan to the requested columns and remember their defs.
    for (Object colName : cols.getMembers()) {
        ColumnDef cdef = getColumnByName((String) colName);
        fetchCols[c++] = cdef;
        scn.addColumn(BytesConverter.toBytes(cdef.getFamily()), BytesConverter.toBytes(cdef.getName()));
    }
    ResultScanner rsc = null;
    int rowCount = 0;
    final int maxrow = maxrows+offset;
    try {
        rsc = oTbl.getScanner(scn);
        for (Result res=rsc.next(); res!=null && rowCount<maxrow; res=rsc.next()) {
            if (++rowCount>offset) {
                R row;
                try {
                    row = (R) StorageObjectFactory.newRecord(oCls, tdef);
                } catch (NoSuchMethodException nsme) {
                    throw new JDOException(nsme.getMessage(), nsme);
                }
                for (ColumnDef oCol : fetchCols) {
                    KeyValue oKvl = res.getColumnLatest(BytesConverter.toBytes(oCol.getFamily()),
                                                        BytesConverter.toBytes(oCol.getName()));
                    if (oKvl!=null) {
                        if (oKvl.getValue()!=null)
                            row.put(oCol.getName(), BytesConverter.fromBytes(oKvl.getValue(), oCol.getType()));
                    }
                } // next
                rst.add(row);
            } // fi (nRowCount>iOffset)
        } // next
    } catch (IOException ioe) {
        throw new JDOException(ioe.getMessage(),ioe);
    } finally {
        if(rsc!=null) rsc.close();
    }
    return rst;
}

/**
 * Overwrite columns of a single row identified by exactly one where-parameter
 * (the row key). Null values are written as empty byte arrays.
 *
 * @return always 1 (a Put targets exactly one row)
 */
public int update(Param[] aValues, Param[] aWhere) throws JDOException {
    if (aWhere==null)
        throw new NullPointerException("HBTable.update() where clause cannot be null");
    if (aWhere.length!=1)
        throw new IllegalArgumentException("HBTable updates must use exactly one parameter");
    Put oPut = new Put(BytesConverter.toBytes((String) aWhere[0].getValue()));
    try {
        for (Param v : aValues) {
            if (v.getValue()==null)
                oPut.add(BytesConverter.toBytes(v.getFamily()), BytesConverter.toBytes(v.getName()), new byte[0]);
            else
                oPut.add(BytesConverter.toBytes(v.getFamily()), BytesConverter.toBytes(v.getName()),
                         BytesConverter.toBytes(v.getValue(), v.getType()));
        }
        oTbl.put(oPut);
    } catch (IOException ioe) {
        throw new JDOException(ioe.getMessage(), ioe);
    }
    return 1;
}
}
// B a r s B u i l d e r //
//
// Contact author at herve.bitteur@laposte.net to report bugs & suggestions.
//
package omr.sheet;

import omr.Main;
import omr.check.CheckBoard;
import omr.check.CheckSuite;
import omr.check.FailureResult;
import omr.constant.Constant;
import omr.constant.ConstantSet;
import omr.glyph.Glyph;
import omr.glyph.GlyphLag;
import omr.glyph.GlyphModel;
import omr.glyph.GlyphSection;
import omr.glyph.GlyphsBuilder;
import omr.glyph.Shape;
import omr.glyph.ui.GlyphBoard;
import omr.glyph.ui.GlyphLagView;
import omr.lag.JunctionDeltaPolicy;
import omr.lag.RunBoard;
import omr.lag.ScrollLagView;
import omr.lag.SectionBoard;
import omr.lag.SectionsBuilder;
import omr.lag.VerticalOrientation;
import omr.score.Barline;
import omr.score.Measure;
import omr.score.Score;
import omr.score.ScoreConstants;
import omr.score.ScorePart;
import omr.score.Staff;
import omr.score.System;
import omr.score.SystemPart;
import omr.score.UnitDimension;
import omr.score.visitor.ScoreFixer;
import omr.score.visitor.SheetPainter;
import omr.selection.Selection;
import omr.selection.SelectionHint;
import omr.selection.SelectionTag;
import static omr.selection.SelectionTag.*;
import omr.step.StepException;
import omr.stick.Stick;
import omr.stick.StickSection;
import omr.ui.BoardsPane;
import omr.ui.PixelBoard;
import static omr.ui.field.SpinnerUtilities.*;
import omr.util.Dumper;
import omr.util.Logger;
import omr.util.TreeNode;

import java.awt.*;
import java.util.*;
import java.util.List;

/**
 * Class <code>BarsBuilder</code> handles the vertical lines that are recognized
 * as bar lines. This class uses a dedicated companion named {@link
 * omr.sheet.BarsChecker} which handles physical checks.
 *
 * <p> Input is provided by a list of vertical sticks retrieved from the
 * vertical lag.
 *
 * <p> Output is the collection of detected Bar lines.
 *
 * @author Herv&eacute; Bitteur
 * @version $Id$
 */
public class BarsBuilder
    extends GlyphModel
{
    /** Specific application parameters */
    private static final Constants constants = new Constants();

    /** Usual logger utility */
    private static final Logger logger = Logger.getLogger(BarsBuilder.class);

    /** Failure results assigned to sticks rejected by the various checks */
    private static final FailureResult NOT_SYSTEM_ALIGNED = new FailureResult(
        "Bar-NotSystemAligned");
    private static final FailureResult NOT_STAFF_ALIGNED = new FailureResult(
        "Bar-NotStaffAligned");
    private static final FailureResult SHORTER_THAN_STAFF_HEIGHT = new FailureResult(
        "Bar-ShorterThanStaffHeight");
    private static final FailureResult THICK_BAR_NOT_ALIGNED = new FailureResult(
        "Bar-ThickBarNotAligned");
    private static final FailureResult CANCELLED = new FailureResult(
        "Bar-Cancelled");

    /** Companion physical stick checker */
    private BarsChecker checker;

    /** Lag view on bars, if so desired */
    private GlyphLagView lagView;

    /** List of found bar sticks */
    private List<Stick> bars = new ArrayList<Stick>();

    /** Unused vertical sticks */
    private List<Stick> clutter;

    /** Sheet scale */
    private Scale scale;

    /** Related score */
    private Score score;

    /** Bars area, with retrieved vertical sticks */
    private VerticalArea barsArea;

    // BarsBuilder //
    /**
     * Prepare a bar retriever on the provided sheet
     *
     * @param sheet the sheet to process
     */
    public BarsBuilder (Sheet sheet)
    {
        super(sheet, new GlyphLag("vLag", new VerticalOrientation()));
    }

    // assignBraces //
    /**
     * Pass the braces symbols found, so that score parts can be defined
     *
     * @param braceLists the braces, system per system
     */
    public void assignBraces (List<List<Glyph>> braceLists)
    {
        int is = 0;

        // Build the SystemParts for each system
        for (SystemInfo systemInfo : sheet.getSystems()) {
            setSystemBraces(systemInfo.getScoreSystem(), braceLists.get(is++));
        }

        // Repaint the score view, if any (TBI)
        if (sheet.getScore()
                 .getView() != null) {
            sheet.getScore()
                 .getView()
                 .repaint();
        }
    }

    // buildInfo //
    /**
     * Retrieve and store the bars information on the provided sheet
     *
     * @throws StepException raised when step processing must stop, due to
     *                       encountered error
     */
    public void buildInfo ()
        throws StepException
    {
        // Stuff to be made available
        scale = sheet.getScale();
        sheet.getHorizontals();

        try {
            // Populate the vertical lag of runs
            lag.setVertexClass(StickSection.class);

            SectionsBuilder<GlyphLag, GlyphSection> lagBuilder;
            lagBuilder = new SectionsBuilder<GlyphLag, GlyphSection>(
                lag,
                new JunctionDeltaPolicy(
                    scale.toPixels(constants.maxDeltaLength)));
            lagBuilder.createSections(sheet.getPicture(), 0); // 0 = minRunLength
            sheet.setVerticalLag(lag);

            // Retrieve (vertical) sticks
            barsArea = new VerticalArea(
                sheet,
                lag,
                scale.toPixels(constants.maxBarThickness));
            clutter = new ArrayList<Stick>(barsArea.getSticks());

            // Allocate score
            createScore();

            // Delegate to BarsChecker companion
            checker = new BarsChecker(sheet);
            checker.retrieveMeasures(clutter, bars);

            // Check Measures using only score parameters
            checkMeasures();

            // Define score parts
            defineScoreParts();

            // Remove clutter glyphs from lag (they will be handled as specific
            // glyphs in the user view).
            for (Stick stick : clutter) {
                stick.destroy( /* cutSections => */
                false);
            }

            // Erase bar pixels from picture (not used for the time being)
            //////eraseBars();

            // Update score internal data
            score.accept(new ScoreFixer());

            if (logger.isFineEnabled()) {
                score.dump();
            }

            // Report number of systems & measures retrieved
            StringBuilder sb = new StringBuilder();
            sb.append(sheet.getSystems().size())
              .append(" systems");

            int nb = score.getLastSystem()
                          .getLastPart()
                          .getLastMeasure()
                          .getId();

            if (nb > 0) {
                sb.append(", ")
                  .append(nb)
                  .append(" measure");

                if (nb > 1) {
                    sb.append("s");
                }
            } else {
                sb.append(", no measure found");
            }

            logger.info(sb.toString());

            // Split everything, including horizontals, per system
            SystemSplit.computeSystemLimits(sheet);
            SystemSplit.splitHorizontals(sheet);
            SystemSplit.splitVerticalSections(sheet);

            // Assign the bar stick to the proper system glyphs collection
            GlyphsBuilder glyphsBuilder = sheet.getGlyphsBuilder();

            for (Stick stick : bars) {
                glyphsBuilder.insertGlyph(
                    stick,
                    sheet.getSystemAtY(stick.getContourBox().y));
            }
        } finally {
            // Display the resulting stickarea if so asked for
            if (constants.displayFrame.getValue() && (Main.getGui() != null)) {
                displayFrame();
            }
        }
    }

    // deassignGlyphShape //
    /**
     * Remove a bar together with all its related entities. This means removing
     * reference in the bars list of this builder, reference in the containing
     * SystemInfo, reference in the Measure it ends, and removing this Measure
     * itself if this (false) bar was the only ending bar left for the
     * Measure. The related stick must also be assigned a failure result.
     *
     * @param glyph the (false) bar glyph to deassign
     * @param record true if this action is to be recorded in the script
     */
    @Override
    public void deassignGlyphShape (Glyph glyph, boolean record)
    {
        if ((glyph.getShape() == Shape.THICK_BAR_LINE) ||
            (glyph.getShape() == Shape.THIN_BAR_LINE)) {
            Stick bar = getBarOf(glyph);

            if (bar == null) {
                return;
            } else {
                logger.info("Deassigning a " + glyph.getShape());
            }

            // Related stick has to be freed
            bar.setShape(null);
            bar.setResult(CANCELLED);

            // Remove from the internal all-bars list
            bars.remove(bar);

            // Remove from the containing SystemInfo
            SystemInfo system = checker.getSystemOf(bar, sheet);

            if (system == null) {
                return;
            }

            // Remove from the containing Measure
            System scoreSystem = system.getScoreSystem();

            for (TreeNode pNode : scoreSystem.getParts()) {
                SystemPart part = (SystemPart) pNode;

                if (checker.isPartEmbraced(part, bar)) {
                    for (Iterator mit = part.getMeasures()
                                            .iterator(); mit.hasNext();) {
                        Measure measure = (Measure) mit.next();

                        for (Iterator sit = measure.getBarline()
                                                   .getSticks()
                                                   .iterator(); sit.hasNext();) {
                            Stick stick = (Stick) sit.next();

                            if (stick == bar) {
                                // Remove the bar stick
                                if (logger.isFineEnabled()) {
                                    logger.fine(
                                        "Removing " + stick + " from " +
                                        measure);
                                }

                                sit.remove();

                                // Remove measure as well ?
                                if (measure.getBarline()
                                           .getSticks()
                                           .size() == 0) {
                                    if (logger.isFineEnabled()) {
                                        logger.fine("Removing " + measure);
                                    }

                                    mit.remove();
                                }

                                break;
                            }
                        }
                    }
                }
            }

            assignGlyphShape(glyph, null, false);

            // Update score internal data
            score.accept(new ScoreFixer());

            // Update the view accordingly
            if (lagView != null) {
                lagView.colorize();
                lagView.repaint();
            }
        } else {
            BarsBuilder.logger.warning(
                "No deassign meant for " + glyph.getShape() + " glyph");
        }
    }

    // deassignSetShape //
    /**
     * Remove a set of bars
     *
     * @param glyphs the collection of glyphs to be de-assigned
     * @param record true if this action is to be recorded in the script
     */
    @Override
    public void deassignSetShape (Collection<Glyph> glyphs, boolean record)
    {
        for (Glyph glyph : glyphs) {
            deassignGlyphShape(glyph, false);
        }
    }

    // defineScoreParts //
    /**
     * From system part, define the score parts
     * @throws StepException
     */
    public void defineScoreParts ()
        throws StepException
    {
        // First, make sure all the system are consistent wrt parts
        Integer NbOfParts = null;

        for (SystemInfo systemInfo : sheet.getSystems()) {
            int nb = systemInfo.getScoreSystem()
                               .getParts()
                               .size();

            if (NbOfParts == null) {
                NbOfParts = nb;
            } else if (NbOfParts != nb) {
                String msg = "Systems with different number of parts: " +
                             NbOfParts + " vs " + nb;
                logger.warning(msg);

                if (Main.getGui() != null) {
                    Main.getGui()
                        .displayWarning(msg);
                }

                throw new StepException(msg);
            }
        }

        // (Re)set the global ScorePart list accordingly
        List<ScorePart> partList = null;
        boolean         ok = true;

        for (SystemInfo systemInfo : sheet.getSystems()) {
            logger.fine(systemInfo.getScoreSystem().toString());

            if (partList == null) {
                // Build a ScorePart list based on the SystemPart list
                partList = new ArrayList<ScorePart>();

                for (TreeNode node : systemInfo.getScoreSystem()
                                               .getParts()) {
                    SystemPart sp = (SystemPart) node;
                    ScorePart  scorePart = new ScorePart(sp, score);
                    logger.fine("Adding " + scorePart);
                    partList.add(scorePart);
                }
            } else {
                // Check our ScorePart list is still ok
                int i = 0;

                for (TreeNode node : systemInfo.getScoreSystem()
                                               .getParts()) {
                    SystemPart sp = (SystemPart) node;
                    ScorePart  global = partList.get(i++);
                    ScorePart  scorePart = new ScorePart(sp, score);
                    logger.fine(
                        "Comparing global " + global + " with " + scorePart);

                    if (!global.equals(scorePart)) {
                        logger.warning("Different SystemPart in system " + i);
                        ok = false;
                    }
                }
            }
        }

        if (ok) {
            // Assign id and names (TBI)
            int index = 0;

            for (ScorePart part : partList) {
                part.setId(++index);
                part.setName("Part_" + index);

                if (logger.isFineEnabled()) {
                    logger.fine("Global " + part);
                }
            }

            // This is now the global score part list
            score.setPartList(partList);

            // Link the SystemPart instances to their corresponding ScorePart
            for (TreeNode node : score.getSystems()) {
                System system = (System) node;

                for (TreeNode n : system.getParts()) {
                    SystemPart sp = (SystemPart) n;
                    sp.setScorePart(score.getPartList().get(sp.getId() - 1));
                }
            }
        }
    }

    // checkBarAlignments //
    /**
     * Check alignment of each measure of each part with the other part
     * measures, a test that needs several staves in the system
     *
     * @param system the system to check
     */
    private void checkBarAlignments (omr.score.System system)
    {
        if (system.getInfo()
                  .getStaves()
                  .size() > 1) {
            int maxShiftDx = scale.toPixels(constants.maxAlignShiftDx);

            for (Iterator pit = system.getParts()
                                      .iterator(); pit.hasNext();) {
                SystemPart part = (SystemPart) pit.next();

                for (Iterator mit = part.getMeasures()
                                        .iterator(); mit.hasNext();) {
                    Measure measure = (Measure) mit.next();

                    // Check that all staves in this part are concerned with
                    // one stick of the barline
                    Collection<Staff> staves = new ArrayList<Staff>();

                    for (TreeNode node : part.getStaves()) {
                        staves.add((Staff) node);
                    }

                    if (!measure.getBarline()
                                .joinsAllStaves(staves)) {
                        // Remove the false bar info
                        for (Stick stick : measure.getBarline()
                                                  .getSticks()) {
                            stick.setResult(NOT_STAFF_ALIGNED);
                            stick.setShape(null);
                            bars.remove(stick);
                        }

                        // Remove the false measure
                        mit.remove();

                        break;
                    }

                    // Compare the abscissa with corresponding position in
                    // the other parts
                    if (logger.isFineEnabled()) {
                        logger.fine(
                            system.getContextString() +
                            " Checking measure alignment at x: " +
                            measure.getLeftX());
                    }

                    int x = measure.getBarline()
                                   .getCenter().x;

                    for (Iterator it = system.getParts()
                                             .iterator(); it.hasNext();) {
                        SystemPart prt = (SystemPart) it.next();

                        if (prt == part) {
                            continue;
                        }

                        if (!prt.barlineExists(x, maxShiftDx)) {
                            if (logger.isFineEnabled()) {
                                logger.fine(
                                    "Singular measure removed: " +
                                    Dumper.dumpOf(measure));
                            }

                            // Remove the false bar info
                            for (Stick stick : measure.getBarline()
                                                      .getSticks()) {
                                stick.setResult(NOT_SYSTEM_ALIGNED);
                                bars.remove(stick);
                            }

                            // Remove the false measure
                            mit.remove();

                            break;
                        }
                    }
                }
            }
        }
    }

    // checkEndingBar //
    /**
     * Use ending bar line if any, to adjust the right abscissa of the system
     * and its staves.
     *
     * @param system the system to check
     */
    private void checkEndingBar (omr.score.System system)
    {
        try {
            SystemPart part = system.getFirstPart();
            Measure    measure = part.getLastMeasure();
            Barline    barline = measure.getBarline();
            int        lastX = barline.getRightX();
            int        minWidth = scale.toPixels(constants.minMeasureWidth);

            if ((part.getFirstStaff()
                     .getWidth() - lastX) < minWidth) {
                if (logger.isFineEnabled()) {
                    logger.fine("Adjusting EndingBar " + system);
                }

                // Adjust end of system & staff(s) to this one
                UnitDimension dim = system.getDimension();

                if (dim == null) {
                    system.setDimension(new UnitDimension(lastX, 0));
                } else {
                    dim.width = lastX;
                }

                for (Iterator pit = system.getParts()
                                          .iterator(); pit.hasNext();) {
                    SystemPart prt = (SystemPart) pit.next();

                    for (Iterator sit = prt.getStaves()
                                           .iterator(); sit.hasNext();) {
                        Staff stv = (Staff) sit.next();
                        stv.setWidth(system.getDimension().width);
                    }
                }
            }
        } catch (Exception ex) {
            logger.warning(
                system.getContextString() + " Error in checking ending bar",
                ex);
        }
    }

    // checkMeasures //
    /**
     * Check measure reality, using a set of additional tests.
     */
    private void checkMeasures ()
    {
        // Check are performed on a system basis
        for (TreeNode node : score.getSystems()) {
            omr.score.System system = (omr.score.System) node;

            // Check alignment of each measure of each staff with the other
            // staff measures, a test that needs several staves in the system
            checkBarAlignments(system);

            // Detect very narrow measures which in fact indicate double bar
            // lines.
            mergeBarlines(system);

            // First barline may be just the beginning of the staff, so do not
            // count the very first bar line, which in general defines the
            // beginning of the staff rather than the end of a measure, but use
            // it to precisely define the left abscissa of the system and all
            // its contained staves.
            removeStartingBar(system);

            // Similarly, use the very last bar line, which generally ends the
            // system, to define the right abscissa of the system and its
            // staves.
            checkEndingBar(system);
        }
    }

    // createScore //
    /** Allocate the Score entity sized from the sheet, and cross-link them. */
    private void createScore ()
    {
        if (logger.isFineEnabled()) {
            logger.fine("Allocating score");
        }

        score = new Score(
            scale.toUnits(
                new PixelDimension(sheet.getWidth(), sheet.getHeight())),
            (int) Math.rint(sheet.getSkew().angle() * ScoreConstants.BASE),
            scale,
            sheet.getPath());

        // Mutual referencing
        score.setSheet(sheet);
        sheet.setScore(score);
    }

    // displayFrame //
    /** Build the dedicated lag view and its companion boards (UI only). */
    private void displayFrame ()
    {
        Selection glyphSelection = sheet.getSelection(VERTICAL_GLYPH);
        lagView = new MyLagView(lag);
        lagView.setGlyphSelection(glyphSelection);
        glyphSelection.addObserver(lagView);
        lagView.colorize();

        final String unit = sheet.getRadix() + ":BarsBuilder";
        BoardsPane   boardsPane = new BoardsPane(
            sheet,
            lagView,
            new PixelBoard(unit),
            new RunBoard(unit, sheet.getSelection(VERTICAL_RUN)),
            new SectionBoard(
                unit,
                lag.getLastVertexId(),
                sheet.getSelection(VERTICAL_SECTION),
                sheet.getSelection(VERTICAL_SECTION_ID)),
            new GlyphBoard(
                unit,
                this,
                clutter,
                sheet.getSelection(VERTICAL_GLYPH),
                sheet.getSelection(VERTICAL_GLYPH_ID),
                sheet.getSelection(GLYPH_SET)),
            new MyCheckBoard(
                unit,
                checker.getSuite(),
                sheet.getSelection(VERTICAL_GLYPH)));

        // Create a hosting frame for the view
        ScrollLagView slv = new ScrollLagView(lagView);
        sheet.getAssembly()
             .addViewTab("Bars", slv, boardsPane);
    }

    // getBarOf //
    /** Find the bar stick that wraps the given glyph, or null (with a warning). */
    private Stick getBarOf (Glyph glyph)
    {
        for (Stick bar : bars) {
            if (bar == glyph) {
                return bar;
            }
        }

        logger.warning("Cannot find bar for " + glyph);

        return null;
    }

    // mergeBarlines //
    /**
     * Check whether two close bar lines are not in fact double lines (with
     * variants)
     *
     * @param system the system to check
     */
    private void mergeBarlines (omr.score.System system)
    {
        int maxDoubleDx = scale.toPixels(constants.maxDoubleBarDx);

        for (TreeNode node : system.getParts()) {
            SystemPart part = (SystemPart) node;
            Measure    prevMeasure = null;

            for (Iterator mit = part.getMeasures()
                                    .iterator(); mit.hasNext();) {
                Measure measure = (Measure) mit.next();

                if (prevMeasure != null) {
                    final int measureWidth = measure.getBarline()
                                                    .getCenter().x -
                                             prevMeasure.getBarline()
                                                        .getCenter().x;

                    if (measureWidth <= maxDoubleDx) {
                        // Lines are side by side or one above the other?
                        Stick stick = (Stick) measure.getBarline()
                                                     .getSticks()
                                                     .toArray()[0];
                        Stick prevStick = (Stick) prevMeasure.getBarline()
                                                             .getSticks()
                                                             .toArray()[0];

                        if (stick.overlapWith(prevStick)) {
                            // Overlap => side by side
                            // Merge the two bar lines into the first one
                            prevMeasure.getBarline()
                                       .mergeWith(measure.getBarline());

                            if (logger.isFineEnabled()) {
                                logger.fine(
                                    "Merged two close barlines into " +
                                    prevMeasure.getBarline());
                            }
                        } else {
                            // No overlap => one above the other
                            //  prevStick.addGlyphSections(stick, true);
                            //  stick.destroy(false);
                            //  bars.remove(stick);
                            if (logger.isFineEnabled()) {
                                logger.fine(
                                    "Two barlines segments one above the other in " +
                                    measure.getBarline());
                            }
                        }

                        mit.remove();
                    } else {
                        prevMeasure = measure;
                    }
                } else {
                    prevMeasure = measure;
                }
            }
        }
    }

    // removeStartingBar //
    /**
     * We associate measures only with their ending bar line(s), so the starting
     * bar of a staff does not end a measure, we thus have to remove the measure
     * that we first had associated with it.
     *
     * @param system the system whose staves starting measure has to be checked
     */
    private void removeStartingBar (omr.score.System system)
    {
        int     minWidth = scale.toPixels(constants.minMeasureWidth);
        Barline firstBarline = system.getFirstPart()
                                     .getFirstMeasure()
                                     .getBarline();
        int     firstX = firstBarline.getLeftX();

        // Check is based on the width of this first measure
        if (firstX < minWidth) {
            // Adjust system parameters if needed : topLeft and dimension
            if (firstX != 0) {
                if (logger.isFineEnabled()) {
                    logger.fine("Adjusting firstX=" + firstX + " " + system);
                }

                system.getTopLeft()
                      .translate(firstX, 0);
                system.getDimension().width -= firstX;
            }

            // Adjust beginning of all staves to this one
            // Remove this false "measure" in all parts of the system
            for (TreeNode node : system.getParts()) {
                SystemPart part = (SystemPart) node;

                // Set the bar as starting bar for the staff
                Measure measure = part.getFirstMeasure();
                part.setStartingBarline(measure.getBarline());

                // Remove this first measure
                part.getMeasures()
                    .remove(0);

                // Update abscissa of top-left corner of every staff
                for (TreeNode sNode : part.getStaves()) {
                    Staff staff = (Staff) sNode;
                    staff.getTopLeft()
                         .translate(firstX, 0);
                }

                // Update other bar lines abscissae accordingly
                for (TreeNode mNode : part.getMeasures()) {
                    Measure meas = (Measure) mNode;
                    meas.resetAbscissae();
                }
            }
        }
    }

    // setSystemBraces //
    /**
     * Pass the braces symbols found for one system
     *
     * @param braces list of braces for this system
     */
    private void setSystemBraces (System system, List<Glyph> braces)
    {
        // NOTE(review): the entire implementation below is commented out in
        // this revision, so the method currently does nothing with its
        // arguments — confirm whether it is still work in progress.
        //
        //        // Map Staff -> its containing staves ensemble (= ScorePart)
        //        Map<Staff, List<Staff>> ensembles = new HashMap<Staff, List<Staff>>();
        //
        //        // Inspect each brace in turn
        //        for (Glyph brace : braces) {
        //            List<Staff> ensemble = new ArrayList<Staff>();
        //
        //            // Inspect all staves for this brace
        //            for (TreeNode node : system.getStaves()) {
        //                Staff staff = (Staff) node;
        //                if (checker.isStaffEmbraced(staff, brace)) {
        //                    ensemble.add(staff);
        //                    ensembles.put(staff, ensemble);
        //
        //            if (ensemble.size() == 0) {
        //                logger.warning(
        //                    "Brace with no embraced staves at all: " + brace.getId());
        //
        //        // Now build the parts by looking back at all staves
        //        List<SystemPart> parts = new ArrayList<SystemPart>();
        //        List<Staff> currentEnsemble = null;
        //        for (TreeNode node : system.getStaves()) {
        //            Staff staff = (Staff) node;
        //            List<Staff> ensemble = ensembles.get(staff);
        //            if (ensemble == null) {
        //                // Standalone staff, a part by itself
        //                parts.add(new SystemPart(Arrays.asList(staff)));
        //            } else {
        //                // Staff is in a part
        //                if (ensemble != currentEnsemble) {
        //                    parts.add(new SystemPart(ensemble));
        //                } else {
        //                    // Nothing to do
        //                currentEnsemble = ensemble;
        //
        //        // Dump this system parts
        //        if (logger.isFineEnabled()) {
        //            StringBuilder sb = new StringBuilder();
        //            for (SystemPart part : parts) {
        //                sb.append("[");
        //                for (Staff staff : part.getStaves()) {
        //                    sb.append(" ")
        //                      .append(staff.getStaffIndex());
        //                sb.append("] ");
        //        logger.fine(system + " Parts: " + sb);
        //
        //        // Assign the parts to the system
        //        system.setParts(parts);
    }

    // Constants //
    private static final class Constants
        extends ConstantSet
    {
        /** Should we display a frame on the vertical sticks */
        Constant.Boolean displayFrame = new Constant.Boolean(
            false,
            "Should we display a frame on the vertical sticks");

        /** Maximum horizontal shift in bars between staves in a system */
        Scale.Fraction maxAlignShiftDx = new Scale.Fraction(
            0.2,
            "Maximum horizontal shift in bars between staves in a system");

        /** Maximum thickness of an interesting vertical stick */
        Scale.Fraction maxBarThickness = new Scale.Fraction(
            0.75,
            "Maximum thickness of an interesting vertical stick");

        /** Maximum difference in run length to be part of the same section */
        Scale.Fraction maxDeltaLength = new Scale.Fraction(
            0.2,
            "Maximum difference in run length to be part of the same section");

        /** Maximum horizontal distance between the two bars of a double bar */
        Scale.Fraction maxDoubleBarDx = new Scale.Fraction(
            0.75,
            "Maximum horizontal distance between the two bars of a double bar");

        /** Minimum width for a measure */
        Scale.Fraction minMeasureWidth = new Scale.Fraction(
            0.75,
            "Minimum width for a measure");
    }

    // MyCheckBoard //
    /** Board that re-runs the bar check suite on the currently selected stick. */
    private class MyCheckBoard
        extends CheckBoard<BarsChecker.Context>
    {
        public MyCheckBoard (String                           unit,
                             CheckSuite<BarsChecker.Context>  suite,
                             Selection                        inputSelection)
        {
            super(unit, suite, inputSelection);
        }

        public void update (Selection selection, SelectionHint hint)
        {
            BarsChecker.Context context = null;
            Object              entity = selection.getEntity();

            if (entity instanceof Stick) {
                // To get a fresh suite
                setSuite(checker.new BarCheckSuite());

                Stick stick = (Stick) entity;
                context = new BarsChecker.Context(stick);
            }

            tellObject(context);
        }
    }

    // MyLagView //
    /** Dedicated lag view: clutter sticks in red, recognized bars in yellow. */
    private class MyLagView
        extends GlyphLagView
    {
        private MyLagView (GlyphLag lag)
        {
            super(lag, null, null, BarsBuilder.this, clutter);
            setName("BarsBuilder-View");

            // Pixel
            setLocationSelection(
                sheet.getSelection(SelectionTag.SHEET_RECTANGLE));

            // Glyph set
            Selection glyphSetSelection = sheet.getSelection(
                SelectionTag.GLYPH_SET);
            setGlyphSetSelection(glyphSetSelection);
            glyphSetSelection.addObserver(this);

            // Glyph id
            sheet.getSelection(SelectionTag.VERTICAL_GLYPH_ID)
                 .addObserver(this);
        }

        // colorize //
        public void colorize ()
        {
            super.colorize();

            // Determine my view index in the lag views
            final int viewIndex = lag.viewIndexOf(this);

            // All remaining vertical sticks clutter
            for (Stick stick : clutter) {
                stick.colorize(lag, viewIndex, Color.red);
            }

            // Recognized bar lines
            for (Stick stick : bars) {
                stick.colorize(lag, viewIndex, Color.yellow);
            }
        }

        // renderItems //
        public void renderItems (Graphics g)
        {
            // Render all physical info known so far, which is just the staff
            // line info, lineset by lineset
            sheet.accept(new SheetPainter(g, getZoom()));

            super.renderItems(g);
        }
    }
}
package theschoolproject;

import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.util.Random;
import theschoolproject.Input.Keyboard;
import java.awt.Rectangle;

/**
 * A movable, sprite-sheet-animated game entity living on a 50px tile grid.
 * Tracks its top-left position, a "feet" reference point (bottom-center of
 * the 64x64 sprite) and per-direction collision info against adjacent tiles.
 *
 * @author root
 */
public class Entity {

    GamePanel world;

    // Top-left corner of the sprite in world pixels.
    double xLoc = 0;
    double yLoc = 0;
    // Feet reference point: bottom-center of the 64x64 sprite (x+32, y+64).
    double xLocFeet = this.xLoc + 32;
    double yLocFeet = this.yLoc + 64;

    // Whether or not adjacent block is solid (up/right/down/left).
    boolean uBlock, rBlock, dBlock, lBlock = false;

    // Distance from the player's feet to the closest edge of a solid block
    // (Distance to Up etc...); 99 acts as "nothing nearby".
    int dTu = 99;
    int dTr = 99;
    int dTd = 99;
    int dTl = 99;
    int collMinDist = 10;

    // Tile coordinates of the feet point (tiles are 50px squares).
    int tileLocX;
    int tileLocY;

    // 0 - North/Up, 1 - East/Right, 2 - South/Down, 3 - West/Left
    int orientation = 2;
    int[] animSeq = {0, 1, 2, 1};
    double spd = 0;
    boolean isMoving = false;
    int rows = 4;
    int columns = 3;
    int height = 64;
    int width = 64;
    int animCycle = 1;
    BufferedImage spriteSheetB;
    BufferedImage[][] sprites;
    String[] spritePaths = {"/resources/pl_sprite.png", "/resources/en1_sprite.png"};
    Random rand = new Random();
    Keyboard keys;

    /**
     * Creates an entity, slices its sprite sheet into frames and spawns it at
     * a random location.
     *
     * @param gp the panel (world) this entity belongs to
     * @param sp classpath location of the entity's sprite sheet
     */
    public Entity(GamePanel gp, String sp) {
        world = gp;
        // FIX: load the sprite sheet once, not once per cell — the original
        // reloaded the same image rows*columns (12) times inside the loop.
        spriteSheetB = UsefulSnippets.loadImage(sp);
        sprites = new BufferedImage[rows][columns];
        for (int i = 0; i < rows; i++) {
            for (int j = 0; j < columns; j++) {
                sprites[i][j] = spriteSheetB.getSubimage(j * width, i * height, width, height);
            }
        }
        // FIX: spawn through setLocation() so the derived feet/tile
        // coordinates stay consistent with the randomized position — the
        // original assigned xLoc/yLoc directly, leaving xLocFeet/yLocFeet and
        // tileLocX/tileLocY stale until the first explicit setLocation call.
        setLocation(rand.nextInt(400) + 50, rand.nextInt(300) + 50);
    }

    /** Draws the current animation frame at the entity's position. */
    public void draw(Graphics g) {
        g.drawImage(sprites[orientation][animSeq[animCycle]], (int) xLoc, (int) yLoc, null);
    }

    /**
     * Refreshes the four adjacency flags and feet-to-edge distances by
     * inspecting the tiles around the entity's current tile.
     * Skipped while the tile coordinates are still at the grid border (0).
     */
    public void checkCollision() {
        // System.out.println(" " + dTu + " " + dTd + " " + dTr + " " + dTl);
        if ((this.tileLocX != 0) && (this.tileLocY != 0)) {
            uBlock = false;
            rBlock = false;
            dBlock = false;
            lBlock = false;
            dTu = 99;
            dTr = 99;
            dTd = 99;
            dTl = 99;
            // Right neighbour
            if ((world.rooms[0].tileArry[(this.tileLocX + 1) + this.tileLocY * world.rooms[0].width].isSolid())
                    || (world.rooms[0].tileArry[(this.tileLocX + 1) + this.tileLocY * world.rooms[0].width].isDoor())) {
                this.rBlock = true;
                dTr = ((50 * (this.tileLocX + 1))) - (int) this.xLocFeet;
            }
            // Left neighbour
            if ((world.rooms[0].tileArry[(this.tileLocX - 1) + this.tileLocY * world.rooms[0].width].isSolid())
                    || (world.rooms[0].tileArry[(this.tileLocX - 1) + this.tileLocY * world.rooms[0].width].isDoor())) {
                this.lBlock = true;
                dTl = ((int) this.xLocFeet - (50 * (this.tileLocX)));
            }
            // Below neighbour
            if ((world.rooms[0].tileArry[this.tileLocX + (this.tileLocY + 1) * world.rooms[0].width].isSolid())
                    || (world.rooms[0].tileArry[this.tileLocX + (this.tileLocY + 1) * world.rooms[0].width].isDoor())) {
                this.dBlock = true;
                dTd = ((50 * (this.tileLocY + 1))) - (int) this.yLocFeet;
            }
            // Above neighbour
            if ((world.rooms[0].tileArry[this.tileLocX + (this.tileLocY - 1) * world.rooms[0].width].isSolid())
                    || (world.rooms[0].tileArry[this.tileLocX + (this.tileLocY - 1) * world.rooms[0].width].isDoor())) {
                this.uBlock = true;
                dTu = ((int) this.yLocFeet - (50 * (this.tileLocY)));
            }
        }
    }

    /** @return the entity's axis-aligned bounding box in world pixels */
    public Rectangle getBounds() {
        return new Rectangle((int) this.xLoc, (int) this.yLoc, width, height);
    }

    /**
     * Per-frame update: refreshes collision info, accelerates/decelerates,
     * moves along the current orientation when not blocked, and re-derives
     * the feet tile coordinates.
     */
    public void tick() {
        checkCollision();
        if (isMoving && spd < 3) {
            spd = spd + 0.5;
        }
        if (!isMoving && spd > 0) {
            spd = spd - 0.5;
            animCycle = 1;
        }
        if (animCycle > 2) {
            animCycle = 0;
        }
        switch (orientation) {
            case 0:
                if (!uBlock && dTu > 3) {
                    setLocation(this.getX(), this.getY() - spd);
                }
                break;
            case 1:
                if (!rBlock && dTr > 3) {
                    setLocation(this.getX() + spd, this.getY());
                }
                break;
            case 2:
                if (!dBlock && dTd > 3) {
                    setLocation(this.getX(), this.getY() + spd);
                }
                break;
            case 3:
                if (!lBlock && dTl > 3) {
                    setLocation(this.getX() - spd, this.getY());
                }
                break;
        }
        tileLocX = (int) (xLocFeet) / 50;
        tileLocY = (int) (yLocFeet) / 50;
    }

    /**
     * Moves the entity and keeps all derived coordinates (feet point and
     * tile position) in sync.
     */
    public void setLocation(double x, double y) {
        this.xLoc = x;
        this.yLoc = y;
        xLocFeet = this.xLoc + 32;
        yLocFeet = this.yLoc + 64;
        tileLocX = (int) (xLocFeet) / 50;
        tileLocY = (int) (yLocFeet) / 50;
    }

    /** @return x of the sprite's top-left corner */
    public double getX() {
        return xLoc;
    }

    /** @return y of the sprite's top-left corner */
    public double getY() {
        return yLoc;
    }
}
package com.k3nx.signupform;

/**
 * Immutable value object holding a user's sign-up credentials.
 *
 * <p>The original fields were mutable and write-only (no accessors), making
 * the object unusable after construction; getters were added and the fields
 * made {@code final}. The constructor stays package-private as before.
 */
public class User {

    private final String mUsername;
    private final String mPassword;

    /**
     * @param username the account name entered in the sign-up form
     * @param password the plain-text password entered in the sign-up form
     */
    User(String username, String password) {
        this.mUsername = username;
        this.mPassword = password;
    }

    /** @return the username supplied at construction */
    public String getUsername() {
        return mUsername;
    }

    /** @return the password supplied at construction */
    public String getPassword() {
        return mPassword;
    }
}
package me.kirimin.kumin;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;

import android.net.Uri;
import android.os.Handler;

import me.kirimin.kumin.db.User;
import twitter4j.AsyncTwitter;
import twitter4j.AsyncTwitterFactory;
import twitter4j.ConnectionLifeCycleListener;
import twitter4j.Status;
import twitter4j.StatusUpdate;
import twitter4j.TwitterAdapter;
import twitter4j.TwitterException;
import twitter4j.TwitterMethod;
import twitter4j.TwitterStream;
import twitter4j.TwitterStreamFactory;
import twitter4j.UserStreamAdapter;
import twitter4j.UserStreamListener;
import twitter4j.auth.AccessToken;
import twitter4j.auth.RequestToken;

/**
 * Twitter4J wrapper that exposes async REST calls and the user stream, and
 * marshals all callbacks back onto the thread this object was created on
 * (via {@link Handler}).
 *
 * @author kirimin
 */
public class Twitter {

    /** Twitter's image upload limit enforced in {@link #updateStatus(String, String)}: 3 MB. */
    private static final long MAX_IMAGE_BYTES = 3145728L;

    private AsyncTwitter mTwitter = new AsyncTwitterFactory().getInstance();
    private TwitterStream mStream = new TwitterStreamFactory().getInstance();
    // Created on the constructing thread; all listener callbacks are posted here.
    private Handler mHandler = new Handler();
    private RequestToken mRequestToken;
    // Guards against starting a second user stream before the previous one
    // has finished closing (reset in onCleanUp).
    private boolean isFinishedClose = true;

    public Twitter() {
        mTwitter.setOAuthConsumer(Consumer.K, Consumer.S);
        mStream.setOAuthConsumer(Consumer.K, Consumer.S);
    }

    /**
     * Applies the stored account's OAuth access token to both the REST and
     * stream clients.
     *
     * @param user account whose token/secret should be used
     */
    public void setUser(User user) {
        AccessToken token = new AccessToken(user.getToken(), user.getSecret());
        mTwitter.setOAuthAccessToken(token);
        mStream.setOAuthAccessToken(token);
    }

    /**
     * Posts a tweet asynchronously. Result is delivered through the
     * OnStatusUpdateListener registered via
     * {@link #addOnStatusUpdateListener(OnStatusUpdateListener)}.
     *
     * @param tweet the status text to post
     */
    public void updateStatus(String tweet) {
        mTwitter.updateStatus(tweet);
    }

    /**
     * Posts a tweet with an attached image asynchronously. Result is
     * delivered through the OnStatusUpdateListener.
     *
     * @param tweet     the status text to post
     * @param imagePath local path of the image to attach
     * @throws IOException if the image exceeds the 3 MB upload limit
     */
    public void updateStatus(String tweet, String imagePath) throws IOException {
        StatusUpdate status = new StatusUpdate(tweet);
        File file = new File(imagePath);
        if (file.length() >= MAX_IMAGE_BYTES)
            throw new IOException("image exceeds upload size limit: " + file.length() + " bytes");
        status.media(file);
        mTwitter.updateStatus(status);
    }

    /**
     * Favorites a status asynchronously.
     *
     * @param userId id of the status to favorite
     */
    public void doFavorite(long userId) {
        mTwitter.createFavorite(userId);
    }

    /**
     * Starts the user stream unless a previous stream is still closing.
     *
     * @return true if the stream was started, false if still closing
     */
    public synchronized boolean startUserStream() {
        if (isFinishedClose) {
            isFinishedClose = false;
            mStream.user();
            return true;
        }
        return false;
    }

    /** Shuts the user stream down; {@code isFinishedClose} is reset by onCleanUp. */
    public synchronized void stopUserStream() {
        mStream.shutdown();
    }

    /** Kicks off the OAuth request-token fetch; result arrives via OnOAuthListener. */
    public void getOAuthRequestTokenAsync() {
        mTwitter.getOAuthRequestTokenAsync();
    }

    /**
     * @return the browser authorization URL for the previously fetched request token
     * @throws NullPointerException if no request token has been obtained yet
     */
    public Uri getAuthorizationUri() {
        if (mRequestToken == null)
            throw new NullPointerException("token is empty");
        return Uri.parse(mRequestToken.getAuthorizationURL());
    }

    /**
     * Exchanges the PIN for an access token asynchronously; result arrives
     * via OnOAuthListener.
     *
     * @param pincode the PIN the user entered after browser authorization
     */
    public void getOAuthAccessToken(String pincode) {
        mTwitter.getOAuthAccessTokenAsync(pincode);
    }

    /**
     * Registers a listener for the OAuth flow. Callbacks are posted to the
     * constructing thread's handler.
     */
    public void setOnOAuthListener(final OnOAuthListener listener) {
        mTwitter.addListener(new TwitterAdapter() {
            @Override
            public void gotOAuthRequestToken(RequestToken token) {
                mRequestToken = token;
            }

            @Override
            public void gotOAuthAccessToken(final AccessToken accessToken) {
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        String userId = String.valueOf(accessToken.getUserId());
                        String sName = accessToken.getScreenName();
                        String token = accessToken.getToken();
                        String secret = accessToken.getTokenSecret();
                        listener.gotOAuthAccessToken(new User(userId, sName, token, secret));
                    }
                });
            }

            @Override
            public void onException(TwitterException te, TwitterMethod method) {
                te.printStackTrace();
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        listener.onError();
                    }
                });
            }
        });
    }

    /**
     * Replaces the stream listeners with one forwarding statuses to
     * {@code listener} on the constructing thread, and re-arms the
     * close-finished flag when the connection is cleaned up.
     */
    public void setStreamListener(final StreamListener listener) {
        mStream.clearListeners();
        mStream.addListener(new UserStreamAdapter() {
            @Override
            public void onStatus(final Status status) {
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        listener.onStatus(status);
                    }
                });
            }
        });
        mStream.addConnectionLifeCycleListener(new ConnectionLifeCycleListener() {
            @Override
            public void onDisconnect() {
            }

            @Override
            public void onConnect() {
            }

            @Override
            public void onCleanUp() {
                // Stream is fully closed; allow startUserStream() again.
                isFinishedClose = true;
            }
        });
    }

    /**
     * Registers a listener for tweet/favorite completion and errors.
     * Callbacks are posted to the constructing thread's handler.
     */
    public void addOnStatusUpdateListener(final OnStatusUpdateListener listener) {
        mTwitter.addListener(new TwitterAdapter() {
            @Override
            public void updatedStatus(Status status) {
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        listener.onStatusUpdate();
                    }
                });
            }

            @Override
            public void createdFavorite(Status status) {
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        listener.onFavorite();
                    }
                });
            }

            @Override
            public void onException(final TwitterException te, TwitterMethod method) {
                te.printStackTrace();
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        listener.onError();
                    }
                });
            }
        });
    }

    /**
     * Formats a relative/absolute timestamp for display:
     * other day -> "MM/dd", same day -> "Nh" / "Nm" / "Ns".
     *
     * <p>Fix: the original pattern was {@code "mm/dd"} — {@code mm} means
     * MINUTES in {@link SimpleDateFormat}; months require {@code MM}.
     *
     * @param createdAt creation time of the status
     * @return the formatted timestamp
     */
    public String getTimeStamp(Date createdAt) {
        Calendar nowCal = Calendar.getInstance();
        Calendar createdAtCal = Calendar.getInstance();
        createdAtCal.setTime(createdAt);
        if (!isToday(nowCal, createdAtCal)) {
            SimpleDateFormat format = new SimpleDateFormat("MM/dd", Locale.JAPAN);
            return format.format(createdAt);
        } else if (nowCal.get(Calendar.HOUR_OF_DAY) != createdAtCal.get(Calendar.HOUR_OF_DAY)) {
            // NOTE: hour-field difference, not elapsed hours — 13:59 -> 14:00 shows "1h".
            return nowCal.get(Calendar.HOUR_OF_DAY) - createdAtCal.get(Calendar.HOUR_OF_DAY) + "h";
        } else if (nowCal.get(Calendar.MINUTE) > createdAtCal.get(Calendar.MINUTE)) {
            return nowCal.get(Calendar.MINUTE) - createdAtCal.get(Calendar.MINUTE) + "m";
        } else {
            String s = nowCal.get(Calendar.SECOND) - createdAtCal.get(Calendar.SECOND) + "s";
            // Clock granularity can make the difference -1; clamp to "0s".
            return s.equals("-1s") ? "0s" : s;
        }
    }

    /** @return true when both calendars fall on the same year/month/day */
    private boolean isToday(Calendar cal1, Calendar cal2) {
        return cal1.get(Calendar.YEAR) == cal2.get(Calendar.YEAR)
                && cal1.get(Calendar.MONTH) == cal2.get(Calendar.MONTH)
                && cal1.get(Calendar.DAY_OF_MONTH) == cal2.get(Calendar.DAY_OF_MONTH);
    }

    /** Callbacks for tweet/favorite completion. */
    public interface OnStatusUpdateListener {

        public void onStatusUpdate();

        public void onFavorite();

        public void onError();
    }

    /** Callback for statuses arriving on the user stream. */
    public interface StreamListener {

        public void onStatus(Status status);
    }

    /** Callbacks for the OAuth flow. */
    public interface OnOAuthListener {

        public void gotOAuthAccessToken(User user);

        public void onError();
    }
}
package qaframework.rtv.tests; import org.openqa.selenium.WebDriver; import org.testng.Assert; import org.testng.annotations.Test; import qaframework.rtv.fw.ApplicationManager; public class VideoTests extends TestBase { char w = 0; @Test(testName = "RTV-11", description = "Check player and autostart") public void authorizationWithRememberCheckBox() throws Exception { WebDriver driver = null; app.getNavigationHelper().openMainPage(); AccountData account = new AccountData(); account.username = "test002"; account.password = "002test"; app.getAccountHelper().fillLoginForm(app, account); app.getNavigationHelper().clickButtonLogin(); //app.getNavigationHelper().getPlayerContainer(); app.getNavigationHelper().clickButtonExit(); } @Test(testName = "RTV-17", singleThreaded = true, description = "Check player and autostart") public void checkPlayerOnMainPage() throws Exception { ApplicationManager app2 = new ApplicationManager(); app2.getNavigationHelper().openMainPage(); Assert.assertTrue(app2.getVideoHelper().clickButtonPlay(), "Button start don't work, video isn't stated"); if (w < 30) while (!(app2.getVideoHelper().getDuration().equals("0:10")) && w < 30) { Thread.sleep(1000); w++; } else { app2.stop(); System.err.println("The bicycle has already stopped!"); } //TODO: add check video stream or url or screenshot Assert.assertTrue(app2.getVideoHelper().clickButtonPause(), "Button start don't work, video isn't stated"); Assert.assertEquals(app2.getVideoHelper().getDuration(), "0:10"); app2.stop(); //TODO: needed mouse move to iframe for open buttons panel } private void loginMainPageUserAdmin() { app.getNavigationHelper().openMainPage(); AccountData account = new AccountData(); account.username = "test002"; account.password = "002test"; app.getAccountHelper().fillLoginForm(app, account); app.getNavigationHelper().clickButtonLogin(); } }
package algorithms.imageProcessing;

import algorithms.QuickSort;
import algorithms.misc.MiscDebug;
import algorithms.util.PairInt;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * class encapsulating the steps from scale calculation to matching corners
 * to make correspondence lists.
 *
 * <p>Pipeline: (optionally) solve for the scale/rotation/translation between
 * the two images, then extract corner regions from the greyscale images and
 * match them under that transformation into a {@link CorrespondenceList}.
 *
 * @author nichole
 */
public class FeatureMatcherWrapper {

    private final ImageExt img1;
    private final ImageExt img2;

    // Greyscale copies; set either during scale solving or lazily in
    // applyHistEqIfNeeded().
    private GreyscaleImage gsImg1 = null;
    private GreyscaleImage gsImg2 = null;

    // Corner regions extracted per image by extractCornerRegions().
    private Set<CornerRegion> cornerRegions1 = null;
    private Set<CornerRegion> cornerRegions2 = null;

    // Processing milestones reached so far.
    private enum State {
        DID_APPLY_HIST_EQ, COULD_NOT_DETERMINE_SCALE
    }

    private Set<State> stateSet = new HashSet<State>();

    // true when the constructor was given no transformation parameters and
    // the scale must be solved for.
    private final boolean doDetermineScale;

    private final boolean debug;

    private final String debugTagPrefix;

    // Transformation between image1 and image2; either supplied by the
    // caller or computed by solveForScale().
    private TransformationParameters params = null;

    private float scaleTol = 0.2f;

    private float rotationInRadiansTol = (float)(20. * Math.PI/180.);

    //TODO: revise this...
    private int transXYTol = 20;

    private Logger log = Logger.getLogger(this.getClass().getName());

    /** Constructor that will solve for scale itself; no debug output. */
    public FeatureMatcherWrapper(ImageExt image1, ImageExt image2) {
        img1 = image1;
        img2 = image2;
        doDetermineScale = true;
        debug = false;
        debugTagPrefix = "";
    }

    /** Constructor that will solve for scale itself, with debug images tagged by prefix. */
    public FeatureMatcherWrapper(ImageExt image1, ImageExt image2, String debugTagPrefix) {
        img1 = image1;
        img2 = image2;
        doDetermineScale = true;
        debug = true;
        this.debugTagPrefix = debugTagPrefix;
    }

    /**
     * constructor accepting transformation parameters. Note, for best results,
     * the standard deviations within parameters should be populated because they
     * are used as tolerances in matching.
     * @param image1
     * @param image2
     * @param parameters
     */
    public FeatureMatcherWrapper(ImageExt image1, ImageExt image2,
        TransformationParameters parameters) {
        img1 = image1;
        img2 = image2;
        doDetermineScale = false;
        params = parameters;
        debug = false;
        debugTagPrefix = "";
    }

    /**
     * constructor accepting transformation parameters and a debugging tag for
     * image names. Note, for best results, the standard deviations within
     * parameters should be populated because they are used as tolerances in
     * matching.
     * @param image1
     * @param image2
     * @param parameters
     * @param debugTagPrefix
     */
    public FeatureMatcherWrapper(ImageExt image1, ImageExt image2,
        TransformationParameters parameters, String debugTagPrefix) {
        img1 = image1;
        img2 = image2;
        doDetermineScale = false;
        params = parameters;
        debug = true;
        this.debugTagPrefix = debugTagPrefix;
    }

    /**
     * Entry point: produces a correspondence list between the two images,
     * solving for scale first when no parameters were supplied.
     *
     * @return matched point pairs, or null when the scale could not be determined
     * @throws IOException on debug-image write failures
     * @throws NoSuchAlgorithmException propagated from the scale solver
     */
    public CorrespondenceList matchFeatures() throws IOException,
        NoSuchAlgorithmException {

        /*
        options:
            (1) determine scale
                (a) match remaining points derived in scale calc.
                    if resulting set spans the intersection, make and return
                    the correspondence list
                    else, follow (2)
            (2) given scale
                (b) extract corner regions from greyscale image
            (3) use feature matcher w/ scale to make the correspondence list
        */

        CorrespondenceList cl = null;

        if (doDetermineScale) {
            cl = solveForScale();
            return cl;
        }

        applyHistEqIfNeeded();

        extractCornerRegions();

        cl = extractAndMatch(params);

        if (debug && (cl != null)) {
            Collection<PairInt> m1 = cl.getPoints1();
            Collection<PairInt> m2 = cl.getPoints2();
            MiscDebug.plotCorners(gsImg1.copyImage(), m1,
                debugTagPrefix + "_1_matched", 2);
            MiscDebug.plotCorners(gsImg2.copyImage(), m2,
                debugTagPrefix + "_2_matched", 2);
        }

        return cl;
    }

    /**
     * Solves for the image-to-image transformation via blob matching, then
     * tries to reuse the points from the scale solution as correspondences;
     * falls back to full corner extraction + matching when those points do
     * not span enough of the image intersection.
     *
     * @return the correspondence list, or null when scale could not be determined
     */
    private CorrespondenceList solveForScale() throws IOException,
        NoSuchAlgorithmException {

        BlobScaleFinderWrapper scaleFinder = null;

        if (debug) {
            scaleFinder = new BlobScaleFinderWrapper(img1, img2, debugTagPrefix);
        } else {
            scaleFinder = new BlobScaleFinderWrapper(img1, img2);
        }

        /*
        TODO:
        NOTE: if extractAndMatch is needed below, and if polar ciexy was
        returned as the algorithm type here, consider using polar ciexy
        w/ k=8, 16 or 32 on the color image and extract corners from that
        result.  reason being that at least one image set that is better
        solved w/ polar cie xy, the Venturi test images, has alot of texture
        in grass and ridgelines that is not present in the polar cie xy k=2
        images... (comment reconstructed; "are consistent" fragment belonged
        to the TODO in extractCornerRegions below)
        */

        params = scaleFinder.calculateScale();

        if (params == null) {
            stateSet.add(State.COULD_NOT_DETERMINE_SCALE);
            return null;
        }

        boolean didApplyHist = scaleFinder.img1Helper.didApplyHistEq();

        // keep the greyscale images prepared during scale solving so the
        // corner extraction below operates on the same data
        this.gsImg1 = scaleFinder.img1Helper.getGreyscaleImage().copyImage();
        this.gsImg2 = scaleFinder.img2Helper.getGreyscaleImage().copyImage();

        if (didApplyHist) {
            stateSet.add(State.DID_APPLY_HIST_EQ);
        }

        List<FeatureComparisonStat> stats = null;

        CorrespondenceList cl = null;

        // translation tolerance from the solution's standard deviations,
        // clamped to a minimum of 3 pixels; default 10 when unavailable
        int tolXY;
        if (params.getStandardDeviations() != null) {
            tolXY = Math.round(Math.max(params.getStandardDeviations()[2],
                params.getStandardDeviations()[3]));
            if (tolXY < 3) {
                tolXY = 3;
            }
        } else {
            tolXY = 10;
        }

        // try to match the remaining points created in the scale finder
        if (scaleFinder.getAllCornerRegions1OfSolution() != null) {

            // corner-region based solution path
            List<List<CornerRegion>> transformedFilteredC1 =
                new ArrayList<List<CornerRegion>>();
            List<List<CornerRegion>> filteredC1 =
                new ArrayList<List<CornerRegion>>();
            List<List<CornerRegion>> filteredC2 =
                new ArrayList<List<CornerRegion>>();

            // keep only regions that fall inside the two images' overlap
            FeatureMatcher.filterForIntersection(params, tolXY,
                scaleFinder.getAllCornerRegions1OfSolution(),
                scaleFinder.getAllCornerRegions2OfSolution(),
                transformedFilteredC1, filteredC1, filteredC2);

            if (debug) {
                try {
                    Collection<CornerRegion> set1 = new HashSet<CornerRegion>();
                    Collection<CornerRegion> set2 = new HashSet<CornerRegion>();
                    for (List<CornerRegion> list : filteredC1) {
                        set1.addAll(list);
                    }
                    for (List<CornerRegion> list : filteredC2) {
                        set2.addAll(list);
                    }
                    MiscDebug.writeImage(set1, gsImg1.copyToColorGreyscale(),
                        debugTagPrefix + "_filtered_1_corners_");
                    MiscDebug.writeImage(set2, gsImg2.copyToColorGreyscale(),
                        debugTagPrefix + "_filtered_2_corners_");
                } catch (IOException ex) {
                    Logger.getLogger(FeatureMatcherWrapper.class.getName()).log(
                        Level.SEVERE, null, ex);
                }
            }

            stats = matchRemainingBlobCornerPoints(scaleFinder,
                transformedFilteredC1, filteredC1, filteredC2);

            // accept when at least 7 matches cover most of the intersection
            if ((stats.size() >= 7) && statsCoverIntersection(stats, filteredC2)) {

                List<PairInt> matched1 = new ArrayList<PairInt>();
                List<PairInt> matched2 = new ArrayList<PairInt>();
                populateLists(stats, matched1, matched2);

                // NOTE(review): getStandardDeviations() may be null here
                // (see tolXY fallback above) — would NPE; verify upstream
                // always populates it on this path.
                cl = new CorrespondenceList(params.getScale(),
                    Math.round(params.getRotationInDegrees()),
                    Math.round(params.getTranslationX()),
                    Math.round(params.getTranslationY()),
                    Math.round(params.getStandardDeviations()[0]),
                    Math.round(params.getStandardDeviations()[2]),
                    Math.round(params.getStandardDeviations()[3]),
                    matched1, matched2);

                return cl;
            }

            cl = extractAndMatch(params);

        } else {

            // solve for contours
            List<List<CurvatureScaleSpaceContour>> transformedFilteredC1 =
                new ArrayList<List<CurvatureScaleSpaceContour>>();
            List<List<CurvatureScaleSpaceContour>> filteredC1 =
                new ArrayList<List<CurvatureScaleSpaceContour>>();
            List<List<CurvatureScaleSpaceContour>> filteredC2 =
                new ArrayList<List<CurvatureScaleSpaceContour>>();

            FeatureMatcher.filterForIntersection2(params, tolXY,
                scaleFinder.getAllContours1OfSolution(),
                scaleFinder.getAllContours2OfSolution(),
                transformedFilteredC1, filteredC1, filteredC2);

            if (debug) {
                List<CurvatureScaleSpaceContour> set1 =
                    new ArrayList<CurvatureScaleSpaceContour>();
                List<CurvatureScaleSpaceContour> set2 =
                    new ArrayList<CurvatureScaleSpaceContour>();
                for (List<CurvatureScaleSpaceContour> list : filteredC1) {
                    set1.addAll(list);
                }
                for (List<CurvatureScaleSpaceContour> list : filteredC2) {
                    set2.addAll(list);
                }
                MiscDebug.debugPlot(set1, img1.copyToImageExt(), 0, 0,
                    debugTagPrefix + "_filtered_1_corners_");
                MiscDebug.debugPlot(set2, img2.copyToImageExt(), 0, 0,
                    debugTagPrefix + "_filtered_2_corners_");
            }

            stats = matchRemainingBlobContourPoints(scaleFinder,
                transformedFilteredC1, filteredC1, filteredC2);

            if ((stats.size() >= 7) && statsCoverIntersection2(stats, filteredC2)) {

                List<PairInt> matched1 = new ArrayList<PairInt>();
                List<PairInt> matched2 = new ArrayList<PairInt>();
                populateLists(stats, matched1, matched2);

                cl = new CorrespondenceList(params.getScale(),
                    Math.round(params.getRotationInDegrees()),
                    Math.round(params.getTranslationX()),
                    Math.round(params.getTranslationY()),
                    Math.round(params.getStandardDeviations()[0]),
                    Math.round(params.getStandardDeviations()[2]),
                    Math.round(params.getStandardDeviations()[3]),
                    matched1, matched2);

                return cl;
            }

            cl = extractAndMatch(params);
        }

        // NOTE(review): this if/return is redundant — both branches return cl.
        if (cl != null) {
            return cl;
        }

        return cl;
    }

    /**
     * Creates the greyscale images (when not already created by the scale
     * solver) and applies histogram equalization when the two images'
     * brightness statistics differ by more than ~20%.
     */
    private void applyHistEqIfNeeded() {

        if (stateSet.contains(State.DID_APPLY_HIST_EQ)) {
            return;
        }

        if (gsImg1 != null) {
            // gs images were set during scale calculation
            return;
        }

        this.gsImg1 = img1.copyToGreyscale();
        this.gsImg2 = img2.copyToGreyscale();

        ImageStatistics stats1 = ImageStatisticsHelper.examineImage(gsImg1, true);
        ImageStatistics stats2 = ImageStatisticsHelper.examineImage(gsImg2, true);

        boolean performHistEq = false;

        // compare median-to-median across images and mean-to-median within
        // each image; >20% discrepancy in any of them triggers equalization
        double median1DivMedian2 = stats1.getMedian()/stats2.getMedian();
        double meanDivMedian1 = stats1.getMean()/stats1.getMedian();
        double meanDivMedian2 = stats2.getMean()/stats2.getMedian();

        if (
            ((median1DivMedian2 > 1) && ((median1DivMedian2 - 1) > 0.2))
            || ((median1DivMedian2 < 1) && (median1DivMedian2 < 0.8))) {
            performHistEq = true;
        } else if (
            ((meanDivMedian1 > 1) && ((meanDivMedian1 - 1) > 0.2))
            || ((meanDivMedian1 < 1) && (meanDivMedian1 < 0.8))) {
            performHistEq = true;
        } else if (
            ((meanDivMedian2 > 1) && ((meanDivMedian2 - 1) > 0.2))
            || ((meanDivMedian2 < 1) && (meanDivMedian2 < 0.8))) {
            performHistEq = true;
        }

        if (performHistEq) {
            log.info("use histogram equalization on the greyscale images");
            HistogramEqualization hEq = new HistogramEqualization(gsImg1);
            hEq.applyFilter();
            hEq = new HistogramEqualization(gsImg2);
            hEq.applyFilter();
            stateSet.add(State.DID_APPLY_HIST_EQ);
        }
    }

    /**
     * Blurs each greyscale image and extracts corner regions with the
     * curvature scale-space detector into cornerRegions1/cornerRegions2.
     */
    private void extractCornerRegions() {

        ImageProcessor imageProcessor = new ImageProcessor();
        imageProcessor.blur(gsImg1, SIGMA.ONE);

        CurvatureScaleSpaceCornerDetector detector = new
            CurvatureScaleSpaceCornerDetector(gsImg1);
        detector.doNotPerformHistogramEqualization();
        detector.findCorners();

        //TODO: revisit to make sure coordinate systems are consistent:
        cornerRegions1 = detector.getEdgeCornerRegions(true);
        //cornerRegions1 = detector.getEdgeCornerRegionsInOriginalReferenceFrame(true);

        imageProcessor.blur(gsImg2, SIGMA.ONE);

        detector = new CurvatureScaleSpaceCornerDetector(gsImg2);
        detector.doNotPerformHistogramEqualization();
        detector.findCorners();
        cornerRegions2 = detector.getEdgeCornerRegions(true);
        //cornerRegions2 = detector.getEdgeCornerRegionsInOriginalReferenceFrame(true);

        if (debug) {
            try {
                MiscDebug.writeImage(cornerRegions1, img1.copyImage(),
                    debugTagPrefix + "_1_corners_");
                MiscDebug.writeImage(cornerRegions2, img2.copyImage(),
                    debugTagPrefix + "_2_corners_");
            } catch (IOException ex) {
                Logger.getLogger(FeatureMatcherWrapper.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }

    /**
     * Runs the feature matcher over the previously extracted corner regions
     * under the given transformation, using tolerances derived from the
     * parameters' standard deviations when available.
     */
    private CorrespondenceList findCorrespondence(TransformationParameters
        parameters) {

        FeatureMatcher matcher = new FeatureMatcher();

        int tolXY;
        if (params.getStandardDeviations() != null) {
            tolXY = Math.round(Math.max(params.getStandardDeviations()[2],
                params.getStandardDeviations()[3]));
            if (tolXY < 3) {
                tolXY = 3;
            }
        } else {
            tolXY = transXYTol;
        }

        int dither = 1;
        //TODO: revise this
        if (tolXY > 3) {
            dither = 4;
        }

        CorrespondenceList cl = matcher.findSimilarFeatures(gsImg1,
            cornerRegions1.toArray(new CornerRegion[cornerRegions1.size()]),
            gsImg2,
            cornerRegions2.toArray(new CornerRegion[cornerRegions2.size()]),
            parameters, scaleTol, rotationInRadiansTol, tolXY, dither);

        return cl;
    }

    /**
     * For each blob in image1 (given already-transformed copies of its
     * corners), finds the best-matching blob in image2 whose centroid is
     * within tolerance, resolves conflicts where two image1 blobs claim the
     * same image2 blob, and merges the surviving feature-comparison stats
     * with those already in the scale solution.
     */
    private List<FeatureComparisonStat> matchRemainingBlobCornerPoints(
        BlobScaleFinderWrapper scaleFinder,
        List<List<CornerRegion>> filteredC1Transformed,
        List<List<CornerRegion>> filteredC1,
        List<List<CornerRegion>> filteredC2) {

        if (filteredC1Transformed.size() != filteredC1.size()) {
            throw new IllegalArgumentException("filteredC1Transformed and "
                + "filteredC1 are expected to be same size");
        }

        // use the association w/ tranformed blobs to make the matching faster
        List<FeatureComparisonStat> compStats =
            new ArrayList<FeatureComparisonStat>(
                scaleFinder.getSolution().getComparisonStats());

        /*
        choose the best for each '1' and if a high quality exists, store it
        for further quality check (theta and intensity) then add to compStats

        for transformedblob1
            init storage for best match to blob1
            for blob2
                if centroid within tolerance,
                    use features to match untransformed blob1 corners to
                    blob2 corners (this is the curve matcher within corner
                    matcher for combinations?)
                    if results are high quality and better than best,
                    assign it as best
            store best in map for blob1
        remove outliers by theta and by ssd
        return combined results
        */

        MiscellaneousCurveHelper curveHelper = new MiscellaneousCurveHelper();

        // best match per image1 blob index
        Map<Integer, IntensityFeatureComparisonStats> index1Map =
            new HashMap<Integer, IntensityFeatureComparisonStats>();

        // image2 blob index -> set of image1 blob indexes that claimed it
        Map<Integer, Set<Integer>> assignedIndex2 =
            new HashMap<Integer, Set<Integer>>();

        // image1 indexes involved in a many-to-one conflict
        Set<Integer> redo = new HashSet<Integer>();

        int tolXY;
        if (params.getStandardDeviations() != null) {
            tolXY = Math.round(Math.max(params.getStandardDeviations()[2],
                params.getStandardDeviations()[3]));
            if (tolXY < 3) {
                tolXY = 3;
            } else {
                tolXY += 2;
            }
        } else {
            tolXY = 10;
        }

        for (int i1 = 0; i1 < filteredC1Transformed.size(); ++i1) {

            List<CornerRegion> trC1List = filteredC1Transformed.get(i1);

            if (trC1List.isEmpty()) {
                continue;
            }

            List<CornerRegion> c1List = filteredC1.get(i1);

            double[] xyCen1 = curveHelper.calculateXYCentroids0(trC1List);

            IntensityFeatureComparisonStats best = null;

            for (int i2 = 0; i2 < filteredC2.size(); ++i2) {

                List<CornerRegion> c2List = filteredC2.get(i2);

                if (c2List.isEmpty()) {
                    continue;
                }

                double[] xyCen2 = curveHelper.calculateXYCentroids0(c2List);

                // skip blob pairs whose (transformed) centroids are not close
                double diffX = Math.abs(xyCen1[0] - xyCen2[0]);
                double diffY = Math.abs(xyCen1[1] - xyCen2[1]);

                if ((diffX > tolXY) || (diffY > tolXY)) {
                    continue;
                }

                ClosedCurveCornerMatcherWrapper mapper =
                    new ClosedCurveCornerMatcherWrapper();

                boolean matched = mapper.matchCorners(
                    scaleFinder.getSolutionFeatures1(),
                    scaleFinder.getSolutionFeatures2(),
                    c1List, c2List, true, gsImg1, gsImg2);

                if (!matched) {
                    continue;
                }

                //TODO: this should to be revised to scale w/ errors
                if (mapper.getSolvedCost() > 800) {
                    continue;
                }

                TransformationPair2 transformationPair = mapper.getSolution();
                transformationPair.setCornerListIndex1(i1);
                transformationPair.setCornerListIndex2(i2);

                TransformationParameters params2 =
                    transformationPair.getTransformationParameters();

                if (params2 == null) {
                    continue;
                }

                List<FeatureComparisonStat> compStats2 =
                    transformationPair.getNextCorner().getMatchedFeatureComparisonStats();

                // drop matches whose rotation disagrees with the solution's
                FeatureMatcher.removeDiscrepantThetaDiff(compStats2,
                    params.getRotationInDegrees());

                if (compStats2.size() < 2) {
                    continue;
                }

                /*
                int nm = compStats2.size();
                int x1 = compStats2.get(0).getImg1Point().getX();
                int y1 = compStats2.get(0).getImg1Point().getY();
                int x2 = compStats2.get(0).getImg2Point().getX();
                int y2 = compStats2.get(0).getImg2Point().getY();
                */

                IntensityFeatureComparisonStats stats2 =
                    new IntensityFeatureComparisonStats(i1, i2,
                        mapper.getSolvedCost(), params.getScale());
                stats2.addAll(compStats2);

                // keep the lower-cost candidate (compareTo == -1 means
                // stats2 ranks ahead of best)
                int comp = -1;
                if (best != null) {
                    comp = stats2.compareTo(best);
                }
                if (comp == -1) {
                    best = stats2;
                }
            }

            if (best != null) {

                index1Map.put(Integer.valueOf(i1), best);

                Set<Integer> a1 = assignedIndex2.get(Integer.valueOf(best.getIndex2()));

                if (a1 != null) {
                    // another image1 blob already claimed this image2 blob;
                    // mark all claimants for conflict resolution
                    redo.add(Integer.valueOf(i1));
                    for (Integer index1 : a1) {
                        redo.add(index1);
                    }
                } else {
                    a1 = new HashSet<Integer>();
                    assignedIndex2.put(Integer.valueOf(best.getIndex2()), a1);
                }
                a1.add(Integer.valueOf(best.getIndex1()));
            }
        }

        if (!redo.isEmpty()) {

            //TODO: consider using all points except conflicted indexes to
            // determine transformation params and then choose among conflict
            // those with closer match to expected transformed coordinates.
            // problem with this instead of SSD is it would perform worse for
            // projection.

            Set<Integer> resolved1 = new HashSet<Integer>();

            for (Integer redoIndex1 : redo) {

                if (resolved1.contains(redoIndex1)) {
                    continue;
                }

                // find the image2 index this image1 index was assigned to
                Integer conflictIndex2 = null;
                for (Entry<Integer, Set<Integer>> entry : assignedIndex2.entrySet()) {
                    Set<Integer> indexes1 = entry.getValue();
                    if (indexes1.contains(redoIndex1)) {
                        conflictIndex2 = entry.getKey();
                        break;
                    }
                }

                Set<Integer> conflictIndexes1 = assignedIndex2.get(conflictIndex2);

                //decide by SSD or by difference from transformed point 1's

                assert(conflictIndexes1 != null);

                // keep only the lowest adjusted-cost claimant
                double bestCost = Double.MAX_VALUE;
                Integer bestCostIndex1 = null;
                for (Integer index1 : conflictIndexes1) {
                    IntensityFeatureComparisonStats st = index1Map.get(index1);
                    if ((bestCostIndex1 == null) || (bestCost > st.getAdjustedCost())) {
                        bestCost = st.getAdjustedCost();
                        bestCostIndex1 = index1;
                    }
                    resolved1.add(index1);
                }
                for (Integer index1 : conflictIndexes1) {
                    if (index1.equals(bestCostIndex1)) {
                        continue;
                    }
                    index1Map.remove(index1);
                }
            }
        }

        List<FeatureComparisonStat> add = new ArrayList<FeatureComparisonStat>();

        for (Entry<Integer, IntensityFeatureComparisonStats> entry :
            index1Map.entrySet()) {

            // make sure not already in compStats
            // NOTE(review): `found` is declared and the `if (!found) add`
            // sits INSIDE the cStat loop, so a stat with no near-duplicate
            // is appended once per existing compStats entry (duplicates),
            // and never appended when compStats is empty. Likely the
            // declaration and the add were meant to be outside this loop —
            // verify intent before changing.
            for (FeatureComparisonStat stat : entry.getValue().getComparisonStats()) {

                PairInt p1 = stat.getImg1Point();
                PairInt p2 = stat.getImg2Point();

                for (FeatureComparisonStat cStat : compStats) {

                    boolean found = false;

                    PairInt p1c = cStat.getImg1Point();
                    PairInt p2c = cStat.getImg2Point();

                    if (p1c.equals(p1) || p2c.equals(p2)) {
                        found = true;
                        break;
                    }

                    int diffX1 = Math.abs(p1c.getX() - p1.getX());
                    int diffY1 = Math.abs(p1c.getY() - p1.getY());
                    if ((diffX1 < 5) && (diffY1 < 5)) {
                        found = true;
                        break;
                    }

                    int diffX2 = Math.abs(p2c.getX() - p2.getX());
                    int diffY2 = Math.abs(p2c.getY() - p2.getY());
                    if ((diffX2 < 5) && (diffY2 < 5)) {
                        found = true;
                        break;
                    }

                    if (!found) {
                        add.add(stat);
                    }
                }
            }
        }

        compStats.addAll(add);

        return compStats;
    }

    /**
     * Contour-based counterpart of matchRemainingBlobCornerPoints.
     * Not implemented yet.
     */
    private List<FeatureComparisonStat> matchRemainingBlobContourPoints(
        BlobScaleFinderWrapper scaleFinder,
        List<List<CurvatureScaleSpaceContour>> transformedFilteredC1,
        List<List<CurvatureScaleSpaceContour>> filteredC1,
        List<List<CurvatureScaleSpaceContour>> filteredC2) {

        throw new UnsupportedOperationException("Not supported yet.");
    }

    /**
     * Contour variant: collects the contour peak coordinates and delegates
     * to the float[]-based coverage check.
     */
    private boolean statsCoverIntersection2(List<FeatureComparisonStat> stats,
        List<List<CurvatureScaleSpaceContour>> filteredC2) {

        /*
        dividing the range in filteredC2 by 2 in x and 2 in y and returning
        true if at least one point2 in stats is found in each division.
        */

        int n = 0;
        for (List<CurvatureScaleSpaceContour> list : filteredC2) {
            n += list.size();
        }

        float[] xPoints = new float[n];
        float[] yPoints = new float[n];

        n = 0;
        for (List<CurvatureScaleSpaceContour> list : filteredC2) {
            for (CurvatureScaleSpaceContour cr : list) {
                float x = cr.getPeakDetails()[0].getXCoord();
                float y = cr.getPeakDetails()[0].getYCoord();
                xPoints[n] = x;
                yPoints[n] = y;
                ++n;
            }
        }

        return statsCoverIntersection(stats, xPoints, yPoints);
    }

    /**
     * Corner-region variant: collects the curvature-maximum coordinates and
     * delegates to the float[]-based coverage check.
     */
    private boolean statsCoverIntersection(List<FeatureComparisonStat> stats,
        List<List<CornerRegion>> filteredC2) {

        /*
        dividing the range in filteredC2 by 2 in x and 2 in y and returning
        true if at least one point2 in stats is found in each division.
        */

        int n = 0;
        for (List<CornerRegion> list : filteredC2) {
            n += list.size();
        }

        float[] xPoints = new float[n];
        float[] yPoints = new float[n];

        n = 0;
        for (List<CornerRegion> list : filteredC2) {
            for (CornerRegion cr : list) {
                float x = cr.getX()[cr.getKMaxIdx()];
                float y = cr.getY()[cr.getKMaxIdx()];
                xPoints[n] = x;
                yPoints[n] = y;
                ++n;
            }
        }

        return statsCoverIntersection(stats, xPoints, yPoints);
    }

    /**
     * Splits the candidate-point region into 4 quadrants (x by the global
     * midpoint, y by a per-half midpoint) and returns true when the matched
     * image2 points land in at least 3 of the 4 quadrants.
     */
    private boolean statsCoverIntersection(List<FeatureComparisonStat> stats,
        final float[] xPoints, final float[] yPoints) {

        int n = xPoints.length;

        QuickSort.sortBy1stThen2nd(xPoints, yPoints);

        float minX = xPoints[0];
        float maxX = xPoints[n - 1];
        float divX = (maxX + minX)/2.f;

        /*
        Finding y min and max within each of these division.
        The reason for doing this separately from y min max over all of
        filteredC2 is that the geometry of matchable points might not be
        rectangular.
        |    |    |
        |    |    |
        |    |    |
          0    1
        */
        float[] yMin = new float[2];
        Arrays.fill(yMin, Float.MAX_VALUE);
        // NOTE(review): Float.MIN_VALUE is the smallest POSITIVE float, not
        // the most-negative value; this init only works because pixel y
        // coordinates are non-negative here. -Float.MAX_VALUE would be the
        // general-purpose init.
        float[] yMax = new float[2];
        Arrays.fill(yMax, Float.MIN_VALUE);

        for (int i = 0; i < xPoints.length; ++i) {
            float x = xPoints[i];
            float y = yPoints[i];
            int cIdx = 1;
            if (x < divX) {
                cIdx = 0;
            }
            if (y < yMin[cIdx]) {
                yMin[cIdx] = y;
            }
            if (y > yMax[cIdx]) {
                yMax[cIdx] = y;
            }
        }

        float yDiv12 = (yMax[0] + yMin[0])/2.f;
        float yDiv03 = (yMax[1] + yMin[1])/2.f;

        /*
         2 | 3
        ---|---
         1 | 0
        */
        int[] counts = new int[4];

        for (FeatureComparisonStat stat : stats) {
            PairInt p2 = stat.getImg2Point();
            int x = p2.getX();
            int y = p2.getY();
            if (x < divX) {
                if (y < yDiv12) {
                    counts[1]++;
                } else {
                    counts[2]++;
                }
            } else {
                if (y < yDiv03) {
                    counts[0]++;
                } else {
                    counts[3]++;
                }
            }
        }

        int nq = 0;
        for (int i = 0; i < counts.length; ++i) {
            if (counts[i] > 0) {
                nq++;
            }
        }

        // check that there is at least 1 in each quadrant
        if (nq >= 3) {
        //if (nq == 4) {
            return true;
        }

        return false;
    }

    /** Copies the point pairs out of the stats into the two matched lists. */
    private void populateLists(List<FeatureComparisonStat> stats,
        List<PairInt> matched1, List<PairInt> matched2) {

        for (FeatureComparisonStat stat : stats) {
            int x1 = stat.getImg1Point().getX();
            int y1 = stat.getImg1Point().getY();
            int x2 = stat.getImg2Point().getX();
            int y2 = stat.getImg2Point().getY();
            matched1.add(new PairInt(x1, y1));
            matched2.add(new PairInt(x2, y2));
        }
    }

    /** Extracts corner regions, then matches them under the given parameters. */
    private CorrespondenceList extractAndMatch(
        TransformationParameters parameters) {

        extractCornerRegions();

        CorrespondenceList cl = findCorrespondence(parameters);

        if (cl != null) {
            // add stats in if not already present
            // NOTE(review): dead placeholder statement, presumably a
            // breakpoint anchor for the TODO above.
            int z = 1;
        }

        return cl;
    }
}
package ru.direc.sand;

import org.testng.Assert;
import org.testng.annotations.Test;

/**
 * TestNG checks for the {@code Primes} helpers, exercising them at the
 * int-range boundary: Integer.MAX_VALUE (2^31 - 1) is a Mersenne prime,
 * while Integer.MAX_VALUE - 2 is composite.
 */
public class PrimeTests {

    @Test
    public void testPrime() {
        int largestInt = Integer.MAX_VALUE;
        Assert.assertTrue(Primes.isPrime(largestInt));
    }

    @Test
    public void testPrimeFast() {
        int largestInt = Integer.MAX_VALUE;
        Assert.assertTrue(Primes.isPrimeFast(largestInt));
    }

    // Disabled: exercises the long overload, kept for manual runs.
    @Test(enabled = false)
    public void testPrimeLong() {
        long n = Integer.MAX_VALUE;
        Assert.assertTrue(Primes.isPrime(n));
    }

    @Test
    public void testNotPrime() {
        int composite = Integer.MAX_VALUE - 2;
        Assert.assertFalse(Primes.isPrime(composite));
    }
}
package ch.unizh.ini.jaer.projects.minliu; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Iterator; import java.util.Observable; import java.util.Observer; import com.jogamp.opengl.util.awt.TextRenderer; import ch.unizh.ini.jaer.projects.rbodo.opticalflow.AbstractMotionFlow; import com.jogamp.opengl.GL; import com.jogamp.opengl.GL2; import com.jogamp.opengl.GLAutoDrawable; import eu.seebetter.ini.chips.davis.imu.IMUSample; import java.awt.Color; import java.util.logging.Level; import net.sf.jaer.Description; import net.sf.jaer.DevelopmentStatus; import net.sf.jaer.chip.AEChip; import net.sf.jaer.event.ApsDvsEvent; import net.sf.jaer.event.ApsDvsEventPacket; import net.sf.jaer.event.EventPacket; import net.sf.jaer.eventprocessing.FilterChain; import net.sf.jaer.eventprocessing.filter.Steadicam; import net.sf.jaer.graphics.FrameAnnotater; /** * Uses patch matching to measureTT local optical flow. <b>Not</b> gradient * based, but rather matches local features backwards in time. * * @author Tobi and Min, Jan 2016 */ @Description("Computes optical flow with vector direction using binary block matching") @DevelopmentStatus(DevelopmentStatus.Status.Experimental) public class PatchMatchFlow extends AbstractMotionFlow implements Observer, FrameAnnotater { /* LDSP is Large Diamond Search Pattern, and SDSP mens Small Diamond Search Pattern. LDSP has 9 points and SDSP consists of 5 points. 
*/ private static final int LDSP[][] = {{0, -2}, {-1, -1}, {1, -1}, {-2, 0}, {0, 0}, {2, 0}, {-1, 1}, {1, 1}, {0, 2}}; private static final int SDSP[][] = {{0, -1}, {-1, 0}, {0, 0}, {1, 0}, {0, 1}}; private int[][][] histograms = null; private static final int NUM_SLICES = 3; // private int sx, sy; private int tMinus2SliceIdx = 0, tMinus1SliceIdx = 1, currentSliceIdx = 2; private int[][] currentSlice = null, tMinus1Slice = null, tMinus2Slice = null; // private ArrayList<Integer[]>[][] spikeTrains = null; // Spike trains for one block // private ArrayList<int[][]>[] histogramsAL = null; // private ArrayList<int[][]> currentAL = null, previousAL = null, previousMinus1AL = null; // One is for current, the second is for previous, the third is for the one before previous one private BitSet[] histogramsBitSet = null; private BitSet currentSli = null, tMinus1Sli = null, tMinus2Sli = null; private final SADResult tmpSadResult = new SADResult(0, 0, 0); // used to pass data back from min distance computation private int patchDimension = getInt("patchDimension", 9); // private int eventPatchDimension = getInt("eventPatchDimension", 3); // private int forwardEventNum = getInt("forwardEventNum", 10); private float cost = getFloat("cost", 0.001f); private float confidenceThreshold = getFloat("confidenceThreshold", 0f); private float validPixOccupancy = getFloat("validPixOccupancy", 0.01f); // threshold for valid pixel percent for one block private float weightDistance = getFloat("weightDistance", 0.9f); // confidence value consists of the distance and the dispersion, this value set the distance value // private int thresholdTime = getInt("thresholdTime", 1000000); // private int[][] lastFireIndex = null; // Events are numbered in time order for every block. This variable is for storing the last event index fired on all blocks. 
// private int[][] eventSeqStartTs = null; // private boolean preProcessEnable = false; private int skipProcessingEventsCount = getInt("skipProcessingEventsCount", 0); // skip this many events for processing (but not for accumulating to bitmaps) private int skipCounter = 0; private boolean adaptiveEventSkipping = getBoolean("adaptiveEventSkipping", false); private boolean outputSearchErrorInfo = false; // make user choose this slow down every time // results histogram for each packet private int[][] resultHistogram = null; private float FSCnt = 0, DSCorrectCnt = 0; float DSAverageNum = 0, DSAveError[] = {0, 0}; // Evaluate DS cost average number and the error. public enum PatchCompareMethod { JaccardDistance, HammingDistance, SAD/*, EventSqeDistance*/ }; private PatchCompareMethod patchCompareMethod = PatchCompareMethod.valueOf(getString("patchCompareMethod", PatchCompareMethod.HammingDistance.toString())); public enum SearchMethod { FullSearch, DiamondSearch, CrossDiamondSearch }; private SearchMethod searchMethod = SearchMethod.valueOf(getString("searchMethod", SearchMethod.FullSearch.toString())); private int sliceDurationUs = getInt("sliceDurationUs", 100000); private int sliceEventCount = getInt("sliceEventCount", 1000); private boolean rewindFlg = false; // The flag to indicate the rewind event. 
private FilterChain filterChain; private Steadicam cameraMotion; // calibration private boolean calibrating = false; // used to flag calibration state private int calibrationSampleCount = 0; private int NUM_CALIBRATION_SAMPLES_DEFAULT = 800; // 400 samples /sec protected int numCalibrationSamples = getInt("numCalibrationSamples", NUM_CALIBRATION_SAMPLES_DEFAULT); TextRenderer imuTextRenderer = null; private boolean showGrid = getBoolean("showGrid", true); private boolean displayResultHistogram = getBoolean("displayResultHistogram", true); public enum SliceMethod { ConstantDuration, ConstantEventNumber, AdaptationDuration }; private SliceMethod sliceMethod = SliceMethod.valueOf(getString("sliceMethod", SliceMethod.ConstantDuration.toString())); private int eventCounter = 0; private int sliceLastTs = 0; public PatchMatchFlow(AEChip chip) { super(chip); filterChain = new FilterChain(chip); cameraMotion = new Steadicam(chip); cameraMotion.setFilterEnabled(true); cameraMotion.setDisableRotation(true); cameraMotion.setDisableTranslation(true); // filterChain.add(cameraMotion); setEnclosedFilterChain(filterChain); String patchTT = "Block matching"; String eventSqeMatching = "Event squence matching"; String preProcess = "Denoise"; String metricConfid = "Confidence of current metric"; chip.addObserver(this); // to allocate memory once chip size is known // setPropertyTooltip(preProcess, "preProcessEnable", "enable this to denoise before data processing"); // setPropertyTooltip(preProcess, "forwardEventNum", "Number of events have fired on the current block since last processing"); setPropertyTooltip(metricConfid, "confidenceThreshold", "<html>Confidence threshold for rejecting unresonable value; Range from 0 to 1. <p>Higher value means it is harder to accept the event. <br>Set to 0 to accept all results."); setPropertyTooltip(metricConfid, "validPixOccupancy", "<html>Threshold for valid pixel percent for each block; Range from 0 to 1. 
<p>If either matching block is less occupied than this fraction, no motion vector will be calculated."); setPropertyTooltip(metricConfid, "weightDistance", "<html>The confidence value consists of the distance and the dispersion; <br>weightDistance sets the weighting of the distance value compared with the dispersion value; Range from 0 to 1. <p>To count only e.g. hamming distance, set weighting to 1. <p> To count only dispersion, set to 0."); setPropertyTooltip(patchTT, "patchDimension", "linear dimenion of patches to match, in pixels"); setPropertyTooltip(patchTT, "searchDistance", "search distance for matching patches, in pixels"); setPropertyTooltip(patchTT, "patchCompareMethod", "method to compare two patches"); setPropertyTooltip(patchTT, "searchMethod", "method to search patches"); setPropertyTooltip(patchTT, "sliceDurationUs", "duration of bitmaps in us, also called sample interval, when ConstantDuration method is used"); setPropertyTooltip(patchTT, "sliceEventCount", "number of events collected to fill a slice, when ConstantEventNumber method is used"); setPropertyTooltip(patchTT, "sliceMethod", "set method for determining time slice duration for block matching"); setPropertyTooltip(patchTT, "skipProcessingEventsCount", "skip this many events for processing (but not for accumulating to bitmaps)"); setPropertyTooltip(patchTT, "adaptiveEventSkipping", "enables adaptive event skipping depending on free time left in AEViewer animation loop"); setPropertyTooltip(patchTT, "outputSearchErrorInfo", "enables displaying the search method error information"); // setPropertyTooltip(eventSqeMatching, "cost", "The cost to translation one event to the other position"); // setPropertyTooltip(eventSqeMatching, "thresholdTime", "The threshold value of interval time between the first event and the last event"); // setPropertyTooltip(eventSqeMatching, "sliceEventCount", "number of collected events in each bitmap"); // setPropertyTooltip(eventSqeMatching, "eventPatchDimension", 
"linear dimenion of patches to match, in pixels"); setPropertyTooltip(dispTT, "displayOutputVectors", "display the output motion vectors or not"); setPropertyTooltip(dispTT, "displayResultHistogram", "display the output motion vectors histogram to show disribution of results for each packet. Only implemented for HammingDistance"); } @Override synchronized public EventPacket filterPacket(EventPacket in) { setupFilter(in); checkArrays(); adaptEventSkipping(); if (resultHistogram == null || resultHistogram.length != 2 * searchDistance + 1) { int dim = 2 * searchDistance + 1; // e.g. search distance 1, dim=3, 3x3 possibilties (including zero motion) resultHistogram = new int[dim][dim]; } else { for (int[] h : resultHistogram) { Arrays.fill(h, 0); } } ApsDvsEventPacket in2 = (ApsDvsEventPacket) in; Iterator itr = in2.fullIterator(); // Wfffsfe also need IMU data, so here we use the full iterator. while (itr.hasNext()) { Object ein = itr.next(); if (ein == null) { log.warning("null event passed in, returning input packet"); return in; } if (!extractEventInfo(ein)) { continue; } ApsDvsEvent apsDvsEvent = (ApsDvsEvent) ein; if (apsDvsEvent.isImuSample()) { IMUSample s = apsDvsEvent.getImuSample(); continue; } if (apsDvsEvent.isApsData()) { continue; } // inItr = in.inputIterator; if (measureAccuracy || discardOutliersForStatisticalMeasurementEnabled) { imuFlowEstimator.calculateImuFlow((ApsDvsEvent) inItr.next()); setGroundTruth(); } if (xyFilter()) { continue; } countIn++; // compute flow SADResult result = null; // int blockLocX = x / eventPatchDimension; // int blockLocY = y / eventPatchDimension; // Build the spike trains of every block, every block is consist of 3*3 pixels. 
// if (spikeTrains[blockLocX][blockLocY] == null) { // spikeTrains[blockLocX][blockLocY] = new ArrayList(); // int spikeBlokcLength = spikeTrains[blockLocX][blockLocY].size(); // int previousTsInterval = 0; // if (spikeBlokcLength == 0) { // previousTsInterval = ts; // } else { // previousTsInterval = ts - spikeTrains[blockLocX][blockLocY].get(spikeBlokcLength - 1)[0]; // if (preProcessEnable || patchCompareMethod == PatchCompareMethod.EventSqeDistance) { // spikeTrains[blockLocX][blockLocY].add(new Integer[]{ts, type}); switch (patchCompareMethod) { case HammingDistance: maybeRotateSlices(); if (!accumulateEvent(in)) { break; } // if (preProcessEnable) { // // There are enough events fire on the specific block now. // if ((spikeTrains[blockLocX][blockLocY].size() - lastFireIndex[blockLocX][blockLocY]) >= forwardEventNum) { // lastFireIndex[blockLocX][blockLocY] = spikeTrains[blockLocX][blockLocY].size() - 1; // result = minHammingDistance(x, y, tMinus2Sli, tMinus1Sli); // result.dx = (result.dx / sliceDurationUs) * 1000000; // result.dy = (result.dy / sliceDurationUs) * 1000000; // } else { result = minHammingDistance(x, y, tMinus2Sli, tMinus1Sli); result.dx = (result.dx / sliceDurationUs) * 1000000; // hack, convert to pix/second result.dy = (result.dy / sliceDurationUs) * 1000000; break; case SAD: maybeRotateSlices(); if (!accumulateEvent(in)) { break; } // if (preProcessEnable) { // // There're enough events fire on the specific block now // if ((spikeTrains[blockLocX][blockLocY].size() - lastFireIndex[blockLocX][blockLocY]) >= forwardEventNum) { // lastFireIndex[blockLocX][blockLocY] = spikeTrains[blockLocX][blockLocY].size() - 1; // result = minSad(x, y, tMinus2Sli, tMinus1Sli); // result.dx = (result.dx / sliceDurationUs) * 1000000; // result.dy = (result.dy / sliceDurationUs) * 1000000; // } else { result = minSad(x, y, tMinus2Sli, tMinus1Sli); result.dx = (result.dx / sliceDurationUs) * 1000000; result.dy = (result.dy / sliceDurationUs) * 1000000; break; 
case JaccardDistance: maybeRotateSlices(); if (!accumulateEvent(in)) { break; } // if (preProcessEnable) { // // There're enough events fire on the specific block now // if ((spikeTrains[blockLocX][blockLocY].size() - lastFireIndex[blockLocX][blockLocY]) >= forwardEventNum) { // lastFireIndex[blockLocX][blockLocY] = spikeTrains[blockLocX][blockLocY].size() - 1; // result = minJaccardDistance(x, y, tMinus2Sli, tMinus1Sli); // result.dx = (result.dx / sliceDurationUs) * 1000000; // result.dy = (result.dy / sliceDurationUs) * 1000000; // } else { result = minJaccardDistance(x, y, tMinus2Sli, tMinus1Sli); result.dx = (result.dx / sliceDurationUs) * 1000000; result.dy = (result.dy / sliceDurationUs) * 1000000; break; // case EventSqeDistance: // if (previousTsInterval < 0) { // spikeTrains[blockLocX][blockLocY].remove(spikeTrains[blockLocX][blockLocY].size() - 1); // continue; // if (previousTsInterval >= thresholdTime) { // float maxDt = 0; // float[][] dataPoint = new float[9][2]; // if ((blockLocX >= 1) && (blockLocY >= 1) && (blockLocX <= 238) && (blockLocY <= 178)) { // for (int ii = -1; ii < 2; ii++) { // for (int jj = -1; jj < 2; jj++) { // float dt = ts - eventSeqStartTs[blockLocX + ii][blockLocY + jj]; // // Remove the seq1 itself // if ((0 == ii) && (0 == jj)) { // // continue; // dt = 0; // dataPoint[((ii + 1) * 3) + (jj + 1)][0] = dt; // if (dt > maxDt) { // // result = minVicPurDistance(blockLocX, blockLocY); // eventSeqStartTs[blockLocX][blockLocY] = ts; // boolean allZeroFlg = true; // for (int mm = 0; mm < 9; mm++) { // for (int nn = 0; nn < 1; nn++) { // if (dataPoint[mm][nn] != 0) { // allZeroFlg = false; // if (allZeroFlg) { // continue; // KMeans cluster = new KMeans(); // cluster.setData(dataPoint); // int[] initialValue = new int[3]; // initialValue[0] = 0; // initialValue[1] = 4; // initialValue[2] = 8; // cluster.setInitialByUser(initialValue); // cluster.cluster(); // ArrayList<ArrayList<Integer>> kmeansResult = cluster.getResult(); // float[][] 
classData = cluster.getClassData(); // int firstClusterIdx = -1, secondClusterIdx = -1, thirdClusterIdx = -1; // for (int i = 0; i < 3; i++) { // if (kmeansResult.get(i).contains(0)) { // firstClusterIdx = i; // if (kmeansResult.get(i).contains(4)) { // secondClusterIdx = i; // if (kmeansResult.get(i).contains(8)) { // thirdClusterIdx = i; // if ((kmeansResult.get(firstClusterIdx).size() == 3) // && (kmeansResult.get(firstClusterIdx).size() == 3) // && (kmeansResult.get(firstClusterIdx).size() == 3) // && kmeansResult.get(firstClusterIdx).contains(1) // && kmeansResult.get(firstClusterIdx).contains(2)) { // result.dx = (-1 / (classData[secondClusterIdx][0] - classData[firstClusterIdx][0])) * 1000000 * 0.2f * eventPatchDimension;; // result.dy = 0; // if ((kmeansResult.get(firstClusterIdx).size() == 3) // && (kmeansResult.get(firstClusterIdx).size() == 3) // && (kmeansResult.get(firstClusterIdx).size() == 3) // && kmeansResult.get(thirdClusterIdx).contains(2) // && kmeansResult.get(thirdClusterIdx).contains(5)) { // result.dy = (-1 / (classData[thirdClusterIdx][0] - classData[secondClusterIdx][0])) * 1000000 * 0.2f * eventPatchDimension;; // result.dx = 0; // break; } if (result == null) { continue; // maybe some property change caused this } vx = result.dx; vy = result.dy; v = (float) Math.sqrt((vx * vx) + (vy * vy)); // reject values that are unreasonable if (isNotSufficientlyAccurate(result)) { continue; } if (resultHistogram != null) { resultHistogram[result.xidx][result.yidx]++; } processGoodEvent(); } if (rewindFlg) { rewindFlg = false; sliceLastTs = 0; // final int sx = chip.getSizeX(), sy = chip.getSizeY(); // for (int i = 0; i < sx; i++) { // for (int j = 0; j < sy; j++) { // if (spikeTrains != null && spikeTrains[i][j] != null) { // spikeTrains[i][j] = null; // if (lastFireIndex != null) { // lastFireIndex[i][j] = 0; // eventSeqStartTs[i][j] = 0; } motionFlowStatistics.updatePacket(countIn, countOut); return isDisplayRawInput() ? 
in : dirPacket; } @Override public void annotate(GLAutoDrawable drawable) { super.annotate(drawable); if (displayResultHistogram && resultHistogram != null) { GL2 gl = drawable.getGL().getGL2(); // draw histogram as shaded in 2d hist above color wheel // normalize hist int max = 0; for (int[] h : resultHistogram) { for (int v : h) { if (v > max) { max = v; } } } if (max == 0) { return; } final float maxRecip = 1f / max; int dim = resultHistogram.length; float s = 8; // chip pixels/bin gl.glPushMatrix(); final float scale = 30 / (2 * searchDistance + 1); gl.glTranslatef(-35, .65f * chip.getSizeY(), 0); gl.glScalef(scale, scale, 1); gl.glColor3f(0, 0, 1); gl.glLineWidth(2f); gl.glBegin(GL.GL_LINE_LOOP); gl.glVertex2f(0, 0); gl.glVertex2f(dim, 0); gl.glVertex2f(dim, dim); gl.glVertex2f(0, dim); gl.glEnd(); for (int x = 0; x < dim; x++) { for (int y = 0; y < dim; y++) { float g = maxRecip * resultHistogram[x][y]; gl.glColor3f(g, g, g); gl.glBegin(GL2.GL_QUADS); gl.glVertex2f(x, y); gl.glVertex2f(x + 1, y); gl.glVertex2f(x + 1, y + 1); gl.glVertex2f(x, y + 1); gl.glEnd(); } } gl.glPopMatrix(); } } @Override public synchronized void resetFilter() { super.resetFilter(); eventCounter = 0; lastTs = Integer.MIN_VALUE; if ((histograms == null) || (histograms.length != subSizeX) || (histograms[0].length != subSizeY)) { if ((NUM_SLICES == 0) && (subSizeX == 0) && (subSizeX == 0)) { return; } histograms = new int[NUM_SLICES][subSizeX][subSizeY]; } for (int[][] a : histograms) { for (int[] b : a) { Arrays.fill(b, 0); } } if (histogramsBitSet == null) { histogramsBitSet = new BitSet[NUM_SLICES]; } for (int ii = 0; ii < NUM_SLICES; ii++) { histogramsBitSet[ii] = new BitSet(subSizeX * subSizeY); } // // Initialize 3 ArrayList's histogram, every pixel has three patches: current, previous and previous-1 // if (histogramsAL == null) { // histogramsAL = new ArrayList[3]; // if ((spikeTrains == null) & (subSizeX != 0) & (subSizeY != 0)) { // spikeTrains = new 
ArrayList[subSizeX][subSizeY]; // if (patchDimension != 0) { // int colPatchCnt = subSizeX / patchDimension; // int rowPatchCnt = subSizeY / patchDimension; //// for (int ii = 0; ii < NUM_SLICES; ii++) { //// histogramsAL[ii] = new ArrayList(); //// for (int jj = 0; jj < (colPatchCnt * rowPatchCnt); jj++) { //// int[][] patch = new int[patchDimension][patchDimension]; //// histogramsAL[ii].add(patch); tMinus2SliceIdx = 0; tMinus1SliceIdx = 1; currentSliceIdx = 2; assignSliceReferences(); sliceLastTs = 0; rewindFlg = true; } @Override public void update(Observable o, Object arg) { if (!isFilterEnabled()) { return; } super.update(o, arg); if ((o instanceof AEChip) && (chip.getNumPixels() > 0)) { resetFilter(); } } /** * uses the current event to maybe rotate the slices */ private void maybeRotateSlices() { int dt = ts - sliceLastTs; switch (sliceMethod) { case ConstantDuration: if (rewindFlg) { return; } if ((dt < sliceDurationUs) || (dt < 0)) { return; } break; case ConstantEventNumber: if (eventCounter++ < sliceEventCount) { return; } case AdaptationDuration: log.warning("The adaptation method is not supported yet."); return; } /* The index cycle is " current idx -> t1 idx -> t2 idx -> current idx". 
Change the index, the change should like this: next t2 = previous t1 = histogram(previous t2 idx + 1); next t1 = previous current = histogram(previous t1 idx + 1); */ currentSliceIdx = (currentSliceIdx + 1) % NUM_SLICES; tMinus1SliceIdx = (tMinus1SliceIdx + 1) % NUM_SLICES; tMinus2SliceIdx = (tMinus2SliceIdx + 1) % NUM_SLICES; sliceEventCount = 0; sliceLastTs = ts; assignSliceReferences(); } private int updateAdaptDuration() { return 1000; } private void assignSliceReferences() { currentSlice = histograms[currentSliceIdx]; tMinus1Slice = histograms[tMinus1SliceIdx]; tMinus2Slice = histograms[tMinus2SliceIdx]; currentSli = histogramsBitSet[currentSliceIdx]; tMinus1Sli = histogramsBitSet[tMinus1SliceIdx]; tMinus2Sli = histogramsBitSet[tMinus2SliceIdx]; currentSli.clear(); } /** * Accumulates the current event to the current slice * * @return true if subsequent processing should done, false if it should be * skipped for efficiency */ private boolean accumulateEvent(EventPacket in) { currentSlice[x][y] += e.getPolaritySignum(); currentSli.set((x + 1) + (y * subSizeX)); // All evnets wheather 0 or 1 will be set in the BitSet Slice. if (in.isTimedOut()) { return false; } if (skipProcessingEventsCount == 0) { return true; } if (skipCounter++ < skipProcessingEventsCount) { return false; } skipCounter = 0; return true; } private void clearSlice(int idx) { for (int[] a : histograms[idx]) { Arrays.fill(a, 0); } } /** * Computes hamming eight around point x,y using patchDimension and * searchDistance * * @param x coordinate in subsampled space * @param y * @param prevSlice * @param curSlice * @return SADResult that provides the shift and SAD value */ private SADResult minHammingDistance(int x, int y, BitSet prevSlice, BitSet curSlice) { float minSum = Integer.MAX_VALUE, minSum1 = Integer.MAX_VALUE, sum = 0; float FSDx = 0, FSDy = 0, DSDx = 0, DSDy = 0; // This is for testing the DS search accuracy. 
int searchRange = 2 * searchDistance + 1; // The maxium search index, for xidx and yidx. float sumArray1[][] = new float[2 * searchDistance + 1][2 * searchDistance + 1]; for (float[] row : sumArray1) { Arrays.fill(row, Integer.MAX_VALUE); } if (outputSearchErrorInfo) { searchMethod = SearchMethod.FullSearch; } else { searchMethod = getSearchMethod(); } switch (searchMethod) { case FullSearch: for (int dx = -searchDistance; dx <= searchDistance; dx++) { for (int dy = -searchDistance; dy <= searchDistance; dy++) { sum = hammingDistance(x, y, dx, dy, prevSlice, curSlice); sumArray1[dx + searchDistance][dy + searchDistance] = sum; if (sum <= minSum1) { minSum1 = sum; tmpSadResult.dx = dx; tmpSadResult.dy = dy; tmpSadResult.sadValue = minSum1; } } } if (outputSearchErrorInfo) { FSCnt += 1; FSDx = tmpSadResult.dx; FSDy = tmpSadResult.dy; } else { break; } case DiamondSearch: /* The center of the LDSP or SDSP could change in the iteration process, so we need to use a variable to represent it. In the first interation, it's the Zero Motion Potion (ZMP). */ int xCenter = x, yCenter = y; /* x offset of center point relative to ZMP, y offset of center point to ZMP. x offset of center pointin positive number to ZMP, y offset of center point in positive number to ZMP. */ int dx, dy, xidx, yidx; int minPointIdx = 0; // Store the minimum point index. boolean SDSPFlg = false; // If this flag is set true, then it means LDSP search is finished and SDSP search could start. /* If one block has been already calculated, the computedFlg will be set so we don't to do the calculation again. */ boolean computedFlg[][] = new boolean[2 * searchDistance + 1][2 * searchDistance + 1]; for (boolean[] row : computedFlg) { Arrays.fill(row, false); } float sumArray[][] = new float[2 * searchDistance + 1][2 * searchDistance + 1]; for (float[] row : sumArray) { Arrays.fill(row, Integer.MAX_VALUE); } if (searchDistance == 1) { // LDSP search can only be applied for search distance >= 2. 
SDSPFlg = true; } while (!SDSPFlg) { /* 1. LDSP search */ for (int pointIdx = 0; pointIdx < LDSP.length; pointIdx++) { dx = LDSP[pointIdx][0] + xCenter - x; dy = LDSP[pointIdx][1] + yCenter - y; xidx = dx + searchDistance; yidx = dy + searchDistance; // Point to be searched is out of search area, skip it. if (xidx >= searchRange || yidx >= searchRange || xidx < 0 || yidx < 0) { continue; } /* We just calculate the blocks that haven't been calculated before */ if (computedFlg[xidx][yidx] == false) { sumArray[xidx][yidx] = hammingDistance(x, y, dx, dy, prevSlice, curSlice); computedFlg[xidx][yidx] = true; if (outputSearchErrorInfo) { DSAverageNum++; } if (outputSearchErrorInfo) { if (sumArray[xidx][yidx] != sumArray1[xidx][yidx]) { log.warning("It seems that there're some bugs in the DS algorithm."); } } } if (sumArray[xidx][yidx] <= minSum) { minSum = sumArray[xidx][yidx]; minPointIdx = pointIdx; } } /* 2. Check the minimum value position is in the center or not. */ xCenter = xCenter + LDSP[minPointIdx][0]; yCenter = yCenter + LDSP[minPointIdx][1]; if (minPointIdx == 4) { // It means it's in the center, so we should break the loop and go to SDSP search. SDSPFlg = true; } else { SDSPFlg = false; } } /* 3. SDSP Search */ for (int pointIdx = 0; pointIdx < SDSP.length; pointIdx++) { dx = SDSP[pointIdx][0] + xCenter - x; dy = SDSP[pointIdx][1] + yCenter - y; xidx = dx + searchDistance; yidx = dy + searchDistance; // Point to be searched is out of search area, skip it. 
if (xidx >= searchRange || yidx >= searchRange || xidx < 0 || yidx < 0) { continue; } /* We just calculate the blocks that haven't been calculated before */ if (computedFlg[xidx][yidx] == false) { sumArray[xidx][yidx] = hammingDistance(x, y, dx, dy, prevSlice, curSlice); computedFlg[xidx][yidx] = true; if (outputSearchErrorInfo) { DSAverageNum++; } if (outputSearchErrorInfo) { if (sumArray[xidx][yidx] != sumArray1[xidx][yidx]) { log.warning("It seems that there're some bugs in the DS algorithm."); } } } if (sumArray[xidx][yidx] <= minSum) { minSum = sumArray[xidx][yidx]; tmpSadResult.dx = dx; tmpSadResult.dy = dy; tmpSadResult.sadValue = minSum; } } if (outputSearchErrorInfo) { DSDx = tmpSadResult.dx; DSDy = tmpSadResult.dy; } break; case CrossDiamondSearch: break; } tmpSadResult.xidx = (int) tmpSadResult.dx + searchDistance; tmpSadResult.yidx = (int) tmpSadResult.dy + searchDistance; // what a hack.... if (outputSearchErrorInfo) { if (DSDx == FSDx && DSDy == FSDy) { DSCorrectCnt += 1; } else { DSAveError[0] += Math.abs(DSDx - FSDx); DSAveError[1] += Math.abs(DSDy - FSDy); } if (0 == FSCnt % 10000) { log.log(Level.INFO, "Correct Diamond Search times are {0}, Full Search times are {1}, accuracy is {2}, averageNumberPercent is {3}, averageError is ({4}, {5})", new Object[]{DSCorrectCnt, FSCnt, DSCorrectCnt / FSCnt, DSAverageNum / (searchRange * searchRange * FSCnt), DSAveError[0] / FSCnt, DSAveError[1] / (FSCnt - DSCorrectCnt)}); } } return tmpSadResult; } /** * computes Hamming distance centered on x,y with patch of patchSize for * prevSliceIdx relative to curSliceIdx patch. 
* * @param x coordinate x in subSampled space * @param y coordinate y in subSampled space * @param dx * @param dy * @param prevSlice * @param curSlice * @return Hamming Distance value */ private float hammingDistance(int x, int y, int dx, int dy, BitSet prevSlice, BitSet curSlice) { float retVal = 0, hd = 0; int blockRadius = patchDimension / 2; float validPixNumCurrSli = 0, validPixNumPrevSli = 0; // The valid pixel number in the current block // Make sure 0<=xx+dx<subSizeX, 0<=xx<subSizeX and 0<=yy+dy<subSizeY, 0<=yy<subSizeY, or there'll be arrayIndexOutOfBoundary exception. if ((x < (blockRadius + dx)) || (x >= ((subSizeX - blockRadius) + dx)) || (x < blockRadius) || (x >= (subSizeX - blockRadius)) || (y < (blockRadius + dy)) || (y >= ((subSizeY - blockRadius) + dy)) || (y < blockRadius) || (y >= (subSizeY - blockRadius))) { return 1; } for (int xx = x - blockRadius; xx <= (x + blockRadius); xx++) { for (int yy = y - blockRadius; yy <= (y + blockRadius); yy++) { boolean currSlicePol = curSlice.get((xx + 1) + ((yy) * subSizeX)); // binary value on (xx, yy) for current slice boolean prevSlicePol = prevSlice.get(((xx + 1) - dx) + ((yy - dy) * subSizeX)); // binary value on (xx, yy) for previous slice if (currSlicePol != prevSlicePol) { hd += 1; } if (currSlicePol == true) { validPixNumCurrSli += 1; } if (prevSlicePol == true) { validPixNumPrevSli += 1; } } } // TODD: NEXT WORK IS TO DO THE RESEARCH ON WEIGHTED HAMMING DISTANCE // Calculate the metric confidence value float validPixNum = this.validPixOccupancy * (((2 * blockRadius) + 1) * ((2 * blockRadius) + 1)); if ((validPixNumCurrSli <= validPixNum) || (validPixNumPrevSli <= validPixNum)) { // If valid pixel number of any slice is 0, then we set the distance to very big value so we can exclude it. retVal = 1; } else { /* retVal consists of the distance and the dispersion. dispersion is used to describe the spatial relationship within one block. 
Here we use the difference between validPixNumCurrSli and validPixNumPrevSli to calculate the dispersion. Inspired by paper "Measuring the spatial dispersion of evolutionist search process: application to Walksat" by Alain Sidaner. */ retVal = ((hd * weightDistance) + (Math.abs(validPixNumCurrSli - validPixNumPrevSli) * (1 - weightDistance))) / (((2 * blockRadius) + 1) * ((2 * blockRadius) + 1)); } return retVal; } /** * Computes hamming weight around point x,y using patchDimension and * searchDistance * * @param x coordinate in subsampled space * @param y * @param prevSlice * @param curSlice * @return SADResult that provides the shift and SAD value */ private SADResult minJaccardDistance(int x, int y, BitSet prevSlice, BitSet curSlice) { float minSum = Integer.MAX_VALUE, sum = 0; SADResult sadResult = new SADResult(0, 0, 0); for (int dx = -searchDistance; dx <= searchDistance; dx++) { for (int dy = -searchDistance; dy <= searchDistance; dy++) { sum = jaccardDistance(x, y, dx, dy, prevSlice, curSlice); if (sum <= minSum) { minSum = sum; sadResult.dx = dx; sadResult.dy = dy; sadResult.sadValue = minSum; } } } return sadResult; } /** * computes Hamming distance centered on x,y with patch of patchSize for * prevSliceIdx relative to curSliceIdx patch. * * @param x coordinate in subSampled space * @param y * @param patchSize * @param prevSlice * @param curSlice * @return SAD value */ private float jaccardDistance(int x, int y, int dx, int dy, BitSet prevSlice, BitSet curSlice) { float retVal = 0; float M01 = 0, M10 = 0, M11 = 0; int blockRadius = patchDimension / 2; // Make sure 0<=xx+dx<subSizeX, 0<=xx<subSizeX and 0<=yy+dy<subSizeY, 0<=yy<subSizeY, or there'll be arrayIndexOutOfBoundary exception. 
if ((x < (blockRadius + dx)) || (x >= ((subSizeX - blockRadius) + dx)) || (x < blockRadius) || (x >= (subSizeX - blockRadius)) || (y < (blockRadius + dy)) || (y >= ((subSizeY - blockRadius) + dy)) || (y < blockRadius) || (y >= (subSizeY - blockRadius))) { return 1; } for (int xx = x - blockRadius; xx <= (x + blockRadius); xx++) { for (int yy = y - blockRadius; yy <= (y + blockRadius); yy++) { if ((curSlice.get((xx + 1) + ((yy) * subSizeX)) == true) && (prevSlice.get(((xx + 1) - dx) + ((yy - dy) * subSizeX)) == true)) { M11 += 1; } if ((curSlice.get((xx + 1) + ((yy) * subSizeX)) == true) && (prevSlice.get(((xx + 1) - dx) + ((yy - dy) * subSizeX)) == false)) { M01 += 1; } if ((curSlice.get((xx + 1) + ((yy) * subSizeX)) == false) && (prevSlice.get(((xx + 1) - dx) + ((yy - dy) * subSizeX)) == true)) { M10 += 1; } } } if (0 == (M01 + M10 + M11)) { retVal = 0; } else { retVal = M11 / (M01 + M10 + M11); } retVal = 1 - retVal; return retVal; } // private SADResult minVicPurDistance(int blockX, int blockY) { // ArrayList<Integer[]> seq1 = new ArrayList(1); // SADResult sadResult = new SADResult(0, 0, 0); // int size = spikeTrains[blockX][blockY].size(); // int lastTs = spikeTrains[blockX][blockY].get(size - forwardEventNum)[0]; // for (int i = size - forwardEventNum; i < size; i++) { // seq1.add(spikeTrains[blockX][blockY].get(i)); //// if(seq1.get(2)[0] - seq1.get(0)[0] > thresholdTime) { //// return sadResult; // double minium = Integer.MAX_VALUE; // for (int i = -1; i < 2; i++) { // for (int j = -1; j < 2; j++) { // // Remove the seq1 itself // if ((0 == i) && (0 == j)) { // continue; // ArrayList<Integer[]> seq2 = new ArrayList(1); // if ((blockX >= 2) && (blockY >= 2)) { // ArrayList<Integer[]> tmpSpikes = spikeTrains[blockX + i][blockY + j]; // if (tmpSpikes != null) { // for (int index = 0; index < tmpSpikes.size(); index++) { // if (tmpSpikes.get(index)[0] >= lastTs) { // seq2.add(tmpSpikes.get(index)); // double dis = vicPurDistance(seq1, seq2); // if (dis < 
minium) { // minium = dis; // sadResult.dx = -i; // sadResult.dy = -j; // lastFireIndex[blockX][blockY] = spikeTrains[blockX][blockY].size() - 1; // if ((sadResult.dx != 1) || (sadResult.dy != 0)) { // // sadResult = new SADResult(0, 0, 0); // return sadResult; // private double vicPurDistance(ArrayList<Integer[]> seq1, ArrayList<Integer[]> seq2) { // int sum1Plus = 0, sum1Minus = 0, sum2Plus = 0, sum2Minus = 0; // Iterator itr1 = seq1.iterator(); // Iterator itr2 = seq2.iterator(); // int length1 = seq1.size(); // int length2 = seq2.size(); // double[][] distanceMatrix = new double[length1 + 1][length2 + 1]; // for (int h = 0; h <= length1; h++) { // for (int k = 0; k <= length2; k++) { // if (h == 0) { // distanceMatrix[h][k] = k; // continue; // if (k == 0) { // distanceMatrix[h][k] = h; // continue; // double tmpMin = Math.min(distanceMatrix[h][k - 1] + 1, distanceMatrix[h - 1][k] + 1); // double event1 = seq1.get(h - 1)[0] - seq1.get(0)[0]; // double event2 = seq2.get(k - 1)[0] - seq2.get(0)[0]; // distanceMatrix[h][k] = Math.min(tmpMin, distanceMatrix[h - 1][k - 1] + (cost * Math.abs(event1 - event2))); // while (itr1.hasNext()) { // Integer[] ii = (Integer[]) itr1.next(); // if (ii[1] == 1) { // sum1Plus += 1; // } else { // sum1Minus += 1; // while (itr2.hasNext()) { // Integer[] ii = (Integer[]) itr2.next(); // if (ii[1] == 1) { // sum2Plus += 1; // } else { // sum2Minus += 1; // // return Math.abs(sum1Plus - sum2Plus) + Math.abs(sum1Minus - sum2Minus); // return distanceMatrix[length1][length2]; /** * Computes min SAD shift around point x,y using patchDimension and * searchDistance * * @param x coordinate in subsampled space * @param y * @param prevSlice * @param curSlice * @return SADResult that provides the shift and SAD value */ private SADResult minSad(int x, int y, BitSet prevSlice, BitSet curSlice) { // for now just do exhaustive search over all shifts up to +/-searchDistance SADResult sadResult = new SADResult(0, 0, 0); float minSad = 1; for (int 
dx = -searchDistance; dx <= searchDistance; dx++) { for (int dy = -searchDistance; dy <= searchDistance; dy++) { float sad = sad(x, y, dx, dy, prevSlice, curSlice); if (sad <= minSad) { minSad = sad; sadResult.dx = dx; sadResult.dy = dy; sadResult.sadValue = minSad; } } } return sadResult; } /** * computes SAD centered on x,y with shift of dx,dy for prevSliceIdx * relative to curSliceIdx patch. * * @param x coordinate x in subSampled space * @param y coordinate y in subSampled space * @param dx block shift of x * @param dy block shift of y * @param prevSliceIdx * @param curSliceIdx * @return SAD value */ private float sad(int x, int y, int dx, int dy, BitSet prevSlice, BitSet curSlice) { int blockRadius = patchDimension / 2; // Make sure 0<=xx+dx<subSizeX, 0<=xx<subSizeX and 0<=yy+dy<subSizeY, 0<=yy<subSizeY, or there'll be arrayIndexOutOfBoundary exception. if ((x < (blockRadius + dx)) || (x >= ((subSizeX - blockRadius) + dx)) || (x < blockRadius) || (x >= (subSizeX - blockRadius)) || (y < (blockRadius + dy)) || (y >= ((subSizeY - blockRadius) + dy)) || (y < blockRadius) || (y >= (subSizeY - blockRadius))) { return 1; } float sad = 0, retVal = 0; float validPixNumCurrSli = 0, validPixNumPrevSli = 0; // The valid pixel number in the current block for (int xx = x - blockRadius; xx <= (x + blockRadius); xx++) { for (int yy = y - blockRadius; yy <= (y + blockRadius); yy++) { boolean currSlicePol = curSlice.get((xx + 1) + ((yy) * subSizeX)); // binary value on (xx, yy) for current slice boolean prevSlicePol = prevSlice.get(((xx + 1) - dx) + ((yy - dy) * subSizeX)); // binary value on (xx, yy) for previous slice int d = (currSlicePol ? 1 : 0) - (prevSlicePol ? 
1 : 0); if (currSlicePol == true) { validPixNumCurrSli += 1; } if (prevSlicePol == true) { validPixNumPrevSli += 1; } if (d <= 0) { d = -d; } sad += d; } } // Calculate the metric confidence value float validPixNum = this.validPixOccupancy * (((2 * blockRadius) + 1) * ((2 * blockRadius) + 1)); if ((validPixNumCurrSli <= validPixNum) || (validPixNumPrevSli <= validPixNum)) { // If valid pixel number of any slice is 0, then we set the distance to very big value so we can exclude it. retVal = 1; } else { /* retVal is consisted of the distance and the dispersion, dispersion is used to describe the spatial relationship within one block. Here we use the difference between validPixNumCurrSli and validPixNumPrevSli to calculate the dispersion. Inspired by paper "Measuring the spatial dispersion of evolutionist search process: application to Walksat" by Alain Sidaner. */ retVal = ((sad * weightDistance) + (Math.abs(validPixNumCurrSli - validPixNumPrevSli) * (1 - weightDistance))) / (((2 * blockRadius) + 1) * ((2 * blockRadius) + 1)); } return retVal; } private class SADResult { float dx, dy; float sadValue; int xidx, yidx; // x and y indices into 2d matrix of result. 0,0 corresponds to motion SW. dx, dy may be negative, like (-1, -1) represents SW. // However, for histgram index, it's not possible to use negative number. That's the reason for intrducing xidx and yidx. 
public SADResult(float dx, float dy, float sadValue) { this.dx = dx; this.dy = dy; this.sadValue = sadValue; } @Override public String toString() { return String.format("dx,dy=%d,%5 SAD=%d", dx, dy, sadValue); } } /** * @return the patchDimension */ public int getPatchDimension() { return patchDimension; } /** * @param patchDimension the patchDimension to set */ public void setPatchDimension(int patchDimension) { this.patchDimension = patchDimension; putInt("patchDimension", patchDimension); } // public int getEventPatchDimension() { // return eventPatchDimension; // public void setEventPatchDimension(int eventPatchDimension) { // this.eventPatchDimension = eventPatchDimension; // putInt("eventPatchDimension", eventPatchDimension); // public int getForwardEventNum() { // return forwardEventNum; // public void setForwardEventNum(int forwardEventNum) { // this.forwardEventNum = forwardEventNum; // putInt("forwardEventNum", forwardEventNum); // public float getCost() { // return cost; // public void setCost(float cost) { // this.cost = cost; // putFloat("cost", cost); // public int getThresholdTime() { // return thresholdTime; // public void setThresholdTime(int thresholdTime) { // this.thresholdTime = thresholdTime; // putInt("thresholdTime", thresholdTime); /** * @return the sliceMethod */ public SliceMethod getSliceMethod() { return sliceMethod; } /** * @param sliceMethod the sliceMethod to set */ public void setSliceMethod(SliceMethod sliceMethod) { this.sliceMethod = sliceMethod; putString("sliceMethod", sliceMethod.toString()); } public PatchCompareMethod getPatchCompareMethod() { return patchCompareMethod; } public void setPatchCompareMethod(PatchCompareMethod patchCompareMethod) { this.patchCompareMethod = patchCompareMethod; putString("patchCompareMethod", patchCompareMethod.toString()); } /** * * @return the search method */ public SearchMethod getSearchMethod() { return searchMethod; } /** * * @param searchMethod the method to be used for searching */ 
    public void setSearchMethod(SearchMethod searchMethod) {
        this.searchMethod = searchMethod;
        putString("searchMethod", searchMethod.toString());
    }

    /**
     * @return the sliceDurationUs
     */
    public int getSliceDurationUs() {
        return sliceDurationUs;
    }

    /**
     * @param sliceDurationUs the sliceDurationUs to set; persisted to preferences
     */
    public void setSliceDurationUs(int sliceDurationUs) {
        this.sliceDurationUs = sliceDurationUs;
        /* If the slice duration is changed, reset FSCnt and DSCorrectCnt so we can get a more accurate evaluation result */
        FSCnt = 0;
        DSCorrectCnt = 0;
        putInt("sliceDurationUs", sliceDurationUs);
    }

    /**
     * @return the sliceEventCount
     */
    public int getSliceEventCount() {
        return sliceEventCount;
    }

    /**
     * @param sliceEventCount the sliceEventCount to set; persisted to preferences
     */
    public void setSliceEventCount(int sliceEventCount) {
        this.sliceEventCount = sliceEventCount;
        putInt("sliceEventCount", sliceEventCount);
    }

    // Disabled pre-processing accessors, kept for reference:
    // public boolean isPreProcessEnable() {
    // return preProcessEnable;
    // public void setPreProcessEnable(boolean preProcessEnable) {
    // this.preProcessEnable = preProcessEnable;
    // putBoolean("preProcessEnable", preProcessEnable);

    public float getConfidenceThreshold() {
        return confidenceThreshold;
    }

    // Setter clamps the value into [0, 1] before storing/persisting it.
    public void setConfidenceThreshold(float confidenceThreshold) {
        if (confidenceThreshold < 0) {
            confidenceThreshold = 0;
        } else if (confidenceThreshold > 1) {
            confidenceThreshold = 1;
        }
        this.confidenceThreshold = confidenceThreshold;
        putFloat("confidenceThreshold", confidenceThreshold);
    }

    public float getValidPixOccupancy() {
        return validPixOccupancy;
    }

    // Setter clamps the value into [0, 1] before storing/persisting it.
    public void setValidPixOccupancy(float validPixOccupancy) {
        if (validPixOccupancy < 0) {
            validPixOccupancy = 0;
        } else if (validPixOccupancy > 1) {
            validPixOccupancy = 1;
        }
        this.validPixOccupancy = validPixOccupancy;
        putFloat("validPixOccupancy", validPixOccupancy);
    }

    public float getWeightDistance() {
        return weightDistance;
    }

    // Setter clamps the value into [0, 1] before storing/persisting it.
    public void setWeightDistance(float weightDistance) {
        if (weightDistance < 0) {
            weightDistance = 0;
        } else if (weightDistance > 1) {
            weightDistance = 1;
        }
        this.weightDistance = weightDistance;
        putFloat("weightDistance", weightDistance);
    }

    // Currently a no-op; the lazy array allocation it used to do is disabled.
    private void checkArrays() {
        // if (lastFireIndex == null) {
        // lastFireIndex = new int[chip.getSizeX()][chip.getSizeY()];
        // if (eventSeqStartTs == null) {
        // eventSeqStartTs = new int[chip.getSizeX()][chip.getSizeY()];
    }

    /**
     * Checks whether a match result should be rejected.
     *
     * @param distResult the search result to test
     * @return the confidence of the result. True means it's not good and should
     *         be rejected, false means we should accept it.
     */
    public synchronized boolean isNotSufficientlyAccurate(SADResult distResult) {
        boolean retVal = super.accuracyTests(); // check accuracy in super, if reject returns true
        // additional test: the normalized block distance must be small enough;
        // distance has max value 1
        if (distResult.sadValue >= (1 - confidenceThreshold)) {
            retVal = true;
        }
        return retVal;
    }

    /**
     * @return the skipProcessingEventsCount
     */
    public int getSkipProcessingEventsCount() {
        return skipProcessingEventsCount;
    }

    /**
     * @param skipProcessingEventsCount the skipProcessingEventsCount to set;
     *        clamped into [0, 300], fires a property change, and persisted
     */
    public void setSkipProcessingEventsCount(int skipProcessingEventsCount) {
        int old = this.skipProcessingEventsCount;
        if (skipProcessingEventsCount < 0) {
            skipProcessingEventsCount = 0;
        }
        if (skipProcessingEventsCount > 300) {
            skipProcessingEventsCount = 300;
        }
        this.skipProcessingEventsCount = skipProcessingEventsCount;
        getSupport().firePropertyChange("skipProcessingEventsCount", old, this.skipProcessingEventsCount);
        putInt("skipProcessingEventsCount", skipProcessingEventsCount);
    }

    /**
     * @return the displayResultHistogram
     */
    public boolean isDisplayResultHistogram() {
        return displayResultHistogram;
    }

    /**
     * @param displayResultHistogram the displayResultHistogram to set
     */
    public void setDisplayResultHistogram(boolean displayResultHistogram) {
        this.displayResultHistogram = displayResultHistogram;
        putBoolean("displayResultHistogram", displayResultHistogram);
    }

    /**
     * @return the adaptiveEventSkipping
     */
    public boolean isAdaptiveEventSkipping() {
        return adaptiveEventSkipping;
    }

    /**
     * @param adaptiveEventSkipping the adaptiveEventSkipping to set
     */
    public void setAdaptiveEventSkipping(boolean adaptiveEventSkipping) {
        this.adaptiveEventSkipping = adaptiveEventSkipping;
        putBoolean("adaptiveEventSkipping", adaptiveEventSkipping);
    }

    public boolean isOutputSearchErrorInfo() {
        return outputSearchErrorInfo;
    }

    synchronized public void setOutputSearchErrorInfo(boolean outputSearchErrorInfo) {
        this.outputSearchErrorInfo = outputSearchErrorInfo;
        if (!outputSearchErrorInfo) {
            // restore the persisted search method when error output is turned off
            searchMethod = SearchMethod.valueOf(getString("searchMethod", SearchMethod.FullSearch.toString())); // make sure method is reset
        }
    }

    // Only re-evaluate the skip count every N packets to avoid thrashing.
    private int adaptiveEventSkippingUpdateIntervalPackets = 10;
    private int adaptiveEventSkippingUpdateCounter = 0;

    // Adjusts skipProcessingEventsCount up/down based on the viewer's measured
    // frame rate vs. its target (80% threshold), when adaptive skipping is enabled.
    private void adaptEventSkipping() {
        if (!adaptiveEventSkipping) {
            return;
        }
        if (chip.getAeViewer() == null) {
            return;
        }
        if (adaptiveEventSkippingUpdateCounter++ < adaptiveEventSkippingUpdateIntervalPackets) {
            return;
        }
        adaptiveEventSkippingUpdateCounter = 0;
        if (chip.getAeViewer().getFrameRater().getAverageFPS() < (int) (0.8f * chip.getAeViewer().getFrameRate())) {
            setSkipProcessingEventsCount(skipProcessingEventsCount + 1);
        } else {
            setSkipProcessingEventsCount(skipProcessingEventsCount - 1);
        }
    }
}
package com.drewschrauf.example.robotronic; import org.json.JSONException; import org.json.JSONObject; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import com.drewschrauf.robotronic.activities.RobotronicActivity; import com.drewschrauf.robotronic.threads.ThreadHandler; public class ExampleSimple extends RobotronicActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // set the layout setContentView(R.layout.simple); } @Override protected void onStart() { super.onStart(); // get the image ImageView image = (ImageView) findViewById(R.id.image); getThreadHandler() .makeImageDownloader( "https://raw.github.com/drewschrauf/robotronic/master/src/com/drewschrauf/example/robotronic/smiley.jpg", image); // get the text final TextView text = (TextView) findViewById(R.id.text); getThreadHandler() .makeDataDownloader( "https://raw.github.com/drewschrauf/robotronic/master/src/com/drewschrauf/example/robotronic/example.json", new Handler() { /** * Define a handler to deal with the retrieved data */ @Override public void handleMessage(Message msg) { // check the return code (msg.what) to see if it // isData or isError if (ThreadHandler.isData(msg.what)) { // parse the data (msg.obj) as a String try { JSONObject feed = new JSONObject( (String) msg.obj); text.setText(feed.getString("text")); } catch (JSONException e) { Toast.makeText(ExampleSimple.this, "Error parsing result", Toast.LENGTH_LONG).show(); } } else { // deal with any errors that arose while // retrieving data Toast.makeText(ExampleSimple.this, "Error retriving text", Toast.LENGTH_LONG).show(); } } }); } }
package com.itmill.toolkit.terminal.gwt.client.ui;

import com.google.gwt.user.client.Command;

import com.itmill.toolkit.terminal.gwt.client.ApplicationConnection;

/**
 * Base class for context-menu actions: carries a caption and optional icon URL
 * and renders itself as an HTML snippet. Subclasses implement {@link #execute()}.
 */
public abstract class Action implements Command {

    protected ActionOwner owner;

    protected String iconUrl = null;

    protected String caption = "";

    public Action(ActionOwner owner) {
        this.owner = owner;
    }

    /**
     * Executed when action fired
     */
    public abstract void execute();

    /**
     * Renders the action as HTML: an optional icon img tag followed by the
     * caption.
     *
     * @return HTML representation of this action
     */
    public String getHTML() {
        // FIX: use StringBuilder (no synchronization needed for a local) and
        // chained appends instead of building intermediate concatenated strings;
        // also read the icon URL once instead of calling getIconUrl() twice.
        StringBuilder sb = new StringBuilder();
        String icon = getIconUrl();
        if (icon != null) {
            sb.append("<img src=\"").append(icon).append("\" alt=\"icon\" />");
        }
        sb.append(getCaption());
        return sb.toString();
    }

    public String getCaption() {
        return caption;
    }

    public void setCaption(String caption) {
        this.caption = caption;
    }

    public String getIconUrl() {
        return iconUrl;
    }
}

/**
 * Action owner must provide a set of actions for context menu and IAction
 * objects.
 */
interface ActionOwner {

    /**
     * @return Array of IActions
     */
    public Action[] getActions();

    public ApplicationConnection getClient();

    public String getPaintableId();
}
package com.jcwhatever.nucleus.providers.citizensnpc;

import com.jcwhatever.nucleus.Nucleus;
import com.jcwhatever.nucleus.managed.messaging.IMessenger;

/**
 * Console message helper utility.
 *
 * <p>Static facade over the Nucleus anonymous messenger; every message is
 * tagged with the provider prefix before being forwarded.</p>
 */
public class Msg {

    // Tag prepended to every message so log lines are attributable to this provider.
    private static final String PREFIX = "[CitizensNPCProvider] ";

    private Msg() {}

    /**
     * Write a debug message to the console and log. Message is
     * disregarded unless debugging is turned on.
     *
     * @param message The message to write.
     * @param params Optional format parameters.
     */
    public static void debug(String message, Object... params) {
        if (!Nucleus.getPlugin().isDebugging())
            return;

        messenger().debug(tagged(message), params);
    }

    /**
     * Write an info message to the console and log.
     *
     * @param message The message to write.
     * @param params Optional format parameters.
     */
    public static void info(String message, Object... params) {
        messenger().info(tagged(message), params);
    }

    /**
     * Write a warning message to the console and log.
     *
     * @param message The message to write.
     * @param params Optional format parameters.
     */
    public static void warning(String message, Object... params) {
        messenger().warning(tagged(message), params);
    }

    /**
     * Write a severe error message to the console and log.
     *
     * @param message The message to write.
     * @param params Optional format parameters.
     */
    public static void severe(String message, Object... params) {
        messenger().severe(tagged(message), params);
    }

    // Prepends the provider prefix to a raw message.
    private static String tagged(String message) {
        return PREFIX + message;
    }

    // The shared anonymous messenger used for all output.
    private static IMessenger messenger() {
        return Nucleus.getPlugin().getAnonMessenger();
    }
}
package com.jcwhatever.nucleus.utils.potions;

import com.jcwhatever.nucleus.internal.NucLang;
import com.jcwhatever.nucleus.managed.language.Localizable;
import com.jcwhatever.nucleus.utils.PreCon;
import com.jcwhatever.nucleus.utils.ThreadSingletons;
import com.jcwhatever.nucleus.utils.nms.INmsPotionHandler;
import com.jcwhatever.nucleus.utils.nms.NmsUtils;

import org.bukkit.potion.Potion;
import org.bukkit.potion.PotionType;

/**
 * Stores potion display names.
 */
public class PotionNames {

    private PotionNames() {}

    // Localizable display-name constants for every supported potion type and
    // for the name modifiers (splash prefix, level II, extended duration).
    @Localizable static final String _AWKWARD = "Awkward Potion";
    @Localizable static final String _THICK = "Thick Potion";
    @Localizable static final String _MUNDANE = "Mundane Potion";
    @Localizable static final String _WATER = "Water Bottle";
    @Localizable static final String _REGEN = "Potion of Regeneration";
    @Localizable static final String _SPEED = "Potion of Swiftness";
    @Localizable static final String _FIRE_RESISTANCE = "Potion of Fire Resistance";
    @Localizable static final String _POISON = "Potion of Poison";
    @Localizable static final String _INSTANT_HEAL ="Potion of Healing";
    @Localizable static final String _NIGHT_VISION = "Potion of Night Vision";
    @Localizable static final String _WEAKNESS = "Potion of Weakness";
    @Localizable static final String _STRENGTH = "Potion of Strength";
    @Localizable static final String _SLOWNESS = "Potion of Slowness";
    @Localizable static final String _JUMP = "Potion of Leaping";
    @Localizable static final String _INSTANT_DAMAGE = "Potion of Harming";
    @Localizable static final String _WATER_BREATHING = "Potion of Water Breathing";
    @Localizable static final String _INVISIBILITY = "Potion of Invisibility";
    @Localizable static final String _SPLASH = "Splash";
    @Localizable static final String _LEVEL2 = "II";
    @Localizable static final String _EXTENDED_DURATION = "(ext)";

    // Per-thread reusable StringBuilder so name assembly allocates nothing on
    // the hot path; each thread gets its own buffer (see buffer()).
    private static final ThreadSingletons<StringBuilder> BUFFERS = new ThreadSingletons<>(
            new ThreadSingletons.ISingletonFactory<StringBuilder>() {
                @Override
                public StringBuilder create(Thread thread) {
                    return new StringBuilder(45);
                }
            });

    private static final INmsPotionHandler _handler = NmsUtils.getPotionHandler();

    /**
     * Get a simple potion name.
     *
     * @param potion The potion.
     */
    public static String getSimple(Potion potion) {
        PreCon.notNull(potion);

        return getSimple(potion.getType());
    }

    /**
     * Get a simple potion name.
     *
     * @param type The potion type.
     */
    public static String getSimple(PotionType type) {
        PreCon.notNull(type);

        StringBuilder buffer = buffer();

        appendSimple(buffer, type);

        return buffer.toString();
    }

    /**
     * Get a full potion name including characteristics.
     *
     * @param potion The potion ID.
     */
    public static String getFull(Potion potion) {
        PreCon.notNull(potion);

        // Encode type/level/splash/duration into a single potion ID via NMS,
        // then reuse the int-based overload.
        int potionId = _handler.getPotionId(
                potion.getType(), potion.getLevel(), potion.isSplash(), potion.hasExtendedDuration());

        return getFull(potionId);
    }

    /**
     * Get a full potion name including characteristics.
     *
     * @param potionId The potion ID.
     *
     * @throws IllegalArgumentException if potionId is &lt;= 64 but not one of
     *         the base-potion IDs 0, 16, 32 or 64.
     */
    public static String getFull(int potionId) {

        // IDs up to 64 are the four un-typed base potions; anything else in
        // this range has no name. NOTE(review): IDs 1-15, 17-31, 33-63 throw
        // here — presumably they cannot be produced by the NMS handler; confirm.
        if (potionId <= 64) {
            switch (potionId) {
                case 0:
                    return NucLang.get(_WATER);
                case 16:
                    return NucLang.get(_AWKWARD);
                case 32:
                    return NucLang.get(_THICK);
                case 64:
                    return NucLang.get(_MUNDANE);
                default:
                    throw new IllegalArgumentException(
                            "Failed to get PotionType for Potion name Id: " + potionId);
            }
        }

        // Low 4 bits select the potion type.
        PotionType type = PotionType.getByDamageValue(potionId & 15);

        StringBuilder buffer = buffer();

        // Bit 16384 marks a splash potion; its name prefix goes first.
        if ((potionId & 16384) == 16384) {
            append(buffer, _SPLASH);
            buffer.append(' ');
        }

        appendSimple(buffer, type);

        // Bit 32 marks a level-II potion.
        if ((potionId & 32) == 32) {
            buffer.append(' ');
            append(buffer, _LEVEL2);
        }

        // Bit 64 marks extended duration.
        if ((potionId & 64) == 64) {
            buffer.append(' ');
            append(buffer, _EXTENDED_DURATION);
        }

        return buffer.toString();
    }

    // Appends the localized base name for a potion type; unknown enum values
    // append nothing.
    private static void appendSimple(StringBuilder buffer, PotionType type) {

        switch (type) {
            case WATER:
                append(buffer, _WATER);
                break;
            case REGEN:
                append(buffer, _REGEN);
                break;
            case SPEED:
                append(buffer, _SPEED);
                break;
            case FIRE_RESISTANCE:
                append(buffer, _FIRE_RESISTANCE);
                break;
            case POISON:
                append(buffer, _POISON);
                break;
            case INSTANT_HEAL:
                append(buffer, _INSTANT_HEAL);
                break;
            case NIGHT_VISION:
                append(buffer, _NIGHT_VISION);
                break;
            case WEAKNESS:
                append(buffer, _WEAKNESS);
                break;
            case STRENGTH:
                append(buffer, _STRENGTH);
                break;
            case SLOWNESS:
                append(buffer, _SLOWNESS);
                break;
            case JUMP:
                append(buffer, _JUMP);
                break;
            case INSTANT_DAMAGE:
                append(buffer, _INSTANT_DAMAGE);
                break;
            case WATER_BREATHING:
                append(buffer, _WATER_BREATHING);
                break;
            case INVISIBILITY:
                append(buffer, _INVISIBILITY);
                break;
        }
    }

    // Appends the localized form of a @Localizable constant.
    private static void append(StringBuilder buffer, String text) {
        buffer.append(NucLang.get(text));
    }

    // Returns this thread's buffer, cleared for reuse.
    private static StringBuilder buffer() {
        StringBuilder buffer = BUFFERS.get();
        buffer.setLength(0);
        return buffer;
    }
}
package org.upennapo.app; import android.support.v4.app.Fragment; import android.annotation.SuppressLint; import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.webkit.WebView; /** * @author Ronald Martin * */ public class CalendarFragment extends Fragment { public static final String URL_KEY = "url"; public CalendarFragment() { } @SuppressLint("SetJavaScriptEnabled") @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View calendarView = inflater.inflate(R.layout.fragment_calendar_tab, container, false); final String urlToLoad = getArguments().getString(URL_KEY); if (urlToLoad != null) { WebView calendarWebView = (WebView) calendarView.findViewById(R.id.calendar_webview); calendarWebView.getSettings().setJavaScriptEnabled(true); calendarWebView.loadUrl(urlToLoad); } return calendarView; } }
// This file was generated by RobotBuilder. It contains sections of
// code that are automatically generated and assigned by robotbuilder.
// These sections will be updated in the future when you export to
// Java from RobotBuilder. Do not put any code or make any change in
// the blocks indicating autogenerated code or it will be lost on an
// update. Deleting the comments indicating the section will prevent
// it from being updated in the future.

package com.team3546.season2014.RoboBuilt.commands;

import com.team3546.season2014.RoboBuilt.RobotSystemsGroup;
import com.team3546.season2014.RoboBuilt.StatusManager;
import edu.wpi.first.wpilibj.DoubleSolenoid;
import edu.wpi.first.wpilibj.command.Command;
import com.team3546.season2014.RoboBuilt.Robot;

/**
 * Extends the backboard and pickup arm and then retracts them when finished
 */
public class Catch extends Command {

    //Stores the systems this command uses
    RobotSystemsGroup requiredSystems;
    //Stores the result given by the status manager so we don't undo what we haven't done
    boolean executeCommand;

    public Catch() {
        //Build a profile to describe the usage of this command
        requiredSystems = new RobotSystemsGroup();
        //This command needs to use the backboard and pickup arm
        requiredSystems.backboardSolenoid.value = StatusManager.uses;
        requiredSystems.armMovementSolenoid.value = StatusManager.uses;
        // Use requires() here to declare subsystem dependencies
        // eg. requires(chassis);
        // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=REQUIRES
        // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=REQUIRES
    }

    // Called just before this Command runs the first time
    protected void initialize() {
        //Check for any conflicts between other commands; only act if the
        //status manager grants us the systems (executeCommand remembers this
        //so end() won't retract what we never extended).
        executeCommand = Robot.statusManager.checkForConflictsAndSetNewStatus(requiredSystems);
        if (executeCommand) {
            //Set the position of the backboard and arm movement solenoid to extended
            Robot.backboard.setBackboardSolenoid(DoubleSolenoid.Value.kForward);
            Robot.pickupArm.setArmMovementSolenoid(DoubleSolenoid.Value.kForward);
        }
    }

    // Called repeatedly when this Command is scheduled to run
    protected void execute() {
    }

    // Make this return true when this Command no longer needs to run execute()
    // Always true: the extend happens in initialize() and the retract in end().
    protected boolean isFinished() {
        return true;
    }

    // Called once after isFinished returns true
    protected void end() {
        if (executeCommand) {
            //Retract both solenoids and release the systems back to the status manager
            Robot.backboard.setBackboardSolenoid(DoubleSolenoid.Value.kReverse);
            Robot.pickupArm.setArmMovementSolenoid(DoubleSolenoid.Value.kReverse);
            Robot.statusManager.doneWithSystems(requiredSystems);
        }
    }

    // Called when another command which requires one or more of the same
    // subsystems is scheduled to run
    protected void interrupted() {
        end();
    }
}
package org.pentaho.di.i18n;

import java.util.MissingResourceException;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogWriter;
import org.pentaho.di.laf.BasePropertyHandler;

/**
 * Message handler that redirects message lookups from the default
 * "org.pentaho.di" package space into a look-and-feel (LAF) specific package
 * configured via the "LAFpackage" property, falling back to the standard
 * bundles when no LAF override resolves.
 *
 * @author dhushon
 */
public class LAFMessageHandler extends GlobalMessages {

    // Package prefix that gets swapped out of incoming bundle package names.
    private static String replace = "org.pentaho.di";
    // LAF replacement package, read from the "LAFpackage" property.
    private static String replaceWith = null;
    // Cached length of the prefix to trim; -1 until first discovered.
    private static int offset = -1;
    // System bundle package re-rooted into the LAF package.
    private static String replaceSysBundle = null;

    static {
        replaceWith = BasePropertyHandler.getProperty("LAFpackage");
    }

    //TODO: modify base class to include a mandatory accessor so that this singleton instantiation can be
    //TODO: better controlled
    public LAFMessageHandler() {
        super();
        reinit();
    }

    /**
     * @return the shared handler instance, creating it on first use
     */
    public synchronized static MessageHandler getInstance() {
        if (GMinstance == null) {
            GMinstance = new LAFMessageHandler();
        }
        return (MessageHandler) GMinstance;
    }

    /**
     * Re-reads the LAF package property and recomputes the derived state.
     */
    protected void reinit() {
        replaceWith = BasePropertyHandler.getProperty("LAFpackage");
        replaceSysBundle = replacePackage(SYSTEM_BUNDLE_PACKAGE);
        offset = -1;
    }

    /**
     * Replace the application package-name target with ours for proper resolution,
     * e.g. replace org.pentaho.di.* with pointers to the new package structure.
     *
     * @param packageName the original bundle package name
     * @return the package name re-rooted under the LAF package
     */
    private String replacePackage(String packageName) {
        // we haven't yet discovered the offset for the trim
        if (offset < 0) {
            offset = packageName.indexOf(replace);
            if (offset >= 0) {
                offset = replace.length();
            }
            // NOTE(review): if packageName does not contain the prefix, offset
            // stays -1 and substring(-1) below would throw — presumably every
            // caller passes an org.pentaho.di package; confirm.
        }
        // FIX: dropped the redundant "new String(...)" wrapper around the
        // concatenation result; it only created a needless copy.
        return replaceWith + packageName.substring(offset);
    }

    /**
     * Looks a key up in the given package and global bundle, trying the default
     * locale first and then the failover locale.
     *
     * @param packageName bundle package to search first
     * @param global      global (system) bundle package to fall back to
     * @param key         message key
     * @param parameters  message format parameters
     * @return the resolved string, or null if not found anywhere
     */
    private String internalCalc(String packageName, String global, String key, Object[] parameters) {
        String string = null;

        // Then try the original package. Missing bundles are expected here: the
        // lookup simply falls through to the next candidate.
        try {
            string = findString(packageName, langChoice.getDefaultLocale(), key, parameters);
        } catch (MissingResourceException ignored) {
            // fall through to the next lookup candidate
        }
        if (string != null) {
            return string;
        }

        // Then try to find it in the i18n package, in the system messages of the preferred language.
        try {
            string = findString(global, langChoice.getDefaultLocale(), key, parameters);
        } catch (MissingResourceException ignored) {
            // fall through to the next lookup candidate
        }
        if (string != null) {
            return string;
        }

        // Then try the failover locale, in the local package.
        try {
            string = findString(packageName, langChoice.getFailoverLocale(), key, parameters);
        } catch (MissingResourceException ignored) {
            // fall through to the next lookup candidate
        }
        if (string != null) {
            return string;
        }

        // Then try to find it in the i18n package, in the system messages of the failover language.
        try {
            string = findString(global, langChoice.getFailoverLocale(), key, parameters);
        } catch (MissingResourceException ignored) {
            // no more candidates; null is returned below
        }
        return string;
    }

    /**
     * Resolves a message key, preferring the LAF-redirected bundles and falling
     * back to the standard ones; returns "!key!" when nothing resolves.
     *
     * @param packageName bundle package of the caller
     * @param key         message key
     * @param parameters  message format parameters
     * @return the resolved message, or "!key!" if it cannot be found
     */
    protected String calculateString(String packageName, String key, Object[] parameters) {
        String string = null;

        if (replaceWith != null) {
            string = internalCalc(replacePackage(packageName), replaceSysBundle, key, parameters);
            if (string != null) {
                return string;
            }
        }

        string = internalCalc(packageName, SYSTEM_BUNDLE_PACKAGE, key, parameters);
        if (string != null) {
            return string;
        }

        string = "!" + key + "!";
        if (LogWriter.getInstance().isDetailed()) {
            String message = "Message not found in the preferred and failover locale: key=[" + key + "], package=" + packageName;
            LogWriter.getInstance().logDetailed("i18n", Const.getStackTracker(new KettleException(message)));
        }

        return string;
    }
}
package com.wb.nextgenlibrary.activity; import android.app.ActivityManager; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.ActivityInfo; import android.content.res.Configuration; import android.graphics.drawable.ColorDrawable; import android.media.MediaPlayer; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.os.HandlerThread; import android.support.v4.app.Fragment; import android.support.v7.widget.ListPopupWindow; import android.view.LayoutInflater; import android.view.Menu; import android.view.View; import android.view.ViewGroup; import android.view.Window; import android.view.animation.Animation; import android.view.animation.Transformation; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.ListAdapter; import android.widget.ProgressBar; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; import com.google.android.gms.cast.framework.CastButtonFactory; import com.google.android.gms.cast.framework.CastState; import com.wb.nextgenlibrary.NextGenExperience; import com.wb.nextgenlibrary.R; import com.wb.nextgenlibrary.analytic.NGEAnalyticData; import com.wb.nextgenlibrary.fragment.AbstractCastMainMovieFragment; import com.wb.nextgenlibrary.fragment.AbstractNGEMainMovieFragment; import com.wb.nextgenlibrary.fragment.IMEBottomFragment; import com.wb.nextgenlibrary.interfaces.NGEFragmentTransactionInterface; import com.wb.nextgenlibrary.interfaces.NGEPlaybackStatusListener.NextGenPlaybackStatus; import com.wb.nextgenlibrary.util.concurrent.ResultListener; import com.wb.nextgenlibrary.util.utils.F; import com.wb.nextgenlibrary.util.utils.NGEFragmentTransactionEngine; import com.wb.nextgenlibrary.util.utils.NextGenGlide; import com.wb.nextgenlibrary.util.utils.NextGenLogger; import 
com.wb.nextgenlibrary.util.utils.StringHelper; import com.wb.nextgenlibrary.videoview.IVideoViewActionListener; import com.wb.nextgenlibrary.videoview.ObservableVideoView; import com.wb.nextgenlibrary.widget.CustomMediaController; import com.wb.nextgenlibrary.widget.FontFitTextView; import com.wb.nextgenlibrary.widget.MainFeatureMediaController; import java.util.ArrayList; import java.util.List; import java.util.Timer; import java.util.TimerTask; public class InMovieExperience extends AbstractNGEActivity implements NGEFragmentTransactionInterface, DialogInterface.OnCancelListener { private final static String IS_PLAYER_PLAYING = "IS PLAYING"; private final static String RESUME_PLAYBACK_TIME = "RESUME_PLAYBACK_TIME"; protected ObservableVideoView interstitialVideoView; protected RelativeLayout containerView; protected View skipThisView; protected ProgressBar skipThisCounter; private View actionbarPlaceHolder; private TimerTask imeUpdateTask; private Timer imeUpdateTimer; private MainFeatureMediaController mediaController; //private ProgressDialog mDialog; private ProgressBar loadingView; NGEFragmentTransactionEngine fragmentTransactionEngine; IMEBottomFragment imeBottomFragment; private Uri INTERSTITIAL_VIDEO_URI = null; private Uri currentUri = null; private DRMStatus drmStatus = DRMStatus.NOT_INITIATED; private boolean bInterstitialVideoComplete = false; //TextView imeText; //IMEElementsGridFragment imeGridFragment; private long lastTimeCode = -1; private NextGenPlaybackStatus lastPlaybackStatus = null; AbstractNGEMainMovieFragment mainMovieFragment; int ecFragmentsCounter = 0; MediaPlayer commentaryAudioPlayer; CommentaryOnOffAdapter commentaryOnOffAdapter; ListPopupWindow commentaryPopupWindow; Menu mOptionsMenu; static enum DRMStatus{ SUCCESS, FAILED, IN_PROGRESS, NOT_INITIATED } @Override public void onCastStateChanged(int i) { switch (i){ case CastState.CONNECTED: setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_PORTRAIT); 
resetMainMovieFragment(); startStreamPreparations(); if (isCommentaryAvailable()) actionBarRightTextView.setVisibility(View.GONE); break; case CastState.CONNECTING: break; case CastState.NO_DEVICES_AVAILABLE: break; case CastState.NOT_CONNECTED: setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR); resetMainMovieFragment(); startStreamPreparations(); if (isCommentaryAvailable()) actionBarRightTextView.setVisibility(View.VISIBLE); break; } } @Override public void onCreate(Bundle savedInstanceState) { supportRequestWindowFeature(Window.FEATURE_ACTION_BAR_OVERLAY); super.onCreate(savedInstanceState); detectScreenShotService(); if (NextGenExperience.getMovieMetaData() == null) finish(); INTERSTITIAL_VIDEO_URI = Uri.parse(NextGenExperience.getMovieMetaData().getInterstitialVideoURL()); fragmentTransactionEngine = new NGEFragmentTransactionEngine(this); setContentView(R.layout.next_gen_videoview); loadingView = (ProgressBar)findViewById(R.id.next_gen_loading_progress_bar); if (loadingView != null){ loadingView.setBackgroundColor(getResources().getColor(android.R.color.transparent)); } actionbarPlaceHolder = findViewById(R.id.next_gen_ime_actionbar_placeholder); backgroundImageView = (ImageView)findViewById(R.id.ime_background_image_view); if (backgroundImageView != null){ if (NextGenExperience.getMovieMetaData().getExtraExperience().style != null) { String bgImgUrl = NextGenExperience.getMovieMetaData().getExtraExperience().style.getBackground().getImage().url; NextGenGlide.load(this, bgImgUrl).into(backgroundImageView); } } containerView = (RelativeLayout)findViewById(R.id.interstitial_container); skipThisView = findViewById(R.id.skip_this_layout); skipThisCounter = (ProgressBar) findViewById(R.id.skip_this_countdown); interstitialVideoView = (ObservableVideoView) findViewById(R.id.interstitial_video_view); interstitialVideoView.setOnErrorListener(getOnErrorListener()); interstitialVideoView.setOnPreparedListener(getOnPreparedListener()); 
interstitialVideoView.setOnCompletionListener(new MediaPlayer.OnCompletionListener() { @Override public void onCompletion(MediaPlayer mp) { updateImeFragment(NextGenPlaybackStatus.STOP, -1L); NextGenExperience.getNextGenEventHandler().setInterstitialWatchedForContent(NextGenExperience.getNextgenPlaybackObject()); bInterstitialVideoComplete = true; playMainMovie(); } }); interstitialVideoView.requestFocus(); imeBottomFragment = new IMEBottomFragment(); transitMainFragment(imeBottomFragment); resetMainMovieFragment(); if (isCommentaryAvailable()) { actionBarRightTextView.setText(getResources().getString(R.string.nge_commentary)); actionBarRightTextView.setCompoundDrawablesWithIntrinsicBounds(null, null, getResources().getDrawable(R.drawable.nge_commentary), null); actionBarRightTextView.setTextColor( getResources().getColor(R.color.gray)); commentaryOnOffAdapter = new CommentaryOnOffAdapter(); commentaryPopupWindow = new ListPopupWindow(this); commentaryPopupWindow.setModal(false); commentaryPopupWindow.setAdapter(commentaryOnOffAdapter); commentaryPopupWindow.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { commentaryOnOffAdapter.setSelection(position); if (position == 0){ // OFF switchCommentary(false); } else { switchCommentary(true); } commentaryPopupWindow.dismiss(); } }); actionBarRightTextView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { //toggleCommentary(); if (commentaryPopupWindow.isShowing()) commentaryPopupWindow.dismiss(); else { commentaryPopupWindow.setAnchorView(actionBarRightTextView); commentaryPopupWindow.setContentWidth(measureContentWidth(commentaryOnOffAdapter)); commentaryPopupWindow.setBackgroundDrawable(new ColorDrawable(getResources().getColor(R.color.black))); commentaryPopupWindow.show(); } } }); } //resetActivePlaybackFragment(); } private void resetMainMovieFragment(){ try { int resumeTime 
= -1; if (mainMovieFragment != null) resumeTime = mainMovieFragment.getCurrentPosition(); if (isCasting()){ setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_PORTRAIT); mainMovieFragment = NextGenExperience.getCastMovieFragmentClass().newInstance(); ((AbstractCastMainMovieFragment)mainMovieFragment).setCastControllers(mCastSession, remoteMediaClient, mSessionManager); }else mainMovieFragment = NextGenExperience.getMainMovieFragmentClass().newInstance(); mainMovieFragment.setResumeTime(resumeTime); mainMovieFragment.setLoadingView(loadingView); mainMovieFragment.setOnCompletionLister(new MediaPlayer.OnCompletionListener() { @Override public void onCompletion(MediaPlayer mp) { //launch extra Intent intent = new Intent(InMovieExperience.this, OutOfMovieActivity.class); startActivity(intent); finish(); } }); mediaController = new MainFeatureMediaController(this, mainMovieFragment); mediaController.setVisibilityChangeListener(new CustomMediaController.MediaControllerVisibilityChangeListener(){ public void onVisibilityChange(boolean bShow){ if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) { if (bShow) getSupportActionBar().show(); else getSupportActionBar().hide(); } } }); mainMovieFragment.setCustomMediaController(mediaController); mainMovieFragment.setPlaybackObject(NextGenExperience.getNextgenPlaybackObject()); //mainMovieFragment.setOnCompletionListener mainMovieFragment.setNextGenVideoViewListener(new IVideoViewActionListener() { @Override public void onTimeBarSeekChanged(int currentTime) { updateImeFragment(NextGenPlaybackStatus.SEEK, currentTime); } @Override public void onVideoResume() { updateImeFragment(NextGenPlaybackStatus.RESUME, mainMovieFragment.getCurrentPosition()); } @Override public void onVideoStart() { updateImeFragment(NextGenPlaybackStatus.RESUME, mainMovieFragment.getCurrentPosition()); } @Override public void onVideoPause() { updateImeFragment(NextGenPlaybackStatus.PAUSE, 
mainMovieFragment.getCurrentPosition()); } @Override public void onVideoComplete(){} }); }catch (InstantiationException ex){ }catch (IllegalAccessException iex){ } prepareCommentaryTrack(); playMainMovie(); } @Override public boolean onPrepareOptionsMenu(final Menu menu) { if (mOptionsMenu == null) { getMenuInflater().inflate(R.menu.next_gen_cast_option_menu, menu); mOptionsMenu = menu; return super.onCreateOptionsMenu(menu); } //resetMenuItems(); return true; } /** * Set up action bar items */ @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); getMenuInflater().inflate(R.menu.next_gen_cast_option_menu, menu); mOptionsMenu = menu; try { CastButtonFactory.setUpMediaRouteButton(getApplicationContext(), menu, R.id.menuChromecast); }catch (Exception ex){} if (actionBarRightTextView != null && actionBarRightTextView instanceof FontFitTextView) ((FontFitTextView)actionBarRightTextView).setNumberOfLinesAllowed(1); return true; } private int measureContentWidth(ListAdapter listAdapter) { ViewGroup mMeasureParent = null; int maxWidth = 0; View itemView = null; int itemType = 0; final ListAdapter adapter = listAdapter; final int widthMeasureSpec = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED); final int heightMeasureSpec = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED); final int count = adapter.getCount(); for (int i = 0; i < count; i++) { final int positionType = adapter.getItemViewType(i); if (positionType != itemType) { itemType = positionType; itemView = null; } if (mMeasureParent == null) { mMeasureParent = new FrameLayout(this); } itemView = adapter.getView(i, itemView, mMeasureParent); itemView.measure(widthMeasureSpec, heightMeasureSpec); final int itemWidth = itemView.getMeasuredWidth(); if (itemWidth > maxWidth) { maxWidth = itemWidth; } } return maxWidth; } class CommentaryOnOffAdapter extends BaseAdapter { private final String[] items = new 
String[]{getResources().getString(R.string.off_text), getResources().getString(R.string.director_commentary)}; int selectedIndex = 0; CommentaryOnOffAdapter() { } public void setSelection(int index){ selectedIndex = index; notifyDataSetChanged(); } @Override public int getCount() { return 2; } @Override public String getItem(int index) { return items[index]; } @Override public long getItemId(int position) { return position; } @Override public View getView(final int index, View view, ViewGroup arg2) { View target = null; if (view != null){ target = view; }else{ LayoutInflater inflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE); target = inflate.inflate(R.layout.nge_commentary_onoff, arg2, false); } TextView tView = (TextView)target.findViewById(R.id.commentary_text_button); tView.setText(getItem(index)); TextView description = (TextView) target.findViewById(R.id.commentary_description); ImageView iView = (ImageView)target.findViewById(R.id.commentary_radio_image); /* if (index > 0){ description.setText("Hear from the director in his own words about the decision he made"); description.setVisibility(View.VISIBLE); }else { description.setVisibility(View.GONE); }*/ if (index == selectedIndex){ tView.setTextColor(getResources().getColor(R.color.drawer_yellow)); iView.setImageDrawable(getResources().getDrawable(R.drawable.commentary_radio_button_selected)); }else { tView.setTextColor(getResources().getColor(R.color.white)); iView.setImageDrawable(getResources().getDrawable(R.drawable.commentary_radio_button)); } return target; } } @Override protected void onStart() { super.onStart(); hideShowNextGenView(); } @Override public void onBackPressed(){ if (ecFragmentsCounter == 1) finish(); else { getSupportFragmentManager().popBackStackImmediate(); ecFragmentsCounter = ecFragmentsCounter - 1; if (isPausedByIME){ isPausedByIME = false; if (mediaController.isShowing()){ mediaController.hide(); } //tr 9/28 } } } @Override protected boolean 
shouldUseActionBarSpacer(){ return false; } @Override String getTitleImageUrl(){ return NextGenExperience.getMovieMetaData().getTitletreatmentImageUrl(); } @Override public void onCancel(DialogInterface dialog) { dialog.cancel(); //this.finish(); } protected void updateImeFragment(final NextGenPlaybackStatus playbackStatus, final long timecode){ if (INTERSTITIAL_VIDEO_URI.equals(currentUri)) return; if (lastTimeCode == timecode - mainMovieFragment.getMovieOffsetMilliSecond() && lastPlaybackStatus == playbackStatus) return; lastPlaybackStatus = playbackStatus; lastTimeCode = timecode - mainMovieFragment.getMovieOffsetMilliSecond(); if (lastTimeCode < 0) lastTimeCode = 0; runOnUiThread(new Runnable() { @Override public void run() { if (InMovieExperience.this == null || InMovieExperience.this.isDestroyed() || InMovieExperience.this.isFinishing() ) return; if (imeBottomFragment != null) imeBottomFragment.playbackStatusUpdate(playbackStatus, lastTimeCode); } }); switch (playbackStatus){ case PREPARED: break; case STARTED: break; case STOP: break; case TIMESTAMP_UPDATE: break; } if (isCommentaryOn && isCommentaryAvailable()){ resyncCommentary(); /* TODO: should handle commentary here. 
resync if the timediference is more than 1/2 second and keep track of the player state with it's paused or playing */ } } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); hideShowNextGenView(); } public int getLayoutViewId(){ return R.layout.next_gen_videoview; } private void hideShowNextGenView(){ View nextGenView = findViewById(R.id.next_gen_ime_bottom_view); if (nextGenView == null) return; String label = "interstitial"; if (bInterstitialVideoComplete){ double percentageDbl = (double)mainMovieFragment.getCurrentPosition()/ (double)mainMovieFragment.getDuration() * 100.0; int percentage = ((int)((percentageDbl + 2.5) / 5) * 5); label = Integer.toString(percentage); } switch (this.getResources().getConfiguration().orientation) { case Configuration.ORIENTATION_PORTRAIT: nextGenView.setVisibility(View.VISIBLE); actionbarPlaceHolder.setVisibility(View.VISIBLE); if (mediaController != null) mediaController.hideShowControls(true); NGEAnalyticData.reportEvent(InMovieExperience.this, null, NGEAnalyticData.AnalyticAction.ACTION_ROTATE_SCREEN_SHOW_EXTRAS, label, null); break; case Configuration.ORIENTATION_LANDSCAPE: nextGenView.setVisibility(View.GONE); actionbarPlaceHolder.setVisibility(View.GONE); getSupportActionBar().hide(); if (mediaController != null) mediaController.hideShowControls(false); NGEAnalyticData.reportEvent(InMovieExperience.this, null, NGEAnalyticData.AnalyticAction.ACTION_ROTATE_SCREEN_HIDE_EXTRAS, label, null); imeBottomFragment.onOrientationChange(this.getResources().getConfiguration().orientation); } } @Override boolean shouldHideActionBar(){ return (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE); } private class ErrorListener implements MediaPlayer.OnErrorListener { @Override public boolean onError(MediaPlayer mp, int what, int extra) { return true; } } protected MediaPlayer.OnErrorListener getOnErrorListener(){ return new ErrorListener(); } 
private class ProgressBarAnimation extends Animation{ private ProgressBar progressBar; public ProgressBarAnimation(ProgressBar progressBar) { super(); this.progressBar = progressBar; } @Override protected void applyTransformation(float interpolatedTime, Transformation t) { super.applyTransformation(interpolatedTime, t); float value = 0 + (100) * interpolatedTime; progressBar.setProgress((int) value); } } private class PreparedListener implements MediaPlayer.OnPreparedListener { @Override public void onPrepared(MediaPlayer mp) { interstitialVideoView.start(); skipThisView.setVisibility(View.VISIBLE); skipThisView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { runOnUiThread(new Runnable() { @Override public void run() { bInterstitialVideoComplete = true; NextGenExperience.getNextGenEventHandler().setInterstitialSkippedForContent(NextGenExperience.getNextgenPlaybackObject()); playMainMovie(); NGEAnalyticData.reportEvent(InMovieExperience.this, null, NGEAnalyticData.AnalyticAction.ACTION_SKIP_INTERSTITIAL, null, null); } }); } }); skipThisCounter.setProgress(0); skipThisCounter.setProgress(100); ProgressBarAnimation skipProgressBarAnim = new ProgressBarAnimation(skipThisCounter); skipProgressBarAnim.setDuration(mp.getDuration()); skipThisCounter.startAnimation(skipProgressBarAnim); return; } } protected MediaPlayer.OnPreparedListener getOnPreparedListener(){ return new PreparedListener(); } private synchronized void playMainMovie(){ currentUri = Uri.parse(""); if (drmStatus == DRMStatus.IN_PROGRESS){ // show loading mainMovieFragment.showLoadingView(); /*mDialog.setCanceledOnTouchOutside(false); mDialog.setMessage(getResources().getString(R.string.loading)); mDialog.show();*/ return; }else if (drmStatus == DRMStatus.FAILED){ //Show error Message and exit finish(); return; } else if (!bInterstitialVideoComplete){ // wait for interstitial video to be finished return; } if (skipThisView != null) { 
skipThisView.setVisibility(View.GONE); skipThisView.setOnClickListener(null); } interstitialVideoView.stopPlayback(); interstitialVideoView.setVisibility(View.GONE); mainMovieFragment.hideLoadingView(); //mDialog.hide(); fragmentTransactionEngine.replaceFragment(getSupportFragmentManager(), R.id.video_view_frame, mainMovieFragment); if (imeUpdateTimer == null){ imeUpdateTimer = new Timer(); } if (imeUpdateTask == null){ imeUpdateTask = new TimerTask() { @Override public void run() { if (InMovieExperience.this != null && !InMovieExperience.this.isFinishing() && !InMovieExperience.this.isDestroyed()) updateImeFragment(NextGenPlaybackStatus.TIMESTAMP_UPDATE, mainMovieFragment.getCurrentPosition()); } }; imeUpdateTimer.scheduleAtFixedRate(imeUpdateTask, 0, 1000); } updateImeFragment(NextGenPlaybackStatus.PREPARED, -1L); } int resumePlayTime = -1; boolean shouldStartAfterResume = true; @Override public void onPause() { shouldStartAfterResume = mainMovieFragment.isPlaying(); resumePlayTime = mainMovieFragment.getCurrentPosition(); if (isCommentaryOn && commentaryAudioPlayer != null){ commentaryAudioPlayer.pause(); } if (!bInterstitialVideoComplete && interstitialVideoView.isPlaying()){ interstitialVideoView.pause(); } super.onPause(); } public void onResume() { if (resumePlayTime != -1){ mainMovieFragment.setResumeTime(resumePlayTime); if (!shouldStartAfterResume) mainMovieFragment.pause(); } super.onResume(); if (currentUri == null || StringHelper.isEmpty(currentUri.toString())) { currentUri = INTERSTITIAL_VIDEO_URI; drmStatus = DRMStatus.IN_PROGRESS; if (NextGenExperience.getNextGenEventHandler().shouldShowInterstitialForContent(NextGenExperience.getNextgenPlaybackObject())) { interstitialVideoView.setVisibility(View.VISIBLE); interstitialVideoView.setVideoURI(currentUri); } else { bInterstitialVideoComplete = true; playMainMovie(); } startStreamPreparations(); } else{ skipThisView.setVisibility(View.GONE); skipThisView.setOnClickListener(null); currentUri = 
Uri.parse(""); } if (!bInterstitialVideoComplete && interstitialVideoView.getVisibility() == View.VISIBLE){ interstitialVideoView.resume(); } hideShowNextGenView(); } private void startStreamPreparations(){ mainMovieFragment.streamStartPreparations(new ResultListener<Boolean>() { @Override public void onResult(Boolean result) { drmStatus = DRMStatus.SUCCESS; runOnUiThread(new Runnable() { @Override public void run() { if (!InMovieExperience.this.isDestroyed() && !InMovieExperience.this.isFinishing()) playMainMovie(); } }); } @Override public <E extends Exception> void onException(E e) { drmStatus = DRMStatus.FAILED; } }); } @Override public void onDestroy() { super.onDestroy(); if (mediaController != null){ mediaController.onPlayerDestroy(); } if (imeUpdateTask != null) { imeUpdateTask.cancel(); imeUpdateTask = null; } if (imeUpdateTimer != null){ imeUpdateTimer.cancel(); imeUpdateTimer = null; } if (fragmentTransactionEngine != null) fragmentTransactionEngine.onDestroy(); if (commentaryAudioPlayer != null){ if (isCommentaryOn) commentaryAudioPlayer.stop(); commentaryAudioPlayer.release(); } } @Override public void resetUI(boolean isRoot){ } public int getMainFrameId(){ return R.id.next_gen_ime_bottom_view; } public int getLeftFrameId(){ return R.id.next_gen_ime_bottom_view; } public int getRightFrameId(){ return R.id.next_gen_ime_bottom_view; } boolean isPausedByIME = false; public void pauseMovieForImeECPiece(){ if (mediaController.isShowing()){ mediaController.hide(); } mainMovieFragment.pauseForIME(); isPausedByIME = true; } //tr 9/28 public void resumeMovideForImeECPiece(){ if (mediaController.isShowing()){ mediaController.hide(); } mainMovieFragment.resumePlaybackFromIME(); isPausedByIME = true; } @Override public void transitLeftFragment(Fragment nextFragment){ fragmentTransactionEngine.transitFragment(getSupportFragmentManager(), getLeftFrameId(), nextFragment); } @Override public void transitRightFragment(Fragment nextFragment){ 
fragmentTransactionEngine.transitFragment(getSupportFragmentManager(), getRightFrameId(), nextFragment); } @Override public void transitMainFragment(Fragment nextFragment){ ecFragmentsCounter = ecFragmentsCounter + 1; fragmentTransactionEngine.transitFragment(getSupportFragmentManager(), getMainFrameId(), nextFragment); } @Override public int getLeftButtonLogoId(){ return R.drawable.home_logo; } @Override public String getBackgroundImgUri(){ return ""; } @Override public String getLeftButtonText(){ return getResources().getString(R.string.home_button_text); } public String getRightTitleImageUri(){ return ""; } @Override protected void onLeftTopActionBarButtonPressed(){ finish(); } @Override public String getRightTitleText(){ return ""; } private boolean isCommentaryAvailable(){ return !StringHelper.isEmpty(NextGenExperience.getMovieMetaData().getCommentaryTrackURL()); } private boolean isCommentaryOn = false; private NGECommentaryPlayersStatusListener commentaryPlayersStatusListener = null; private void prepareCommentaryTrack(){ if (isCommentaryAvailable()) { if (mainMovieFragment.canHandleCommentaryAudioTrackSwitching()){ List<String> commentaries = new ArrayList<>(); commentaries.add(NextGenExperience.getMovieMetaData().getCommentaryTrackURL()); mainMovieFragment.setCommentaryTrackUrls(commentaries); }else { try { commentaryAudioPlayer = MediaPlayer.create(this, Uri.parse(NextGenExperience.getMovieMetaData().getCommentaryTrackURL())); commentaryAudioPlayer.setLooping(false); commentaryPlayersStatusListener = new NGECommentaryPlayersStatusListener(); commentaryAudioPlayer.setOnPreparedListener(commentaryPlayersStatusListener); commentaryAudioPlayer.setOnInfoListener(commentaryPlayersStatusListener); } catch (Exception ex) { NextGenLogger.e(F.TAG, ex.getMessage()); } } } } private class NGECommentaryPlayersStatusListener implements MediaPlayer.OnPreparedListener, MediaPlayer.OnCompletionListener, MediaPlayer.OnInfoListener { private NextGenPlaybackStatus 
playerStatus = NextGenPlaybackStatus.BUFFERING; public NextGenPlaybackStatus getPlayerStatus(){ return playerStatus; } @Override public void onPrepared(MediaPlayer mp) { playerStatus = NextGenPlaybackStatus.READY; resyncCommentary(); } @Override public void onCompletion(MediaPlayer mp) { playerStatus = NextGenPlaybackStatus.COMPLETED; } @Override public boolean onInfo(MediaPlayer mp, int what, int extra) { NextGenLogger.d(F.TAG_COMMENTARY, "CommentaryPlayer " + InMovieExperience.this.getClass().getSimpleName() + ".onInfo: " + what); switch (what) { case MediaPlayer.MEDIA_INFO_BUFFERING_START: // @since API Level 9 NextGenLogger.d(F.TAG_COMMENTARY, "CommentaryPlayer.onInfo: MEDIA_INFO_BUFFERING_START"); playerStatus = NextGenPlaybackStatus.BUFFERING; resyncCommentary(); break; case MediaPlayer.MEDIA_INFO_BUFFERING_END: // @since API Level 9 NextGenLogger.d(F.TAG_COMMENTARY, "CommentaryPlayer.onInfo: MEDIA_INFO_BUFFERING_END"); playerStatus = NextGenPlaybackStatus.READY; resyncCommentary(); //mDialog.hide(); break; case MediaPlayer.MEDIA_INFO_VIDEO_RENDERING_START: playerStatus = NextGenPlaybackStatus.READY; resyncCommentary(); //mDialog.hide(); break; default: //mDialog.hide(); break; } return true; } } /* private void toggleCommentary(){ if (isCommentaryAvailable() && commentaryAudioPlayer != null){ if (isCommentaryOn) { // turning commentary off isCommentaryOn = false; resyncCommentary(); }else { // turning commentary on isCommentaryOn = true; resyncCommentary(); commentaryAudioPlayer.start(); commentaryAudioPlayer.seekTo(mainMovieFragment.getCurrentPosition() - mainMovieFragment.getMovieOffsetMilliSecond()); } } actionBarRightTextView.setTextColor(isCommentaryOn? 
getResources().getColor(R.color.drawer_yellow) : getResources().getColor(R.color.gray)); }*/ private void switchCommentary(boolean bOnOff){ isCommentaryOn = bOnOff; if (isCommentaryAvailable()){ if (mainMovieFragment.canHandleCommentaryAudioTrackSwitching()){ mainMovieFragment.setActiveCommentaryTrack(bOnOff ? 0 : -1); }else if (commentaryAudioPlayer != null) { if (!bOnOff) { // turning commentary off resyncCommentary(); } else { // turning commentary on resyncCommentary(); commentaryAudioPlayer.start(); commentaryAudioPlayer.seekTo(mainMovieFragment.getCurrentPosition() - mainMovieFragment.getMovieOffsetMilliSecond()); } } } if (isCommentaryOn) { actionBarRightTextView.setTextColor(getResources().getColor(R.color.drawer_yellow)); actionBarRightTextView.setText(getResources().getString(R.string.nge_commentary_on)); actionBarRightTextView.setCompoundDrawablesWithIntrinsicBounds(null, null, getResources().getDrawable(R.drawable.nge_commentary_on), null); } else { actionBarRightTextView.setTextColor(getResources().getColor(R.color.white)); actionBarRightTextView.setText(getResources().getString(R.string.nge_commentary)); actionBarRightTextView.setCompoundDrawablesWithIntrinsicBounds(null, null, getResources().getDrawable(R.drawable.nge_commentary), null); } } private boolean bPausedForCommentaryBuffering = false; private void resyncCommentary(){ if (isCommentaryAvailable() && !mainMovieFragment.canHandleCommentaryAudioTrackSwitching()){ if (isCommentaryOn){ if (commentaryPlayersStatusListener.getPlayerStatus() == NextGenPlaybackStatus.READY && mainMovieFragment.getPlaybackStatus() == NextGenPlaybackStatus.READY){ // both ready int mainMovieTime = mainMovieFragment.getCurrentPosition() - mainMovieFragment.getMovieOffsetMilliSecond(); if (mainMovieTime < 0) mainMovieTime = 0; int commentaryTime = commentaryAudioPlayer.getCurrentPosition(); int timeDifference = Math.abs(mainMovieTime - commentaryTime); if (timeDifference > 150) { // when they are out of sync i.e. 
more than 150 mini seconds apart. commentaryAudioPlayer.start(); commentaryAudioPlayer.seekTo(mainMovieTime); final boolean bWasPlaying ; if (mainMovieFragment.isPlaying()) { mainMovieFragment.pause(); bWasPlaying = true; }else bWasPlaying = false; commentaryAudioPlayer.setOnSeekCompleteListener(new MediaPlayer.OnSeekCompleteListener() { @Override public void onSeekComplete(MediaPlayer mp) { if (bWasPlaying){ try{ mainMovieFragment.resumePlayback(); }catch (Exception ex){} } } }); } if (mainMovieFragment.isPlaying()) { // if it's playing, check to see if recy commentaryAudioPlayer.start(); }else if (bPausedForCommentaryBuffering){ // if it's paused for commentary buffering bPausedForCommentaryBuffering = false; mainMovieFragment.resumePlayback(); commentaryAudioPlayer.start(); } else if (commentaryAudioPlayer.isPlaying()){ // if main movie is paused. commentaryAudioPlayer.pause(); } } else if (commentaryPlayersStatusListener.getPlayerStatus() == NextGenPlaybackStatus.BUFFERING || mainMovieFragment.getPlaybackStatus() == NextGenPlaybackStatus.BUFFERING){ // show loading progress bar and pause and wait for buffering completion if (commentaryPlayersStatusListener.getPlayerStatus() == NextGenPlaybackStatus.READY && commentaryAudioPlayer.isPlaying()) commentaryAudioPlayer.pause(); if (mainMovieFragment.getPlaybackStatus() == NextGenPlaybackStatus.READY){ bPausedForCommentaryBuffering = true; mainMovieFragment.pause(); } } mainMovieFragment.switchMainFeatureAudio(false); // turn off movie audio track } else{ // switch off commentary mainMovieFragment.switchMainFeatureAudio(true); // turn on movie audio track if (commentaryPlayersStatusListener.getPlayerStatus() == NextGenPlaybackStatus.READY && commentaryAudioPlayer.isPlaying()) commentaryAudioPlayer.pause(); } } } private android.os.Handler mHandler = null; private HandlerThread mHandlerThread = null; public void startHandlerThread(){ mHandlerThread = new HandlerThread("HandlerThread"); mHandlerThread.start(); mHandler 
= new Handler(mHandlerThread.getLooper()); } public void detectScreenShotService() { mHandlerThread = new HandlerThread("HandlerThread"); mHandlerThread.start(); mHandler = new Handler(mHandlerThread.getLooper()); final int delay = 3000; //milliseconds final ActivityManager am = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE); mHandler.postDelayed(new Runnable() { public void run() { List<ActivityManager.RunningServiceInfo> rs = am.getRunningServices(200); for (ActivityManager.RunningServiceInfo ar : rs) { if (ar.process.equals("com.android.systemui:screenshot")) { Toast.makeText(InMovieExperience.this, "Screenshot captured!!", Toast.LENGTH_LONG).show(); } } mHandler.postDelayed(this, delay); } }, delay); } }
package com.wolfesoftware.mipsos.simulator; public class SimulatorCore { // registers private int registers[] = new int[32]; private int pc; private int hi = 0; private int lo = 0; private Memory memory; private SimulatorStatus status = SimulatorStatus.Ready; private ISimulatorListener listener = null; private SimulatorOptions options; public SimulatorCore(SimulatorOptions options, ISimulatorListener listener) { this.options = options; this.listener = listener; memory = new Memory(options.pageSizeExponent); } public void storeSegment(Segment segment) { memory.storeSegment(segment); } public void setPc(int address) { pc = address; } public SimulatorStatus getStatus() { return status; } public void run() { while (status != SimulatorStatus.Done) internalStep(); } private void internalStep() { int instruction = memory.loadWord(pc); pc += 4; status = SimulatorStatus.Ready; // assume success executeInstruction(instruction); // fix the zero register registers[0] = 0; } /** has a big switch in it */ private void executeInstruction(int instruction) { // get all possible fields int opcode = instruction >>> 26; int rs = instruction >> 21 & 0x1F; int rt = instruction >> 16 & 0x1F; int rd = instruction >> 11 & 0x1F; int shamt = instruction >> 6 & 0x1F; int funct = instruction & 0x1F; int zeroExtImm = instruction & 0xFFFF; int signExtImm = ((zeroExtImm & 0x8000) == 0 ? 
zeroExtImm : zeroExtImm - 0x10000); int target = instruction & 0x02FFFFFF; int targetAddress = (pc & 0xF0000000) | (target << 2); // get instruction from opcode and maybe funct MipsInstr instr = MipsInstr.fromOpcodeAndFunct(opcode, funct); // execute switch (instr) { case ADD: registers[rd] = registers[rs] + registers[rt]; break; case ADDI: registers[rt] = registers[rs] + signExtImm; break; case AND: registers[rd] = registers[rs] & registers[rt]; break; case ANDI: registers[rt] = registers[rs] & zeroExtImm; break; case BEQ: if (registers[rt] == registers[rs]) pc += signExtImm << 2; break; case BNE: if (registers[rt] != registers[rs]) pc += signExtImm << 2; break; case BREAK: status = SimulatorStatus.Break; break; case DIV: hi = registers[rs] / registers[rt]; lo = registers[rs] % registers[rt]; break; case J: pc = targetAddress; break; case JAL: registers[31] = pc; pc = targetAddress; break; case JALR: registers[rd] = pc; pc = registers[rs]; break; case JR: pc = registers[rs]; break; case LB: registers[rt] = memory.loadByte(signExtImm + registers[rs]); break; case LH: registers[rt] = memory.loadHalf(signExtImm + registers[rs]); break; case LUI: registers[rt] = zeroExtImm << 16; break; case LW: registers[rt] = memory.loadWord(signExtImm + registers[rs]); break; case MFHI: registers[rd] = hi; break; case MFLO: registers[rd] = lo; break; case MTHI: registers[rs] = hi; break; case MTLO: registers[rs] = lo; break; case MULT: long rslt = (long)registers[rs] * (long)registers[rt]; hi = (int)(0xFFFFFFFFL & (rslt >> 32)); lo = (int)(0xFFFFFFFFL & rslt); break; case NOP: break; case NOR: registers[rd] = ~(registers[rs] | registers[rt]); break; case OR: registers[rd] = registers[rs] | registers[rt]; break; case ORI: registers[rt] = registers[rs] | zeroExtImm; break; case SB: memory.storeByte(signExtImm + registers[rs], (byte)(registers[rt] & 0xFF)); break; case SH: memory.storeHalf(signExtImm + registers[rs], (short)(registers[rt] & 0xFFFF)); break; case SLL: registers[rd] = 
registers[rt] << shamt; break; case SLLV: registers[rd] = registers[rt] << registers[rs]; break; case SLT: registers[rd] = (registers[rs] < registers[rt] ? 1 : 0); break; case SLTI: registers[rt] = (registers[rs] < signExtImm ? 1 : 0); break; case SRA: registers[rd] = registers[rt] >> shamt; break; case SRAV: registers[rd] = registers[rt] >> registers[rs]; break; case SRL: registers[rd] = registers[rt] >>> shamt; break; case SRLV: registers[rd] = registers[rt] >>> registers[rs]; break; case SUB: registers[rd] = registers[rs] - registers[rt]; break; case SW: memory.storeWord(signExtImm + registers[rs], registers[rt]); break; case SYSCALL: syscall(); break; case XOR: registers[rd] = registers[rs] ^ registers[rt]; break; case XORI: registers[rt] = registers[rs] ^ zeroExtImm; break; default: throw new RuntimeException(); // TODO } } private void syscall() { // spim syscall codes int syscallCode = registers[2]; switch (syscallCode) { case 1: // print_int $a0 = integer checkFancyIoSupport(); for (char c : Integer.toString(registers[4]).toCharArray()) listener.printCharacter(c); break; case 2: // print_float $f12 = float case 3: // print_double $f12 = double throw new RuntimeException("floating point operations are not supported"); case 4: // print_string $a0 = string { checkFancyIoSupport(); int cursor = registers[4]; while (true) { byte c = memory.loadByte(cursor); if (c == 0) break; listener.printCharacter((char)c); cursor++; } break; } case 5: // read_int integer (in $v0) { checkFancyIoSupport(); StringBuilder builder = new StringBuilder(); while (true) { char c = listener.readCharacter(); if (c == '\n') break; builder.append(c); } registers[2] = Integer.parseInt(builder.toString().trim()); break; } case 6: // read_float float (in $f0) case 7: // read_double double (in $f0) throw new RuntimeException("floating point operations are not supported"); case 8: // read_string $a0 = buffer, $a1 = length { checkFancyIoSupport(); int cursor = registers[4]; int maxLenght = 
registers[5]; for (int i = 0; i < maxLenght; i++) { char c = listener.readCharacter(); memory.storeByte(cursor, (byte)c); if (c == '\n') break; cursor++; } break; } case 9: // sbrk $a0 = amount address (in $v0) throw new RuntimeException("sbrk is not supported"); case 10: // exit status = SimulatorStatus.Done; break; case 11: // print_character $a0 = character listener.printCharacter((char)registers[4]); break; case 12: // read_character character (in $v0) registers[2] = listener.readCharacter(); break; case 13: // open $a0 = filename, $a1 = flags, $a2 = mode file descriptor (in $v0) case 14: // read $a0 = file descriptor, $a1 = buffer, $a2 = count bytes read (in $v0) case 15: // write $a0 = file descriptor, $a1 = buffer, $a2 = count bytes written (in $v0) case 16: // close $a0 = file descriptor 0 (in $v0) throw new RuntimeException("file io is not supported"); case 17: // exit2 $a0 = value throw new RuntimeException("exit2 is not supported"); default: throw new RuntimeException("illegal syscall code"); } } private void checkFancyIoSupport() { if (options.fancyIoSupport) return; throw new RuntimeException("fancy IO is not enabled"); } public static class SimulatorOptions { public int pageSizeExponent = 6; public boolean fancyIoSupport = false; } }
// JSON.java

package ed.js;

import java.util.*;

import org.mozilla.javascript.*;

import ed.js.func.*;
import ed.js.engine.*;

/**
 * JSON-ish serialization and parsing for the ed.js object model.
 *
 * <p>Serialization walks JSObject/JSArray graphs and writes a JavaScript
 * literal (in "trusted" mode it may emit executable constructs such as
 * {@code new Date(...)} and function sources). Parsing uses the Rhino parser
 * to read an object or array literal back into JSObject/JSArray trees.
 */
public class JSON {

    // Object properties that are never serialized (internal bookkeeping names).
    static Set<String> IGNORE_NAMES = new HashSet<String>();
    static {
        IGNORE_NAMES.add( "_save" );
        IGNORE_NAMES.add( "_update" );
        IGNORE_NAMES.add( "_ns" );
    }

    /**
     * Registers the global helpers {@code tojson} (trusted), {@code tojson_u}
     * (untrusted) and {@code fromjson} on the given scope.
     */
    public static void init( Scope s ){

        s.put( "tojson" , new JSFunctionCalls1(){
                public Object call( Scope s , Object o , Object foo[] ){
                    return serialize( o , true );
                }
            } , true );

        s.put( "tojson_u" , new JSFunctionCalls1(){
                public Object call( Scope s , Object o , Object foo[] ){
                    return serialize( o , false );
                }
            } , true );

        s.put( "fromjson" , new JSFunctionCalls1(){
                public Object call( Scope s , Object o , Object foo[] ){
                    return parse( o.toString() );
                }
            } , true );
    }

    /** Serializes in trusted mode. */
    public static String serialize( Object o ){
        // Backwards compatibility
        return serialize( o, true );
    }

    /** Serializes with "\n" as the soft line separator. */
    public static String serialize( Object o , boolean trusted ){
        return serialize( o , trusted , "\n" );
    }

    /**
     * Serializes {@code o} into a new string.
     *
     * @param trusted when true, dates/functions/ObjectIds are written as
     *        executable constructor calls; when false they are written as
     *        plain values (functions are rejected).
     * @param nl soft line separator inserted to keep lines short; "" disables it
     * @throws RuntimeException wrapping any IOException from the Appendable
     */
    public static String serialize( Object o , boolean trusted , String nl ){
        StringBuilder buf = new StringBuilder();
        try {
            serialize( buf , o , trusted , nl );
        }
        catch ( java.io.IOException e ){
            // a StringBuilder never actually throws; rewrap for the signature
            throw new RuntimeException( e );
        }
        return buf.toString();
    }

    /** Streaming variant with "\n" as the soft line separator. */
    public static void serialize( Appendable a , Object o , boolean trusted )
        throws java.io.IOException {
        serialize( a , o , trusted , "\n" );
    }

    /** Streaming variant; delegates to the recursive Serializer starting at indent 0. */
    public static void serialize( Appendable a , Object o , boolean trusted , String nl )
        throws java.io.IOException {
        Serializer.go( a , o , trusted , 0 , nl );
    }

    /** Recursive writer; all state is passed through parameters. */
    static class Serializer {

        // Cache of indent strings by depth (one space per level, per _i below).
        static Map<Integer,String> _indents = new HashMap<Integer,String>();

        /** Returns (and caches) the indent string for depth {@code i}. */
        static String _i( final int i ){
            String s = _indents.get( i );
            if ( s == null ){
                s = "";
                for ( int j=0; j<i; j++ )
                    s += " ";
                _indents.put( i , s );
            }
            return s;
        }

        /**
         * Writes {@code s} as a double-quoted JSON string, escaping
         * backslash, quote, newline, carriage return and tab.
         */
        static void string( Appendable a , String s )
            throws java.io.IOException {
            a.append("\"");
            for(int i = 0; i < s.length(); ++i){
                char c = s.charAt(i);
                if(c == '\\')
                    a.append("\\\\");
                else if(c == '"')
                    a.append("\\\"");
                else if(c == '\n')
                    a.append("\\n");
                else if(c == '\r')
                    a.append("\\r");
                else if(c == '\t')
                    a.append("\\t");
                else
                    a.append(c);
            }
            a.append("\"");
        }

        /**
         * Serializes one value, dispatching on its runtime type. Recurses for
         * arrays and objects, incrementing {@code indent} for object members.
         */
        static void go( Appendable a , Object something , boolean trusted , int indent , String nl )
            throws java.io.IOException {

            // Soft line-wrap: if writing into a StringBuilder and the current
            // line is already longer than 60 chars, start a new line first.
            if ( nl.length() > 0 ){
                if ( a instanceof StringBuilder ){
                    StringBuilder sb = (StringBuilder)a;
                    int lastNL = sb.lastIndexOf( nl );
                    if ( sb.length() - lastNL > 60 ){
                        a.append( nl );
                    }
                }
            }

            if ( something == null ){
                a.append( "null" );
                return;
            }

            // Numbers, booleans and regexes serialize via toString unquoted.
            if ( something instanceof Number || something instanceof Boolean || something instanceof JSRegex ){
                a.append( something.toString() );
                return;
            }

            // Dates: executable "new Date( ms )" when trusted, bare millis otherwise.
            if ( something instanceof JSDate ){
                if ( trusted ) {
                    a.append( "new Date( " + ((JSDate)something)._time + " ) " );
                    return;
                }
                else {
                    a.append( new Long(((JSDate)something)._time).toString() );
                    return;
                }
            }

            if ( something instanceof JSString || something instanceof String ){
                string( a , something.toString() );
                return;
            }

            // Function source is only emitted in trusted mode.
            if ( something instanceof JSFunction ){
                if ( trusted ) {
                    a.append( something.toString() );
                    return;
                }
                throw new java.io.IOException("can't serialize functions in untrusted mode");
            }

            // ObjectIds: constructor call when trusted, quoted hex string otherwise.
            if ( something instanceof ed.db.ObjectId ){
                if ( trusted ) {
                    a.append( "ObjectId( \"" + something + "\" )" );
                    return;
                }
                else {
                    string( a , something.toString() );
                    return;
                }
            }

            // Any other non-JSObject falls back to toString.
            if ( ! ( something instanceof JSObject ) ){
                a.append( something.toString() );
                return;
            }

            if ( something instanceof JSArray ){
                JSArray arr = (JSArray)something;
                a.append( "[ " );
                for ( int i=0; i<arr._array.size(); i++ ){
                    if ( i > 0 )
                        a.append( " , " );
                    go( a , arr._array.get( i ) , trusted, indent , nl );
                }
                a.append( " ]" );
                return;
            }

            // Scopes are not walked; just their toString.
            if ( something instanceof Scope ){
                a.append( something.toString() );
                return;
            }

            JSObject o = (JSObject)something;

            // An object may provide its own "tojson" function; honor it.
            {
                Object foo = o.get( "tojson" );
                if ( foo != null && foo instanceof JSFunction ){
                    a.append( ((JSFunction)foo).call( Scope.GLOBAL ).toString() );
                    return;
                }
            }

            a.append( _i( indent ) );
            a.append( "{" );
            boolean first = true;
            for ( String s : o.keySet() ){
                if ( IGNORE_NAMES.contains( s ) )
                    continue;

                Object val = o.get( s );
                // Give lazy objects a chance to materialize; skip ones that
                // become null after prefunc().
                if ( val instanceof JSObjectBase ){
                    ((JSObjectBase)val).prefunc();
                    if ( o.get( s ) == null )
                        continue;
                }

                if ( first )
                    first = false;
                else
                    a.append( " ," );

                a.append( _i( indent + 1 ) );
                string( a , s );
                a.append( " : " );
                go( a , val , trusted , indent + 1 , nl );
            }
            a.append( _i( indent + 1 ) );
            // NOTE(review): a literal "\n" is appended here regardless of the
            // nl parameter — looks inconsistent with nl="" callers; confirm.
            a.append( " }\n" );
        }
    }

    /**
     * Parses a JSON object or array literal back into the ed.js object model
     * using the Rhino parser (the text is wrapped in a synthetic "return ...;").
     *
     * @throws JSException if the input is not an object or array literal
     */
    public static Object parse( String s ){
        CompilerEnvirons ce = new CompilerEnvirons();
        Parser p = new Parser( ce , ce.getErrorReporter() );
        s = "return " + s.trim() + ";";
        ScriptOrFnNode theNode = p.parse( s , "foo" , 0 );
        Node ret = theNode.getFirstChild();
        Convert._assertType( ret , Token.RETURN );
        Convert._assertOne( ret );
        Node lit = ret.getFirstChild();
        if ( lit.getType() != Token.OBJECTLIT && lit.getType() != Token.ARRAYLIT ){
            Debug.printTree( lit , 0 );
            throw new JSException( "not a literal" );
        }
        return build( lit );
    }

    /**
     * Recursively converts a Rhino parse-tree literal into JSObject/JSArray/
     * number/JSString values. Numbers that fit an int are narrowed to int.
     */
    private static Object build( Node n ){
        if ( n == null )
            return null;

        Node c;

        switch ( n.getType() ){
        case Token.OBJECTLIT:
            JSObject o = new JSObjectBase();
            // Property names travel on the node as a parallel array.
            Object[] names = (Object[])n.getProp( Node.OBJECT_IDS_PROP );
            int i=0;
            c = n.getFirstChild();
            while ( c != null ){
                o.set( names[i++].toString() , build( c ) );
                c = c.getNext();
            }
            return o;
        case Token.ARRAYLIT:
            JSArray a = new JSArray();
            c = n.getFirstChild();
            while ( c != null ){
                a.add( build( c ) );
                c = c.getNext();
            }
            return a;
        case Token.NUMBER:
            double d = n.getDouble();
            if ( JSNumericFunctions.couldBeInt( d ) )
                return (int)d;
            return d;
        case Token.STRING:
            return new JSString( n.getString() );
        }

        Debug.printTree( n , 0 );
        throw new RuntimeException( "what: " + n.getType() );
    }
}
import javax.persistence.*;

/**
 * JPA entity mapped to the {@code owner} table.
 *
 * <p>A plain mutable entity: a database-generated identity key plus a name
 * column, exposed through conventional getter/setter pairs.</p>
 */
@Entity
@Table(name = "owner")
public class Owner {

    /** Primary key, assigned by the database (IDENTITY strategy). */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    /** Owner name; mapped to the default column name. */
    private String name;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }
}
package me.capit.entropy.util; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.UUID; import me.capit.entropy.EntropyMain; public class Town implements Serializable { private static final long serialVersionUID = -8979383970183615150L; final EntropyMain plugin; private final UUID owner; private int daysOfInactivityLeft = 7; private int storageUsed; private String name; // The List object is more adaptive to the situation and consumes less activation memory than ArrayList. private List<Structure> structures = new ArrayList<Structure>(); private List<UUID> players = new ArrayList<UUID>(); // What is storageUsed for? public Town(EntropyMain plugin, UUID owner, String name, int storageUsed){ this.plugin = plugin; this.owner = owner; this.name = name; this.storageUsed = storageUsed; } public String getName(){ return name; } public UUID getOwner(){ return owner; } public int getStorageUsed(){ return storageUsed; } public void setStorageUsed(int amount){ storageUsed = amount; } public void inactiveDay(){ daysOfInactivityLeft } public int getInactiveDays(){ return daysOfInactivityLeft; } public void setInactiveDays(int days){ daysOfInactivityLeft = days; } public void addStructure(Structure structure){ structures.add(structure); } public void removeStructure(Structure structure){ structures.remove(structure); } public List<Structure> getStructures(){ return structures; } public void addPlayer(UUID uuid){ players.add(uuid); } public void removePlayer(UUID uuid){ players.remove(uuid); } public List<UUID> getPlayers(){ return players; } }
/**
 * STFrame is an extension of JFrame that contains
 * the logic required for the StreamThing program.
 *
 * @author jake-freeman
 */

import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.io.*;

public class STFrame extends JFrame
{
    /** Directory (relative to the working dir) holding character images. */
    private final String CHAR_PATH = "chars";
    //private char[] currentColors;

    /** Twelve color-variant buttons shown below the character selectors. */
    private JButton[] colors;

    /** Character selection combo boxes, one per player. */
    private JComboBox[] p;

    public STFrame()
    {
        super("Stream Thing");
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        setResizable(false);

        ArrayList<File> fileList = getFileList(CHAR_PATH);
        ArrayList<String> charList = getCharList(fileList);

        colors = new JButton[12];
        for (int i = 0; i < colors.length; i++)
            colors[i] = new JButton();

        p = new JComboBox[2];
        p[0] = new JComboBox<Object>(charList.toArray());
        p[1] = new JComboBox<Object>(charList.toArray());

        addActionListeners();
        addComponentsToPane(getContentPane());
        pack();
        setVisible(true);
    }

    /**
     * Adds all required action listeners to combo boxes
     * and buttons.
     */
    private void addActionListeners()
    {
        for (int i = 0; i < p.length; i++)
        {
            final int j = i;
            p[i].addActionListener
            (
                new ActionListener()
                {
                    public void actionPerformed(ActionEvent e)
                    {
                        System.out.println("Selected(" + j + "): " + (String)((JComboBox)e.getSource()).getSelectedItem());
                        charSelect((String)((JComboBox)e.getSource()).getSelectedItem(), 'A', CHAR_PATH);
                    }
                }
            );
        }
        for (int i = 0; i < colors.length; i++)
        {
            final int j = i;
            colors[i].addActionListener
            (
                new ActionListener()
                {
                    public void actionPerformed(ActionEvent e)
                    {
                        System.out.println("Selected(p" + j + "): " + (String)((JButton)e.getSource()).getText());
                        charSelect((String)((JButton)e.getSource()).getText(), 'A'/*[HACK]*/, CHAR_PATH);
                    }
                }
            );
        }
    }

    /**
     * Function to add components to the main content area.
     *
     * @param pane the content area to add UI items to
     */
    private void addComponentsToPane(Container pane)
    {
        pane.setLayout(new GridBagLayout());
        GridBagConstraints c = new GridBagConstraints();
        c.fill = GridBagConstraints.HORIZONTAL;
        c.gridwidth = 6;
        c.insets = new Insets(20,10,5,10); // padding
        c.gridx = 0;
        c.gridy = 0;
        pane.add(p[0], c);
        c.gridx = 6;
        pane.add(p[1], c);
        c.gridy = 1;
        c.gridwidth = 1;
        c.insets = new Insets(5,5,10,5);
        for (int i = 0; i < colors.length; i++)
        {
            c.gridx = i;
            pane.add(colors[i],c);
        }
    }

    /**
     * Include code to handle character changes to
     * update files and button colors.
     *
     * @param charName name of character currently selected
     * @param color
     * @param charPath path to the character folder
     */
    private void charSelect(String charName, char color, String charPath)
    {
        String fileNameBase = charName.replace(" ", "-");
        // select character
    }

    /**
     * Creates a list of character names based on file list.
     *
     * @param files an ArrayList of files to extract names from
     * @return a list of character names from file list
     * @see ArrayList
     */
    private ArrayList<String> getCharList(ArrayList<File> files)
    {
        ArrayList<String> chars = new ArrayList<String>();
        for (File f: files)
        {
            String character = f.getName().replace(".png", "");
            // this effectively picks the default character file for a name
            // (guard against an empty name so charAt cannot throw)
            if (!character.isEmpty() && character.charAt(character.length() - 1) == 'A')
            {
                // FIX(comment): drops the *trailing* 'A' (the original
                // comment wrongly said "at the beginning") and replaces
                // dashes with spaces
                chars.add(character.substring(0, character.length() - 1).replace("-", " "));
            }
        }
        // FIX(comment): Collections.sort on Strings is lexicographic, not
        // "by numerical value" as the original comment claimed
        Collections.sort(chars);
        return chars;
    }

    /**
     * Gets a list of the image files in the form of an
     * abstract file list.
     *
     * @param path the directory to create the file list of
     * @return a list of files in the given directory (empty when the path
     *         is missing, not a directory, or unreadable)
     * @see ArrayList
     */
    private ArrayList<File> getFileList(String path)
    {
        File dir = new File(path);
        File[] fileListArray = dir.listFiles();
        // FIX: File.listFiles() returns null when the path is not a readable
        // directory; the original dereferenced it unconditionally (NPE).
        if (fileListArray == null)
            return new ArrayList<File>();
        return new ArrayList<File>(Arrays.asList(fileListArray));
    }
}
// ===========================================================================
// NOTE(review): documentation-only pass — the code below is byte-identical to
// the original; only this header comment was added.
//
// Query is the abstract base for the Simple/Advanced catissuecore queries.
// getString() assembles the SQL from three pieces:
//   SELECT — the DataElements in resultView (or "*" when the view is empty),
//   FROM   — every object/table referenced by the view, the where-conditions
//            and the query start object (formFromString), each alias carrying
//            the "tableSufix" suffix so nested sub-queries stay distinct,
//   WHERE  — pairwise join conditions looked up in
//            Client.relationConditionsForRelatedTables (getJoinConditionString),
//            AND-ed with whereConditions.getString(tableSufix).
// execute() runs that SQL through a fresh JDBCDAO session and returns the
// resulting row list.
//
// Issues spotted but deliberately left unchanged in this pass:
//  * execute() opens a JDBCDAO session and never closes it (closeSession is
//    commented out) — possible connection leak; confirm DAO semantics.
//  * setViewElements(String) iterates a freshly created empty List, so it
//    always installs an empty result view — looks like an unfinished stub.
//  * the private getQueryObjects(String) helper is unused by the live code
//    path (its only call site is commented out inside getString()).
//  * "tableSufix" and "CANCER_REESEARCH_GROUP" are misspelled, but both are
//    externally visible, so renaming would break callers.
// ===========================================================================
package edu.wustl.catissuecore.query; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Vector; import edu.wustl.catissuecore.dao.JDBCDAO; import edu.wustl.common.util.dbManager.DAOException; import edu.wustl.common.util.logger.Logger; public abstract class Query { /** * Advanced query type constant */ public static final String ADVANCED_QUERY = "AdvancedQuery"; /** * Simple query type constant */ public static final String SIMPLE_QUERY = "SimpleQuery"; /** * Vector of DataElement objects that need to be selected in the output */ private Vector resultView = new Vector(); /** * Starting object from which all related objects can be part of the query */ protected String queryStartObject = new String(); /** * Parent object of the queryStartObject i.e. the object from which the * queryStartObject is derived */ private String parentOfQueryStartObject = new String(); /** * Object that forms the where part of query. * This is SimpleConditionsImpl object in case of Simple query * and AdvancedConditionsImpl object in case of Advanced query */ protected ConditionsImpl whereConditions; /** * Suffix that is appended to all table aliases which is helpful in case of nested queries * to differentiate between super query object from the subquery object of same type */ protected int tableSufix=1; /** * Participant object constant */ public static final String PARTICIPANT = "Participant"; public static final String COLLECTION_PROTOCOL_REGISTRATION = "CollectionProtocolRegistration"; public static final String COLLECTION_PROTOCOL = "CollectionProtocol"; public static final String COLLECTION_PROTOCOL_EVENT = "CollectionProtocolEvent"; public static final String SPECIMEN_COLLECTION_GROUP = "SpecimenCollectionGroup"; public static final String SPECIMEN = "Specimen"; public static final String PARTICIPANT_MEDICAL_IDENTIFIER = "ParticipantMedicalIdentifier"; public static final String INSTITUTION = "Institution"; public 
static final String DEPARTMENT = "Department"; public static final String CANCER_REESEARCH_GROUP = "CancerResearchGroup"; public static final String USER = "User"; public static final String ADDRESS = "Address"; public static final String CSM_USER = "CsmUser"; public static final String SITE = "Site"; public static final String STORAGE_TYPE = "StorageType"; public static final String STORAGE_CONTAINER_CAPACITY = "StorageContainerCapacity"; public static final String BIO_HAZARD = "BioHazard"; public static final String SPECIMEN_PROTOCOL = "SpecimenProtocol"; public static final String COLLECTION_COORDINATORS = "CollectionCoordinators"; public static final String SPECIMEN_REQUIREMENT = "SpecimenRequirement"; public static final String COLLECTION_SPECIMEN_REQUIREMENT = "CollectionSpecimenRequirement"; public static final String DISTRIBUTION_SPECIMEN_REQUIREMENT = "DistributionSpecimenRequirement"; public static final String DISTRIBUTION_PROTOCOL = "DistributionProtocol"; public static final String REPORTED_PROBLEM = "ReportedProblem"; /** * This method executes the query string formed from getString method and creates a temporary table. 
* @return Returns true in case everything is successful else false */ public List execute() throws DAOException { try { JDBCDAO dao = new JDBCDAO(); dao.openSession(); List list = dao.executeQuery(getString()); Logger.out.debug("SQL************"+getString()); return list; // dao.delete(tableName); // dao.create(tableName,Constants.DEFAULT_SPREADSHEET_COLUMNS); // Iterator iterator = list.iterator(); // while (iterator.hasNext()) // List row = (List) iterator.next(); // dao.insert(tableName, row); // dao.closeSession(); } catch(DAOException daoExp) { throw new DAOException(daoExp.getMessage(), daoExp); } catch(ClassNotFoundException classExp) { throw new DAOException(classExp.getMessage(), classExp); } } /** * Adds the dataElement to result view * @param dataElement - Data Element to be added * @return - true (as per the general contract of Collection.add). */ public boolean addElementToView(DataElement dataElement) { return resultView.add(dataElement); } public void setViewElements(String aliasName) { Vector vector = new Vector(); List list = new ArrayList(); Iterator iterator = list.iterator(); while(iterator.hasNext()) { List rowList = (List) iterator.next(); DataElement dataElement = new DataElement(); dataElement.setTable((String)rowList.get(0)); dataElement.setField((String)rowList.get(1)); vector.add(dataElement); } setResultView(vector); } /** * Returns the SQL representation of this query object * @return */ public String getString() { StringBuffer query= new StringBuffer(); HashSet set =new HashSet(); /** * Forming SELECT part of the query */ query.append("Select "); if(resultView.size()==0) { query.append(" * "); } else { DataElement dataElement; for(int i=0; i < resultView.size();i++) { dataElement = (DataElement)resultView.get(i); set.add(dataElement.getTable()); if(i != resultView.size()-1) query.append(dataElement.getTable()+tableSufix+"."+dataElement.getField()+", "); else query.append(dataElement.getTable()+tableSufix+"."+dataElement.getField()+" 
"); } } /** * Forming FROM part of query */ set.addAll(whereConditions.getQueryObjects()); set.add(this.queryStartObject); // HashSet set = this.getQueryObjects(this.queryStartObject); // for(int i = 0 ; i < resultView.size(); i++) // set.add(((DataElement)resultView.get(i)).getTable()); System.out.println("Set : "+ set.toString()); query.append("\nFROM "); query.append(this.formFromString(set)); /** * Forming WHERE part of the query */ query.append("\nWHERE "); String joinConditionString = this.getJoinConditionString(set); query.append(joinConditionString); String whereConditionsString = whereConditions.getString(tableSufix); if(whereConditionsString !=null) { if(joinConditionString!=null && joinConditionString.length()!=0) { query.append(" "+Operator.AND); } query.append(whereConditionsString); } return query.toString(); } /** * This method returns set of all objects related to queryStartObject transitively * @param string - Starting object to which all related objects should be found * @return set of all objects related to queryStartObject transitively */ private HashSet getQueryObjects(String queryStartObject) { HashSet set = new HashSet(); set.add(queryStartObject); Vector relatedObjectsCollection = (Vector)Client.relations.get(queryStartObject); if(relatedObjectsCollection == null) { return set; } set.addAll(relatedObjectsCollection); for(int i=0;i<relatedObjectsCollection.size();i++) { set.addAll(getQueryObjects((String) relatedObjectsCollection.get(i))); } // while(queryStartObject != null) // set.add(queryStartObject); // queryStartObject = (String) Client.relations.get(queryStartObject); return set; } /** * This method returns the Join Conditions string that joins all the tables/objects in the set. 
* In case its a subquery relation with the superquery is also appended in this string * @param set - objects in the query * @return - string containing all join conditions */ private String getJoinConditionString( final HashSet set) { StringBuffer joinConditionString =new StringBuffer(); Object[] tablesArray = set.toArray(); RelationCondition relationCondition = null; //If subquery then join with the superquery if(tableSufix>1) { relationCondition=(RelationCondition) Client.relationConditionsForRelatedTables.get(new Relation(this.parentOfQueryStartObject,this.queryStartObject)); if(relationCondition !=null) { joinConditionString.append(" "+relationCondition.getRightDataElement().toSQLString(tableSufix)); joinConditionString.append(Operator.EQUAL); joinConditionString.append(relationCondition.getRightDataElement().toSQLString(tableSufix-1)+" "); } } //For all permutations of tables find the joining conditions for(int i =0; i< tablesArray.length; i++) { for(int j=i+1; j < tablesArray.length; j++) { relationCondition=(RelationCondition) Client.relationConditionsForRelatedTables.get(new Relation((String)tablesArray[i],(String)tablesArray[j])); if ( relationCondition != null) { System.out.println(tablesArray[i]+" "+tablesArray[j]+" "+relationCondition.toSQLString(tableSufix)); if(joinConditionString.length() != 0) { joinConditionString.append(Operator.AND+" "); } joinConditionString.append(relationCondition.toSQLString(tableSufix)); } else { relationCondition=(RelationCondition) Client.relationConditionsForRelatedTables.get(new Relation((String)tablesArray[j],(String)tablesArray[i])); if ( relationCondition != null) { System.out.println(tablesArray[j]+" "+tablesArray[i]+" "+relationCondition.toSQLString(tableSufix)); if(joinConditionString.length() != 0) { joinConditionString.append(Operator.AND+" "); } joinConditionString.append(relationCondition.toSQLString(tableSufix)); } } } } return joinConditionString.toString(); } /** * This method returns the string of table 
names in set that forms FROM part of query * which forms the FROM part of the query * @param set - set of tables * @return A comma separated list of the tables in the set */ private String formFromString(final HashSet set) { StringBuffer fromString = new StringBuffer(); Iterator it = set.iterator(); Object tableAlias; while(it.hasNext()) { fromString.append(" "); tableAlias = it.next(); fromString.append(Client.objectTableNames.get(tableAlias)+ " "+ tableAlias+tableSufix+" "); if(it.hasNext()) { fromString.append(","); } } fromString.append(" "); return fromString.toString(); } public int getTableSufix() { return tableSufix; } public void setTableSufix(int tableSufix) { this.tableSufix = tableSufix; } public String getParentOfQueryStartObject() { return parentOfQueryStartObject; } public void setParentOfQueryStartObject(String parentOfQueryStartObject) { this.parentOfQueryStartObject = parentOfQueryStartObject; } /** * @param resultView The resultView to set. */ public void setResultView(Vector resultView) { this.resultView = resultView; } }
// NOTE(review): documentation-only pass — the code below is byte-identical to
// the original; only this header comment was added.
//
// Calculator wires a Malhar DAG that estimates pi by the Monte Carlo method:
// one generator emits random integers in [0, 30000]; each value is squared,
// consecutive values are paired (pair size 2) and summed, and the sum is
// compared against the constant 30000 * 30000. The comparator/counter wiring
// suggests points falling inside the quarter circle feed CountInCircle while
// every point feeds CountInSquare — confirm the exact semantics of
// LogicalCompareToConstant's greaterThanOrEqualTo port. The ratio
// inCircle / inSquare, multiplied by 4, is the instantaneous pi estimate,
// which is running-averaged and delivered to the console — or POSTed to
// "http://<addr>/channel/<name>" when MALHAR_AJAXSERVER_ADDRESS is set
// (see getConsolePort).
package com.malhartech.demos.pi; import com.malhartech.api.ApplicationFactory; import com.malhartech.api.DAG; import com.malhartech.api.Operator.InputPort; import com.malhartech.lib.io.ConsoleOutputOperator; import com.malhartech.lib.io.HttpOutputOperator; import com.malhartech.lib.math.*; import com.malhartech.lib.stream.AbstractAggregator; import com.malhartech.lib.stream.ArrayListAggregator; import com.malhartech.lib.stream.Counter; import com.malhartech.lib.testbench.RandomEventGenerator; import java.net.URI; import org.apache.hadoop.conf.Configuration; /** * * @author Chetan Narsude <chetan@malhar-inc.com> */ public class Calculator implements ApplicationFactory { private final boolean allInline = false; private InputPort<Object> getConsolePort(DAG b, String name) { // output to HTTP server when specified in environment setting String serverAddr = System.getenv("MALHAR_AJAXSERVER_ADDRESS"); if (serverAddr == null) { ConsoleOutputOperator oper = b.addOperator(name, new ConsoleOutputOperator()); oper.setStringFormat(name + ": %s"); return oper.input; } HttpOutputOperator<Object> oper = b.addOperator(name, new HttpOutputOperator<Object>()); URI u = URI.create("http://" + serverAddr + "/channel/" + name); oper.setResourceURL(u); return oper.input; } @Override public DAG getApplication(Configuration conf) { DAG dag = new DAG(conf); /* keep generating random values between 0 and 30000 */ RandomEventGenerator xyGenerator = dag.addOperator("GenerateX", RandomEventGenerator.class); xyGenerator.setMinvalue(0); xyGenerator.setMaxvalue(30000); /* calculate square of each of the values it receives */ SquareCalculus squareOperator = dag.addOperator("SquareX", SquareCalculus.class); /* pair the consecutive values */ AbstractAggregator<Integer> pairOperator = dag.addOperator("PairXY", new ArrayListAggregator<Integer>()); pairOperator.setSize(2); Sigma<Integer> sumOperator = dag.addOperator("SumXY", new Sigma<Integer>()); LogicalCompareToConstant<Integer> comparator = 
dag.addOperator("AnalyzeLocation", new LogicalCompareToConstant<Integer>()); comparator.setConstant(30000 * 30000); Counter inCircle = dag.addOperator("CountInCircle", Counter.class); Counter inSquare = dag.addOperator("CountInSquare", Counter.class); Division division = dag.addOperator("Ratio", Division.class); MultiplyByConstant multiplication = dag.addOperator("InstantPI", MultiplyByConstant.class); multiplication.setMultiplier(4); RunningAverage average = dag.addOperator("AveragePI", new RunningAverage()); dag.addStream("x", xyGenerator.integer_data, squareOperator.input).setInline(allInline); dag.addStream("sqr", squareOperator.integerResult, pairOperator.input).setInline(allInline); dag.addStream("x2andy2", pairOperator.output, sumOperator.input).setInline(allInline); dag.addStream("x2plusy2", sumOperator.integerResult, comparator.input, inSquare.input).setInline(allInline); dag.addStream("inCirclePoints", comparator.greaterThanOrEqualTo, inCircle.input).setInline(allInline); dag.addStream("numerator", inCircle.output, division.numerator).setInline(allInline); dag.addStream("denominator", inSquare.output, division.denominator).setInline(allInline); dag.addStream("ratio", division.doubleQuotient, multiplication.input).setInline(allInline); dag.addStream("instantPi", multiplication.doubleProduct, average.input).setInline(allInline); dag.addStream("averagePi", average.doubleAverage, getConsolePort(dag, "Console")).setInline(allInline); return dag; } }
// NOTE(review): documentation-only pass — comment text only was changed:
// a typo fix ("clusterx") and a review note on the hashCode override below.
package de.lmu.ifi.dbs.algorithm.result.clustering; import de.lmu.ifi.dbs.data.RealVector; import de.lmu.ifi.dbs.varianceanalysis.LocalPCA; import java.util.ArrayList; import java.util.List; /** * Provides a hierarchical correlation cluster in an arbitrary subspace * that holds the PCA, the ids of the objects * belonging to this cluster and the children and parents of this cluster. * * @author Elke Achtert (<a href="mailto:achtert@dbs.ifi.lmu.de">achtert@dbs.ifi.lmu.de</a>) */ public class HierarchicalCorrelationCluster extends HierarchicalCluster<HierarchicalCorrelationCluster> { /** * The PCA of this cluster. */ private final LocalPCA pca; /** * The centroid of this cluster. */ private RealVector centroid; /** * Provides a new hierarchical correlation cluster with the * specified parameters. * * @param pca the PCA of this cluster * @param ids the ids of the objects belonging to this cluster * @param label the label of this cluster * @param level the level of this cluster in the graph * @param levelIndex the index of this cluster within the level */ public HierarchicalCorrelationCluster(LocalPCA pca, List<Integer> ids, String label, int level, int levelIndex) { this(pca, ids, new ArrayList<HierarchicalCorrelationCluster>(), new ArrayList<HierarchicalCorrelationCluster>(), label, level, levelIndex); } /** * Provides a hierarchical correlation cluster in an arbitrary subspace * that holds the basis vectors of this cluster, the similarity matrix for * distance computations, the ids of the objects * belonging to this cluster and the children and parents of this cluster. 
* * @param pca the PCA of this cluster * @param ids the ids of the objects belonging to this cluster * @param children the list of children of this cluster * @param parents the list of parents of this cluster * @param label the label of this cluster * @param level the level of this cluster in the graph * @param levelIndex the index of this cluster within the level */ public HierarchicalCorrelationCluster(LocalPCA pca, List<Integer> ids, List<HierarchicalCorrelationCluster> children, List<HierarchicalCorrelationCluster> parents, String label, int level, int levelIndex) { super(ids, children, parents, label, level, levelIndex); this.pca = pca; } /** * Returns the PCA of this cluster. * * @return the PCA of this cluster */ public LocalPCA getPCA() { return pca; } /** * Returns a hash code value for this cluster, delegating to the PCA. * NOTE(review): this class overrides hashCode but not equals, so hashing is * based solely on the PCA while equality presumably comes from the * superclass — confirm the equals/hashCode contract still holds. * * @return a hash code value for this cluster */ public int hashCode() { return pca.hashCode(); } /** * Sets the centroid of this cluster. * * @param centroid the centroid to be set */ public void setCentroid(RealVector centroid) { this.centroid = centroid; } /** * Returns the centroid of this cluster. * * @return the centroid of this cluster */ public RealVector getCentroid() { return centroid; } }
package de.uni.freiburg.iig.telematik.wolfgang.editor.properties;

/**
 * Keys for the configurable Wolfgang editor properties: default sizes and
 * colors of net elements, label and font defaults, grid and zoom behaviour,
 * and a handful of application-level toggles.
 *
 * <p>Constant order is preserved from the original declaration, since enum
 * ordinals must not shift for existing persisted settings.</p>
 */
public enum WolfgangProperty {

    // --- Icon / element geometry defaults ---
    ICON_SIZE,
    DEFAULT_PLACE_SIZE,
    DEFAULT_TRANSITION_WIDTH,
    DEFAULT_TRANSITION_HEIGHT,
    DEFAULT_TOKEN_SIZE,
    DEFAULT_TOKEN_DISTANCE,

    // --- Label placement and colors ---
    DEFAULT_VERTICAL_LABEL_OFFSET,
    DEFAULT_HORIZONTAL_LABEL_OFFSET,
    DEFAULT_LABEL_BG_COLOR,
    DEFAULT_LABEL_LINE_COLOR,

    // --- Element colors, gradients, fonts, zoom ---
    DEFAULT_PLACE_COLOR,
    DEFAULT_TRANSITION_COLOR,
    DEFAULT_LINE_COLOR,
    DEFAULT_GRADIENT_COLOR,
    DEFAULT_GRADIENT_DIRECTION,
    DEFAULT_FONT_FAMILY,
    DEFAULT_FONT_SIZE,
    DEFAULT_ZOOM_STEP,

    // --- Canvas / grid ---
    // (sic) the misspelled name is kept: it is part of the public API and is
    // presumably persisted in property files — renaming would break them.
    BACKGROUD_COLOR,
    GRID_SIZE,
    GRID_COLOR,
    GRID_VISIBILITY,
    SNAP_TO_GRID,

    // --- Application toggles ---
    SHOW_UPDATE_NOTIFICATION,
    SHOW_FILE_EXTENSION_ASSOCIATION,
    REQUIRE_NET_TYPE,
    PN_VALIDATION;
}
// NOTE(review): documentation-only pass — the code below is byte-identical to
// the original; only this header comment was added.
//
// Completion contributor for YAML files: for any YAML text token whose text
// starts with "@" (the Symfony service-reference syntax), it offers every
// id/class pair from the project's Symfony2ProjectComponent service map as a
// ServiceStringLookupElement.
//
// Minor nits left as-is: CompletionContributor is imported both via the
// com.intellij.codeInsight.completion.* wildcard and explicitly, and
// addCompletions appears to override CompletionProvider#addCompletions
// without an @Override annotation — confirm the signature match.
package fr.adrienbrault.idea.symfony2plugin.config.yaml; import com.intellij.codeInsight.completion.*; import com.intellij.patterns.PlatformPatterns; import com.intellij.patterns.StandardPatterns; import com.intellij.psi.PsiElement; import com.intellij.util.ProcessingContext; import fr.adrienbrault.idea.symfony2plugin.Symfony2ProjectComponent; import fr.adrienbrault.idea.symfony2plugin.dic.ServiceStringLookupElement; import org.jetbrains.annotations.NotNull; import org.jetbrains.yaml.YAMLLanguage; import org.jetbrains.yaml.YAMLTokenTypes; import com.intellij.codeInsight.completion.CompletionContributor; import java.util.Map; public class YamlCompletionContributor extends CompletionContributor { public YamlCompletionContributor() { extend( CompletionType.BASIC, // @TODO: look if we can filter more here PlatformPatterns.psiElement(YAMLTokenTypes.TEXT).withText(StandardPatterns.string().startsWith("@")).withLanguage(YAMLLanguage.INSTANCE), new CompletionProvider<CompletionParameters>() { public void addCompletions(@NotNull CompletionParameters parameters, ProcessingContext context, @NotNull CompletionResultSet resultSet) { PsiElement element = parameters.getOriginalPosition(); if(element == null) { return; } Symfony2ProjectComponent symfony2ProjectComponent = element.getProject().getComponent(Symfony2ProjectComponent.class); Map<String,String> map = symfony2ProjectComponent.getServicesMap().getMap(); for( Map.Entry<String, String> entry: map.entrySet() ) { resultSet.addElement( new ServiceStringLookupElement(entry.getKey(), entry.getValue()) ); } } } ); } }
package ch.dockergarten.randomuser.business.user.boundary;

import ch.dockergarten.randomuser.business.user.control.UserService;
import ch.dockergarten.randomuser.business.user.entity.User;

import javax.inject.Inject;
import javax.validation.constraints.NotNull;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.net.URI;
import java.util.Collection;
import java.util.UUID;

/**
 * JAX-RS boundary exposing CRUD operations for {@link User} under "users".
 *
 * <p>Lookup-style operations translate an absent user into HTTP 404 by
 * throwing {@link NotFoundException}.</p>
 */
@Path("users")
public class UsersResource {

    private UserService userService;

    /** Constructor injection of the user service. */
    @Inject
    public UsersResource(@NotNull final UserService userService) {
        this.userService = userService;
    }

    /**
     * Creates a user; answers 201 with a Location header pointing at the
     * freshly assigned resource id.
     */
    @POST
    public Response create(@NotNull final User user, @Context final UriInfo info) {
        final UUID newId = userService.addUser(user);
        final URI location = info.getAbsolutePathBuilder().path("/" + newId).build();
        return Response.created(location).build();
    }

    /** Lists every known user. */
    @GET
    public Collection<User> read() {
        return userService.getAllUsers();
    }

    /** Fetches a single user by id, or 404 when unknown. */
    @GET
    @Path("{id}")
    public User read(@PathParam("id") @NotNull final UUID id) {
        return userService.getUser(id).orElseThrow(NotFoundException::new);
    }

    /**
     * Replaces the stored user; the path id overrides any id carried in the
     * payload. Answers 404 when the user does not exist.
     */
    @PUT
    @Path("{id}")
    public User update(@PathParam("id") @NotNull final UUID id, @NotNull final User user) {
        return userService.updateUser(user.toBuilder().id(id).build())
                .orElseThrow(NotFoundException::new);
    }

    /** Deletes the user; answers 204 on success and 404 when unknown. */
    @DELETE
    @Path("{id}")
    public Response delete(@PathParam("id") @NotNull final UUID id) {
        userService.deleteUser(id).orElseThrow(NotFoundException::new);
        return Response.noContent().build();
    }
}
package com.ait.lienzo.client.core.shape.wires.util; import com.ait.lienzo.client.core.shape.IDestroyable; import com.ait.lienzo.client.core.shape.Layer; import com.ait.lienzo.client.core.shape.Text; import com.ait.lienzo.client.core.shape.wires.WiresConnector; import com.ait.lienzo.client.core.shape.wires.event.WiresConnectorPointsChangedEvent; import com.ait.lienzo.client.core.shape.wires.event.WiresConnectorPointsChangedHandler; import com.ait.tooling.common.api.java.util.function.BiConsumer; import com.ait.tooling.common.api.java.util.function.Consumer; import com.ait.tooling.nativetools.client.event.HandlerRegistrationManager; public class WiresConnectorLabel implements IDestroyable { private final WiresConnector connector; private final HandlerRegistrationManager m_registrationManager; private final Text text; private final BiConsumer<WiresConnector, Text> executor; WiresConnectorLabel(final String text, final WiresConnector connector, final BiConsumer<WiresConnector, Text> executor) { this(new Text(text), connector, executor, new HandlerRegistrationManager()); } WiresConnectorLabel(final Text text, final WiresConnector connector, final BiConsumer<WiresConnector, Text> executor, final HandlerRegistrationManager registrationManager) { this.connector = connector; this.executor = executor; this.m_registrationManager = registrationManager; this.text = text; init(); } public WiresConnectorLabel configure(Consumer<Text> consumer) { consumer.accept(text); refresh(); return this; } public WiresConnectorLabel show() { text.setAlpha(1); refresh(); return this; } public WiresConnectorLabel hide() { text.setAlpha(0); batch(); return this; } public Text getText() { return text; } public boolean isVisible() { return text.getAlpha() > 0; } @Override public void destroy() { m_registrationManager.destroy(); text.removeFromParent(); } private void init() { text.setListening(false); text.setDraggable(false); connector.getGroup().add(text); refresh(); 
m_registrationManager.register(connector.addWiresConnectorPointsChangedHandler(pointsUpdatedHandler)); } private void refresh() { executor.accept(connector, text); batch(); } private void batch() { final Layer layer = connector.getGroup().getLayer(); if(layer != null){ layer.batch(); } } private final WiresConnectorPointsChangedHandler pointsUpdatedHandler = new WiresConnectorPointsChangedHandler() { @Override public void onPointsChanged(WiresConnectorPointsChangedEvent event) { if (isVisible()) { refresh(); } } }; }
package com.blocklaunch.blwarps.commands.elements;

import jersey.repackaged.com.google.common.collect.Lists;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.command.args.ArgumentParseException;
import org.spongepowered.api.command.args.CommandArgs;
import org.spongepowered.api.command.args.CommandContext;
import org.spongepowered.api.command.args.CommandElement;
import org.spongepowered.api.command.spec.CommandExecutor;
import org.spongepowered.api.command.spec.CommandSpec;
import org.spongepowered.api.text.Text;

import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Command element that resolves its next argument against a map of
 * alias-lists to sub-command specs and yields the matching executor.
 */
public class WarpSubCommandElement extends CommandElement {

    private Map<List<String>, CommandSpec> subCommands;

    public WarpSubCommandElement(Map<List<String>, CommandSpec> subCommands, Text key) {
        super(key);
        this.subCommands = subCommands;
    }

    /**
     * Consumes one argument and returns the executor of the sub-command
     * whose alias list contains it (case-insensitively).
     *
     * @throws ArgumentParseException when no alias matches the argument
     */
    @Override
    protected CommandExecutor parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        String subCommand = args.next();
        for (Map.Entry<List<String>, CommandSpec> entry : subCommands.entrySet()) {
            for (String subCommandAlias : entry.getKey()) {
                if (subCommandAlias.equalsIgnoreCase(subCommand)) {
                    return entry.getValue().getExecutor();
                }
            }
        }
        // FIX: Text.of(Object...) concatenates its arguments — it does NOT
        // apply printf-style formatting — so the original
        // Text.of("'%s' did not match any subcommands", subCommand) showed a
        // literal "%s" to the user. Build the message explicitly instead.
        throw args.createError(Text.of("'" + subCommand + "' did not match any subcommands"));
    }

    /** No tab-completion is offered for sub-command names. */
    @Override
    public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
        return Lists.newArrayList();
    }
}
// NOTE(review): documentation-only pass — the code below is byte-identical to
// the original; only this header comment was added.
//
// Cancels inventory clicks on slots that carry an ability bind for the
// clicking player's current character (slot compared as event slot minus 9).
// Observations, left unchanged:
//  * the three Demigods.message.broadcast(...) calls look like leftover
//    debug output — they spam range/slot numbers server-wide on every
//    matching click; confirm and consider a debug logger instead.
//  * event.getClickedInventory() can be null when the click lands outside
//    any inventory — verify upstream guarantees before relying on it here.
//  * PlayerWrapper.getPlayer(player) is dereferenced without a null check —
//    confirm it can never return null for an online player.
package com.censoredsoftware.Demigods.Engine.Listener; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.inventory.InventoryClickEvent; import com.censoredsoftware.Demigods.Engine.Demigods; import com.censoredsoftware.Demigods.Engine.Object.Ability.AbilityBind; import com.censoredsoftware.Demigods.Engine.Object.Player.PlayerCharacter; import com.censoredsoftware.Demigods.Engine.Object.Player.PlayerWrapper; public class InventoryListener implements Listener { @EventHandler(priority = EventPriority.HIGHEST) private void onInventoryClickEvent(InventoryClickEvent event) { Player player = (Player) event.getWhoClicked(); PlayerCharacter character = PlayerWrapper.getPlayer(player).getCurrent(); // Return if no character exists if(character == null) return; for(AbilityBind bind : character.getMeta().getBinds()) { // Set<Integer> hotBar = Ranges.closed(event.getClickedInventory().getSize(), event.getClickedInventory().getSize() + 9).asSet(DiscreteDomains.integers()); Demigods.message.broadcast("Range: " + event.getInventory().getSize() + ", " + (event.getClickedInventory().getSize() + 9)); Demigods.message.broadcast("Slot: " + (event.getSlot() - 9)); Demigods.message.broadcast("Raw Slot: " + event.getRawSlot()); if(bind.getSlot() == (event.getSlot() - 9)) event.setCancelled(true); } } }
package com.cloudbees.jenkins.plugins.awscredentials; import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.regions.Region; import com.amazonaws.regions.Regions; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.AmazonEC2Client; import com.amazonaws.services.ec2.model.DescribeAvailabilityZonesResult; import com.cloudbees.plugins.credentials.CredentialsDescriptor; import com.cloudbees.plugins.credentials.CredentialsScope; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.ProxyConfiguration; import hudson.Util; import hudson.util.FormValidation; import hudson.util.Secret; import jenkins.model.Jenkins; import org.apache.commons.lang.StringUtils; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.QueryParameter; import java.net.HttpURLConnection; public class AWSCredentialsImpl extends BaseAmazonWebServicesCredentials implements AmazonWebServicesCredentials { private final String accessKey; private final Secret secretKey; @DataBoundConstructor public AWSCredentialsImpl(@CheckForNull CredentialsScope scope, @CheckForNull String id, @CheckForNull String accessKey, @CheckForNull String secretKey, @CheckForNull String description) { super(scope, id, description); this.accessKey = Util.fixNull(accessKey); this.secretKey = Secret.fromString(secretKey); } public String getAccessKey() { return accessKey; } public Secret getSecretKey() { return secretKey; } public AWSCredentials getCredentials() { return new BasicAWSCredentials(accessKey, secretKey.getPlainText()); } public void refresh() { // no-op } public String getDisplayName() { return accessKey; } @Extension public static class DescriptorImpl extends CredentialsDescriptor { @Override public String 
getDisplayName() { return Messages.AWSCredentialsImpl_DisplayName(); } public FormValidation doCheckSecretKey(@QueryParameter("accessKey") final String accessKey, @QueryParameter final String value) { if (StringUtils.isBlank(accessKey) && StringUtils.isBlank(value)) { return FormValidation.ok(); } if (StringUtils.isBlank(accessKey)) { return FormValidation.error(Messages.AWSCredentialsImpl_SpecifyAccessKeyId()); } if (StringUtils.isBlank(value)) { return FormValidation.error(Messages.AWSCredentialsImpl_SpecifySecretAccessKey()); } ProxyConfiguration proxy = Jenkins.getInstance().proxy; ClientConfiguration clientConfiguration = new ClientConfiguration(); if(proxy != null) { clientConfiguration.setProxyHost(proxy.name); clientConfiguration.setProxyPort(proxy.port); clientConfiguration.setProxyUsername(proxy.getUserName()); clientConfiguration.setProxyPassword(proxy.getPassword()); } AmazonEC2 ec2 = new AmazonEC2Client( new BasicAWSCredentials(accessKey, Secret.fromString(value).getPlainText()), clientConfiguration); String region = "us-east-1"; try { DescribeAvailabilityZonesResult zonesResult = ec2.describeAvailabilityZones(); return FormValidation .ok(Messages.AWSCredentialsImpl_CredentialsValidWithAccessToNZones( zonesResult.getAvailabilityZones().size())); } catch (AmazonServiceException e) { if (HttpURLConnection.HTTP_UNAUTHORIZED == e.getStatusCode()) { return FormValidation.warning(Messages.AWSCredentialsImpl_CredentialsInValid(e.getMessage())); } else if (HttpURLConnection.HTTP_FORBIDDEN == e.getStatusCode()) { return FormValidation.ok(Messages.AWSCredentialsImpl_CredentialsValidWithoutAccessToAwsServiceInZone(e.getServiceName(), region, e.getErrorMessage() + " (" + e.getErrorCode() + ")")); } else { return FormValidation.error(e.getMessage()); } } catch (AmazonClientException e) { return FormValidation.error(e.getMessage()); } } } }
package com.ctrip.zeus.service.model.handler.impl; import com.ctrip.zeus.dal.core.*; import com.ctrip.zeus.exceptions.ValidationException; import com.ctrip.zeus.model.entity.Group; import com.ctrip.zeus.model.entity.GroupServer; import com.ctrip.zeus.model.entity.GroupVirtualServer; import com.ctrip.zeus.model.entity.VirtualServer; import com.ctrip.zeus.service.model.PathRewriteParser; import com.ctrip.zeus.service.model.handler.GroupServerValidator; import com.ctrip.zeus.service.model.handler.GroupValidator; import com.ctrip.zeus.service.model.handler.VirtualServerValidator; import com.ctrip.zeus.util.PathUtils; import com.google.common.collect.Sets; import org.springframework.stereotype.Component; import sun.reflect.generics.reflectiveObjects.NotImplementedException; import javax.annotation.Resource; import java.util.*; import java.util.regex.Pattern; @Component("groupModelValidator") public class DefaultGroupValidator implements GroupValidator { @Resource private VirtualServerValidator virtualServerModelValidator; @Resource private GroupServerValidator groupServerModelValidator; @Resource private RGroupVsDao rGroupVsDao; @Resource private RGroupVgDao rGroupVgDao; @Resource private RGroupStatusDao rGroupStatusDao; @Resource private GroupDao groupDao; private final Set<String> pathPrefixModifier = Sets.newHashSet("=", "~", "~*", "^~"); private final String standardSuffix = "($|/|\\?)"; private final String[] standardSuffixIdentifier = new String[]{"$", "/", "\\?"}; private final Pattern basicPathPath = Pattern.compile("^(\\w+\\/?)+(\\$|\\\\\\?)?"); @Override public boolean exists(Long targetId) throws Exception { return groupDao.findById(targetId, GroupEntity.READSET_FULL) != null && rGroupVgDao.findByGroup(targetId, RGroupVgEntity.READSET_FULL) == null; } @Override public void validate(Group target) throws Exception { validate(target, false); } @Override public void checkVersion(Group target) throws Exception { GroupDo check = groupDao.findById(target.getId(), 
GroupEntity.READSET_FULL); if (check == null) throw new ValidationException("Group with id " + target.getId() + " does not exists."); if (!target.getVersion().equals(check.getVersion())) throw new ValidationException("Newer Group version is detected."); } @Override public void removable(Long targetId) throws Exception { RelGroupStatusDo check = rGroupStatusDao.findByGroup(targetId, RGroupStatusEntity.READSET_FULL); if (check.getOnlineVersion() != 0) { throw new ValidationException("Group must be deactivated before deletion."); } } @Override public void validate(Group target, boolean escapePathValidation) throws Exception { if (target.getName() == null || target.getName().isEmpty() || target.getAppId() == null || target.getAppId().isEmpty()) { throw new ValidationException("Group name and app id are required."); } if (target.getHealthCheck() != null) { if (target.getHealthCheck().getUri() == null || target.getHealthCheck().getUri().isEmpty()) throw new ValidationException("Health check path cannot be empty."); } validateGroupVirtualServers(target.getId(), target.getGroupVirtualServers(), escapePathValidation); validateGroupServers(target.getGroupServers()); } @Override public void validateGroupVirtualServers(Long groupId, List<GroupVirtualServer> groupVirtualServers, boolean escapePathValidation) throws Exception { if (groupVirtualServers == null || groupVirtualServers.size() == 0) throw new ValidationException("No virtual server is found bound to this group."); if (groupId == null) groupId = 0L; GroupVirtualServer dummy = new GroupVirtualServer(); Map<Long, GroupVirtualServer> addingGvs = new HashMap<>(); for (GroupVirtualServer gvs : groupVirtualServers) { if (gvs.getRewrite() != null && !gvs.getRewrite().isEmpty()) { if (!PathRewriteParser.validate(gvs.getRewrite())) { throw new ValidationException("Invalid rewrite value."); } } VirtualServer vs = gvs.getVirtualServer(); if (!virtualServerModelValidator.exists(vs.getId())) { throw new ValidationException("Virtual 
server with id " + vs.getId() + " does not exist."); } if (addingGvs.containsKey(vs.getId())) { throw new ValidationException("Group and virtual server is an unique combination."); } else { addingGvs.put(vs.getId(), dummy); } if (!escapePathValidation) { doPathValidationAndMapping(addingGvs, gvs); } } if (escapePathValidation || addingGvs.size() == 0) return; List<RelGroupVsDo> retainedGvs = rGroupVsDao.findAllByVses(addingGvs.keySet().toArray(new Long[addingGvs.size()]), RGroupVsEntity.READSET_FULL); checkPathOverlappingAcrossVs(groupId, addingGvs, retainedGvs); // reset priority after auto reorder enabled(priority is originally null) for (GroupVirtualServer e : groupVirtualServers) { Integer ref = addingGvs.get(e.getVirtualServer().getId()).getPriority(); if (e.getPriority() == null) { e.setPriority(ref); } else if (e.getPriority().intValue() != ref.intValue()) { throw new ValidationException("Potential path overlapping problem exists at vs-" + e.getVirtualServer().getId() + ". Recommend priority is " + ref + "."); } } } @Override public void validateGroupServers(List<GroupServer> groupServers) throws Exception { groupServerModelValidator.validateGroupServers(groupServers); } private void doPathValidationAndMapping(Map<Long, GroupVirtualServer> mappingResult, GroupVirtualServer gvs) throws ValidationException { if (gvs.getPath() == null || gvs.getPath().isEmpty()) { throw new ValidationException("Path cannot be empty."); } List<String> pathValues = new ArrayList<>(2); for (String pv : gvs.getPath().split(" ", 0)) { if (pv.isEmpty()) continue; if (pathValues.size() == 2) throw new ValidationException("Invalid path, too many whitespace modifiers is found."); pathValues.add(pv); } if (pathValues.size() == 2) { if (!pathPrefixModifier.contains(pathValues.get(0))) { throw new ValidationException("Invalid path, invalid prefix modifier is found."); } // format path value gvs.setPath(pathValues.get(0) + " " + pathValues.get(1)); } String path = 
extractValue(gvs.getPath()); if ("/".equals(path)) { mappingResult.put(gvs.getVirtualServer().getId(), new GroupVirtualServer().setPath(gvs.getPath()).setPriority(gvs.getPriority() == null ? -1000 : gvs.getPriority())); } else { mappingResult.put(gvs.getVirtualServer().getId(), new GroupVirtualServer().setPath(path).setPriority(gvs.getPriority() == null ? 1000 : gvs.getPriority())); } } private void checkPathOverlappingAcrossVs(Long groupId, Map<Long, GroupVirtualServer> addingGvs, List<RelGroupVsDo> retainedGvs) throws ValidationException { List<RelGroupVsDo> retained = new ArrayList<>(); for (RelGroupVsDo retainedEntry : retainedGvs) { if (groupId.equals(retainedEntry.getGroupId())) continue; if (retainedEntry.getPriority() == 0) retainedEntry.setPriority(1000); String retainedPath = retainedEntry.getPath(); try { retainedPath = extractValue(retainedEntry.getPath()); } catch (ValidationException ex) { } GroupVirtualServer addingEntry = addingGvs.get(retainedEntry.getVsId()); if (addingEntry == null) { throw new ValidationException("Unexpected path validation is reached. 
Related group and vs: " + groupId + ", " + retainedEntry.getVsId()); } String addingPath = addingEntry.getPath(); try { addingPath = extractValue(addingPath); } catch (ValidationException ex) { } // check if root path is completely equivalent, otherwise escape comparing with root path if ("/".equals(retainedPath) || "/".equals(addingPath)) { if (retainedEntry.getPath().equals(addingEntry.getPath())) { retained.add(retainedEntry); } continue; } List<String> addingPathMembers = regexLevelSplit(addingPath, 1); List<String> retainedPathMembers = regexLevelSplit(retainedPath, 1); if (addingPathMembers.size() == 0) addingPathMembers.add(addingPath); if (retainedPathMembers.size() == 0) retainedPathMembers.add(retainedPath); // for (String pathMember : addingPathMembers) { // if (!basicPathPath.matcher(pathMember).matches()) { // throw new ValidationException("Invalid characters are found in sub path " + pathMember + "."); for (String ap : addingPathMembers) { for (String rp : retainedPathMembers) { int ol = PathUtils.prefixOverlapped(ap, rp, standardSuffix); switch (ol) { case -1: break; case 0: retained.add(retainedEntry); break; case 1: if (addingEntry.getPriority() == null || addingEntry.getPriority() <= retainedEntry.getPriority()) { addingEntry.setPriority(retainedEntry.getPriority() + 100); } break; case 2: if (addingEntry.getPriority() == null || addingEntry.getPriority() >= retainedEntry.getPriority()) { addingEntry.setPriority(retainedEntry.getPriority() - 100); } break; default: throw new NotImplementedException(); } } } } if (retained.size() > 0) { StringBuilder sb = new StringBuilder(); for (RelGroupVsDo d : retained) { sb.append(d.getVsId() + "(" + d.getPath() + ")"); } throw new ValidationException("Path is prefix-overlapped across virtual server " + sb.toString() + "."); } } // expose api for testing public static String extractValue(String path) throws ValidationException { int idxPrefix = 0; int idxModifier = 0; boolean quote = false; char[] pathArray = 
path.toCharArray(); for (char c : pathArray) { if (c == '"') { quote = true; idxPrefix++; } else if (c == ' ') { idxPrefix++; idxModifier = idxPrefix; } else if (c == '^' || c == '~' || c == '=' || c == '*') { idxPrefix++; } else if (c == '/') { idxPrefix++; if (!quote && idxPrefix < pathArray.length && pathArray[idxPrefix] == '"') { quote = true; idxPrefix++; } break; } else { break; } } if (quote && !path.endsWith("\"")) { throw new ValidationException("Path should end up with quote if regex quotation is used. Path=" + path + "."); } int idxSuffix = quote ? path.length() - 1 : path.length(); if (idxPrefix == idxSuffix) { if (path.charAt(idxSuffix - 1) == '/') { return "/"; } else { throw new ValidationException("Path could not be validated. Path=" + path + "."); } } idxPrefix = idxPrefix < idxSuffix ? (idxModifier > idxPrefix ? idxModifier : idxPrefix) : idxModifier; return path.substring(idxPrefix, idxSuffix); } private List<String> restrictAndDecorate(String path, boolean appendSuffix) throws ValidationException { if (path == null || path.isEmpty()) throw new ValidationException("Get empty path when trying to decorate."); List<String> subPaths = new ArrayList<>(); StringBuilder pb = new StringBuilder(); char[] pp = path.toCharArray(); int startIdx, endIdx; startIdx = 0; endIdx = pp.length - 1; if (pp[startIdx] == '(' && pp[endIdx] == ')') { startIdx++; endIdx } for (int i = startIdx; i <= endIdx; i++) { switch (pp[i]) { case '|': if (appendSuffix) { String p = pb.toString(); pb.setLength(0); for (String s : standardSuffixIdentifier) { subPaths.add(p + s); } } break; default: pb.append(pp[i]); break; } } if (pb.length() > 0) { String p = pb.toString(); pb.setLength(0); if (appendSuffix) { for (String s : standardSuffixIdentifier) { subPaths.add(p + s); } } else { subPaths.add(p); } } if (subPaths.size() == 0) { for (String s : standardSuffixIdentifier) { subPaths.add(path + s); } } return subPaths; } public List<String> regexLevelSplit(String path, int depth) 
throws ValidationException { List<String> pathMembers = new ArrayList<>(); if (depth > 1) { throw new ValidationException("Function regexLevelSplit only support first level split."); } int fromIdx, idxSuffix; fromIdx = idxSuffix = 0; while ((idxSuffix = path.indexOf(standardSuffix, fromIdx)) != -1) { if (fromIdx > 0) { if (path.charAt(fromIdx) == '|') { fromIdx++; pathMembers.addAll(restrictAndDecorate(path.substring(fromIdx, idxSuffix), true)); } else { String prev = pathMembers.get(pathMembers.size() - 1); List<String> subPaths = restrictAndDecorate(prev + path.substring(fromIdx, idxSuffix + 8), true); pathMembers.set(pathMembers.size() - 1, subPaths.get(0)); for (int i = 1; i < subPaths.size(); i++) { pathMembers.add(pathMembers.get(i)); } } } else { pathMembers.addAll(restrictAndDecorate(path.substring(0, idxSuffix), true)); } fromIdx = idxSuffix + 8; } if (pathMembers.size() == 0) { pathMembers.addAll(restrictAndDecorate(path, false)); } return pathMembers; } }
package com.felhr.usbmassstorageforandroid.filesystems.fat32; import android.hardware.usb.UsbDevice; import android.hardware.usb.UsbDeviceConnection; import android.util.Log; import com.felhr.usbmassstorageforandroid.filesystems.MasterBootRecord; import com.felhr.usbmassstorageforandroid.filesystems.Partition; import com.felhr.usbmassstorageforandroid.scsi.SCSICommunicator; import com.felhr.usbmassstorageforandroid.scsi.SCSIInterface; import com.felhr.usbmassstorageforandroid.scsi.SCSIRead10Response; import com.felhr.usbmassstorageforandroid.scsi.SCSIResponse; import com.felhr.usbmassstorageforandroid.utilities.HexUtil; import com.felhr.usbmassstorageforandroid.utilities.UnsignedUtil; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; public class FATHandler { private SCSICommunicator comm; private final Object monitor; private SCSIResponse currentResponse; private boolean currentStatus; private AtomicBoolean waiting; private MasterBootRecord mbr; //Mounted Partition private Partition partition; private ReservedRegion reservedRegion; private Path path; public FATHandler(UsbDevice mDevice, UsbDeviceConnection mConnection) { this.comm = new SCSICommunicator(mDevice, mConnection); this.comm.openSCSICommunicator(scsiInterface); this.monitor = new Object(); this.path = new Path(); this.waiting = new AtomicBoolean(true); } public boolean mount(int partitionIndex) { testUnitReady(); if(currentStatus) mbr = getMbr(); else return false; if(mbr.getPartitions().length >= partitionIndex + 1) { partition = mbr.getPartitions()[partitionIndex]; reservedRegion = getReservedRegion(); List<Long> clustersRoot = getClusterChain(2); byte[] data = readClusters(clustersRoot); path.setDirectoryContent(getFileEntries(data)); return true; }else { return false; } } public boolean unMount() { return preventAllowRemoval(false); } public 
List<FileEntry> list() { return path.getDirectoryContent(); } public List<FileEntry> getPath() { return path.getAbsolutePath(); } public boolean changeDir(String directoryName) { Iterator<FileEntry> e = path.getDirectoryContent().iterator(); while(e.hasNext()) { FileEntry entry = e.next(); String name; if(!entry.getLongName().equals("")) name = entry.getLongName(); else name = entry.getShortName(); if(name.equalsIgnoreCase(directoryName) && entry.isDirectory()) { path.addDirectory(entry); long firstCluster = entry.getFirstCluster(); List<Long> clusterChain = getClusterChain(firstCluster); byte[] data = readClusters(clusterChain); path.clearDirectoryContent(); path.setDirectoryContent(getFileEntries(data)); return true; } } return false; } public boolean changeDirBack() { FileEntry currentEntry = path.getCurrentDirectory(); if(currentEntry != null) { path.clearDirectoryContent(); if(path.deleteLastDir()) { if(!path.isRoot()) { FileEntry backEntry = path.getCurrentDirectory(); long firstCluster = backEntry.getFirstCluster(); List<Long> clusterChain = getClusterChain(firstCluster); byte[] data = readClusters(clusterChain); path.setDirectoryContent(getFileEntries(data)); return true; }else { List<Long> clustersRoot = getClusterChain(2); byte[] data = readClusters(clustersRoot); path.setDirectoryContent(getFileEntries(data)); return true; } }else { //You are in root directory, no back dir to go!! 
return false; } }else { return false; } } public byte[] readFile(String fileName) { Iterator<FileEntry> e = path.getDirectoryContent().iterator(); while(e.hasNext()) { FileEntry entry = e.next(); String name; if(!entry.getLongName().equals("")) name = entry.getLongName(); else name = entry.getShortName(); if(name.equalsIgnoreCase(fileName) && !entry.isDirectory()) { long firstCluster = entry.getFirstCluster(); List<Long> clusterChain = getClusterChain(firstCluster); byte[] data = readClusters(clusterChain); return Arrays.copyOf(data, (int) entry.getSize()); } } return null; } /* Write a file in the current Path TODO: Another possible prototype would be: public boolean writeNewFile(java.io.File file); */ public boolean writeNewFile(String fileName, byte[] data, boolean isRead, boolean isHidden, boolean isdirectory, long lastModified) { // Get clusterchain of the current folder List<Long> clusterChain; if(!path.isRoot()) { FileEntry dir = path.getCurrentDirectory(); clusterChain = getClusterChain(dir.getFirstCluster()); }else { clusterChain = getClusterChain(2); } // LFN entries required + 1 fileEntry + 1 more if fileName.length() % 11 != 0 int fileEntriesRequired = fileName.length() / 11 + 1; if(fileName.length() % 11 != 0) fileEntriesRequired += 1; // There is no space for a new entry. resize the directory. 
if(path.getFreeEntries() < fileEntriesRequired) { Log.i("DEBUG", "RESIZE DIR"); long lastCluster = clusterChain.get(clusterChain.size()-1); // TODO: Resize clusterchain method needed } // get dir fileEntries and obtain a valid cluster chain for the new file byte[] dirData = readClusters(clusterChain); List<Long> fileClusterChain = new ArrayList<Long>(); int clusters = (int) (data.length / (reservedRegion.getSectorsPerCluster() * reservedRegion.getBytesPerSector())); if(data.length % (reservedRegion.getSectorsPerCluster() * reservedRegion.getBytesPerSector()) != 0) clusters += 1; fileClusterChain = setClusterChain(clusters); // get a raw FileEntry FileEntry newEntry = FileEntry.getEntry( fileName, fileClusterChain.get(0), data.length, path.getDirectoryContent() , isRead, isHidden, isdirectory, lastModified); byte[] rawFileEntry = newEntry.getRawFileEntry(); // Write fileEntry in dir clusters int index = getFirstFileEntryIndex(dirData); Log.i("RAW_FILE_ENTRY", HexUtil.hexToString(rawFileEntry)); System.arraycopy(rawFileEntry, 0, dirData, index, rawFileEntry.length); // Write file entry writeClusters(clusterChain, dirData); // Write file in return writeClusters(fileClusterChain, data); } private void testUnitReady() { comm.testUnitReady(); waitTillNotification(); } private MasterBootRecord getMbr() { byte[] data = readBytes(0, 1); if(data != null) return MasterBootRecord.parseMbr(data); else return null; } private List<Long> getClusterChain(long cluster) { boolean keepSearching = true; List<Long> clusterChain = new ArrayList<Long>(); clusterChain.add(cluster); while(keepSearching) { long lbaCluster = getEntryLBA(cluster); Log.i("DEBUG", "Cluster LBA: " + String.valueOf(lbaCluster)); byte[] sector = readBytes(lbaCluster, 1); int entrySectorIndex = getEntrySectorIndex(cluster); int[] indexes = getRealIndexes(entrySectorIndex); cluster = UnsignedUtil.convertBytes2Long(sector[indexes[3]], sector[indexes[2]], sector[indexes[1]], sector[indexes[0]]); if(cluster != 
0xfffffff) { Log.i("DEBUG", "Cluster: " + String.valueOf(cluster)); clusterChain.add(cluster); }else { keepSearching = false; } } return clusterChain; } /* Set a clusterchain on the FAT Return null if is not possible to get clusterchain */ private List<Long> setClusterChain(int clusters) { List<Long> clusterChainList = new ArrayList<Long>(); long[] lbaChain = new long[clusters]; int[] entries = new int[clusters]; // 0-127 range int i = 0; // index for clusterchain long lbaFatStart = getEntryLBA(0); long lbaIndex = lbaFatStart; long lbaFatEnd = lbaFatStart + reservedRegion.getNumberSectorsPerFat(); boolean keep = true; while(keep) { byte[] data = readBytes(lbaIndex, 1); for(int indexEntry=0;indexEntry<=127;indexEntry++) { int[] indexes = getRealIndexes(indexEntry); long value = UnsignedUtil.convertBytes2Long(data[indexes[3]], data[indexes[2]], data[indexes[1]], data[indexes[0]]); if(value == 0x0000000) { long clusterEntry = getFatEntryFromLBA(lbaIndex, indexes[0]); Log.i("CLUSTER_CHAIN", String.valueOf(clusterEntry)); clusterChainList.add(clusterEntry); lbaChain[i] = lbaIndex; entries[i] = indexEntry; if(++i == clusters) // All empty clusters has been located. 
Set the clusterchain { for(int j=0;j<=clusters-1;j++) { long lba = lbaChain[j]; byte[] data2 = readBytes(lba, 1); long nextCluster; if(j < clusters-1) nextCluster = clusterChainList.get(j+1); else nextCluster = 0xfffffff; int fatEntry = entries[j]; // 0-127 range int[] currentIndexes = getRealIndexes(fatEntry); byte[] nextClusterRaw = UnsignedUtil.convertULong2Bytes(nextCluster); data2[currentIndexes[0]] = nextClusterRaw[3]; data2[currentIndexes[1]] = nextClusterRaw[2]; data2[currentIndexes[2]] = nextClusterRaw[1]; data2[currentIndexes[3]] = nextClusterRaw[0]; writeBytes(lba, data2); } keep = false; break; } } } lbaIndex++; if(lbaIndex > lbaFatEnd) return null; } return clusterChainList; } private boolean writeClusters(List<Long> clusters, byte[] data) { int bufferLength = (int) (reservedRegion.getBytesPerSector() * reservedRegion.getSectorsPerCluster()); int k = 0; byte[] buffer = new byte[bufferLength]; long firstClusterLba = partition.getLbaStart() + reservedRegion.getNumberReservedSectors() + (reservedRegion.getFatCopies() * reservedRegion.getNumberSectorsPerFat()); Iterator<Long> e = clusters.iterator(); while(e.hasNext()) { long cluster = e.next(); long lbaCluster = firstClusterLba + (cluster - 2) * reservedRegion.getSectorsPerCluster(); Log.i("CLUSTER_CHAIN", "LBA CLUSTER" + String.valueOf(lbaCluster)); if(k * bufferLength + bufferLength <= data.length) System.arraycopy(data, k * bufferLength, buffer, 0, bufferLength); else System.arraycopy(data, k * bufferLength, buffer, 0, data.length - k * bufferLength); boolean result = writeBytes(lbaCluster, buffer); if(!result) return false; k++; } return true; } private byte[] readClusters(List<Long> clusters) { long firstClusterLba = partition.getLbaStart() + reservedRegion.getNumberReservedSectors() + (reservedRegion.getFatCopies() * reservedRegion.getNumberSectorsPerFat()); int lengthData = clusters.size() * ((int) (reservedRegion.getSectorsPerCluster() * reservedRegion.getBytesPerSector())); byte[] data = new 
byte[lengthData]; int index = 0; Iterator<Long> e = clusters.iterator(); while(e.hasNext()) { long cluster = e.next(); long lbaCluster = firstClusterLba + (cluster - 2) * reservedRegion.getSectorsPerCluster(); byte[] clusterData = readBytes(lbaCluster, (int) reservedRegion.getSectorsPerCluster()); System.arraycopy(clusterData, 0, data, index, clusterData.length); index += reservedRegion.getSectorsPerCluster() * reservedRegion.getBytesPerSector(); } return data; } private ReservedRegion getReservedRegion() { long lbaPartitionStart = partition.getLbaStart(); byte[] data = readBytes(lbaPartitionStart, 1); if(data != null) return ReservedRegion.getReservedRegion(data); else return null; } private byte[] readBytes(long lba, int length) { comm.read10(0, false, false, false, UnsignedUtil.ulongToInt(lba), 0, length); waitTillNotification(); if(currentStatus) { return ((SCSIRead10Response) currentResponse).getBuffer(); }else { return null; } } private boolean writeBytes(long lba, byte[] data) { int length = data.length / 512; if(data.length % 512 != 0) length += 1; comm.write10(0, false, false, false, UnsignedUtil.ulongToInt(lba), 0, length, data); waitTillNotification(); return currentStatus; } private boolean preventAllowRemoval(boolean prevent) { comm.preventAllowRemoval(0, prevent); waitTillNotification(); return currentStatus; } private int getFirstFileEntryIndex(byte[] data) { int k = 0; boolean keep = true; while(keep) { if(data[k * 32] == 0x00) keep = false; else k++; } return k * 32; } private List<FileEntry> getFileEntries(byte[] data) { int freeEntries = 0; List<FileEntry> entries = new ArrayList<FileEntry>(); List<String> longFileEntryNames = new ArrayList<String>(); int entrySize = 32; byte[] bufferEntry = new byte[entrySize]; int i = 0; int index1 = entrySize * i; while(index1 < data.length) { System.arraycopy(data, index1, bufferEntry, 0, entrySize); Log.i("FILE_ENTRIES", HexUtil.hexToString(bufferEntry)); if((bufferEntry[0] != 0x00 && bufferEntry[0] != 
(byte) 0xe5) && (bufferEntry[11] == 0x0f || bufferEntry[11] == 0x1f || bufferEntry[11] == 0x2f || bufferEntry[11] == 0x3f)) // LFN Entry { longFileEntryNames.add(parseLFN(bufferEntry)); }else if((bufferEntry[0] != 0x00 && bufferEntry[0] != (byte) 0xe5)) // Normal entry { if(longFileEntryNames != null) // LFN is present { String lfn = ""; int index2 = longFileEntryNames.size() - 1; while(index2 >= 0) { lfn += longFileEntryNames.get(index2); index2 } entries.add(FileEntry.getEntry(lfn, bufferEntry)); longFileEntryNames.clear(); }else // No LFN { entries.add(FileEntry.getEntry(null, bufferEntry)); } }else if(bufferEntry[0] == 0x00) // Free entries batch started. Calculate free entries and break { int freeBytes = data.length - index1; freeEntries = freeBytes / 32 + 1; if(freeEntries % 32 != 0) freeEntries += 1; break; } i++; index1 = entrySize * i; } path.setFreeEntries(freeEntries); return entries; } private String parseLFN(byte[] lfnData) { boolean endChar = false; List<Byte> unicodeList = new ArrayList<Byte>(); if((lfnData[1] != 0x00 || lfnData[2] != 0x00)) { unicodeList.add(lfnData[1]); if(lfnData[2] != 0x00) unicodeList.add(lfnData[2]); }else endChar = true; if((lfnData[3] != 0x00 || lfnData[4] != 0x00) && !endChar) { unicodeList.add(lfnData[3]); if(lfnData[4] != 0x00) unicodeList.add(lfnData[4]); }else endChar = true; if((lfnData[5] != 0x00 || lfnData[6] != 0x00) && !endChar) { unicodeList.add(lfnData[5]); if(lfnData[6] != 0x00) unicodeList.add(lfnData[6]); }else endChar = true; if((lfnData[7] != 0x00 || lfnData[8] != 0x00) && !endChar) { unicodeList.add(lfnData[7]); if(lfnData[8] != 0x00) unicodeList.add(lfnData[8]); }else endChar = true; if((lfnData[9] != 0x00 || lfnData[10] != 0x00) && !endChar) { unicodeList.add(lfnData[9]); if(lfnData[10] != 0x00) unicodeList.add(lfnData[10]); }else endChar = true; if((lfnData[14] != 0x00 || lfnData[15] != 0x00) && !endChar) { unicodeList.add(lfnData[14]); if(lfnData[15] != 0x00) unicodeList.add(lfnData[15]); }else endChar = 
true; if((lfnData[16] != 0x00 || lfnData[17] != 0x00) && !endChar) { unicodeList.add(lfnData[16]); if(lfnData[17] != 0x00) unicodeList.add(lfnData[17]); }else endChar = true; if((lfnData[18] != 0x00 || lfnData[19] != 0x00) && !endChar) { unicodeList.add(lfnData[18]); if(lfnData[19] != 0x00) unicodeList.add(lfnData[19]); }else endChar = true; if((lfnData[20] != 0x00 || lfnData[21] != 0x00) && !endChar) { unicodeList.add(lfnData[20]); if(lfnData[21] != 0x00) unicodeList.add(lfnData[21]); }else endChar = true; if((lfnData[22] != 0x00 || lfnData[23] != 0x00) && !endChar) { unicodeList.add(lfnData[22]); if(lfnData[23] != 0x00) unicodeList.add(lfnData[23]); }else endChar = true; if((lfnData[24] != 0x00 || lfnData[25] != 0x00) && !endChar) { unicodeList.add(lfnData[24]); if(lfnData[25] != 0x00) unicodeList.add(lfnData[25]); }else endChar = true; if((lfnData[28] != 0x00 || lfnData[29] != 0x00) && !endChar) { unicodeList.add(lfnData[28]); if(lfnData[29] != 0x00) unicodeList.add(lfnData[29]); }else endChar = true; if((lfnData[30] != 0x00 || lfnData[31] != 0x00) && !endChar) { unicodeList.add(lfnData[30]); if(lfnData[31] != 0x00) unicodeList.add(lfnData[31]); } byte[] unicodeBuffer = new byte[unicodeList.size()]; int i = 0; while(i <= unicodeBuffer.length -1) { unicodeBuffer[i] = unicodeList.get(i); i++; } try { return new String(unicodeBuffer, "UTF-8"); }catch (UnsupportedEncodingException e) { e.printStackTrace(); return null; } } private long getEntryLBA(long entry) { long fatLBA = partition.getLbaStart() + reservedRegion.getNumberReservedSectors(); return fatLBA + (entry / 128); } private int getEntrySectorIndex(long entry) // range of returned value: [0-127] { return ((int) (entry - ((entry / 128) * 128))); } private int[] getRealIndexes(int entryBlock) { int[] indexes = new int[4]; int value = 4 * entryBlock; indexes[0] = value; indexes[1] = value + 1; indexes[2] = value + 2; indexes[3] = value + 3; return indexes; } private long getFatEntryFromLBA(long lba, int index) 
{ return (lba - getEntryLBA(0)) * 128 + (index / 4); } private void waitTillNotification() { synchronized(monitor) { while(waiting.get()) { try { monitor.wait(); } catch (InterruptedException e) { e.printStackTrace(); } } waiting.set(true); } } private void scsiSuccessNotification() { synchronized(monitor) { waiting.set(false); monitor.notify(); } } private SCSIInterface scsiInterface = new SCSIInterface() { @Override public void onSCSIOperationCompleted(int status, int dataResidue) { if(status == 0) { currentStatus = true; scsiSuccessNotification(); }else { currentStatus = false; scsiSuccessNotification(); } } @Override public void onSCSIDataReceived(SCSIResponse response) { currentResponse = response; } @Override public void onSCSIOperationStarted(boolean status) { } }; }
package com.gurkensalat.calendar.perrypedia.releasecalendar;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.apache.http.HttpEntity;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.joda.time.DateTime;
import org.mediawiki.xml.export_0.MediaWikiType;
import org.mediawiki.xml.export_0.PageType;
import org.mediawiki.xml.export_0.RevisionType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.orm.jpa.EntityScan;
import org.springframework.context.annotation.Bean;
import org.springframework.core.env.Environment;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.Unmarshaller;
import javax.xml.transform.stream.StreamSource;
import java.io.StringReader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;

/**
 * Spring Boot entry point: computes which Perry Rhodan issues fall into a
 * release window around "now" and refreshes their Perrypedia wiki-page data
 * in the local database.
 */
@SpringBootApplication
@EntityScan
@EnableJpaRepositories
public class Application
{
    private static final Logger logger = LoggerFactory.getLogger(Application.class);

    // Markers of the wiki macros whose body is extracted as the page text.
    private static final String MACRO_PREFIX_ROMAN = "{{Roman";
    private static final String MACRO_PREFIX_HANDLUNGSZUSAMMENFASSUNG = "{{Handlungszusammenfassung";
    private static final String MACRO_POSTFIX = "}}";

    @Autowired
    private Environment environment;

    @Autowired
    private WikiPageRepository wikiPageRepository;

    public static void main(String[] args)
    {
        SpringApplication.run(Application.class);
    }

    /**
     * Collects the issues of all known series whose release date lies between
     * 7 days ago and 60 days ahead, then checks each one on the Perrypedia.
     *
     * @return always {@code null}
     */
    @Bean
    public CommandLineRunner work() throws Exception
    {
        // first, calculate which issues we need to check
        DateTime start = DateTime.now().minusDays(7).withMillisOfDay(0);
        DateTime end = DateTime.now().plusDays(60).withMillisOfDay(0);

        List<Issue> issuesToCheck = new ArrayList<Issue>();

        // check Perry Rhodan Classic first
        issuesToCheck.addAll(calculateIssues(new PerryRhodanSeries(), 2838, 2860, start, end));

        // check Perry Rhodan NEO next
        issuesToCheck.addAll(calculateIssues(new PerryRhodanNeoSeries(), 110, 120, start, end));

        // check Perry Rhodan NEO Story next
        issuesToCheck.addAll(calculateIssues(new PerryRhodanNeoStorySeries(), 1, 12, start, end));

        // check Perry Rhodan Arkon next
        issuesToCheck.addAll(calculateIssues(new PerryRhodanArkonSeries(), 1, 12, start, end));

        // Now, to the Perrypedia checks...
        for (Issue issue : issuesToCheck)
        {
            checkIssueOnPerryPedia(issue);
        }

        // NOTE(review): returning null from a @Bean method is unusual; the work
        // already happened above, but Spring will log/skip the null bean. Verify
        // whether this should return an actual CommandLineRunner.
        return null;
    }

    /**
     * Returns the issues of {@code series} in {@code [startIssue, endIssue)}
     * whose release date lies strictly between {@code start} and {@code end}.
     * Issues without a known release date are skipped.
     */
    private List<Issue> calculateIssues(Series series, int startIssue, int endIssue, DateTime start, DateTime end)
    {
        List<Issue> result = new ArrayList<Issue>();

        // NOTE(review): the upper bound is exclusive here ("< endIssue"); a later
        // revision of this class uses "<=". Confirm which is intended.
        for (int i = startIssue; i < endIssue; i++)
        {
            DateTime issueDate = series.getIssueReleaseDate(i);
            if ((issueDate != null) && start.isBefore(issueDate) && end.isAfter(issueDate))
            {
                result.add(new Issue(series, i));
            }
        }

        return result;
    }

    /**
     * Ensures a local {@link WikiPage} row exists for the issue, resolves its
     * "Quelle:" redirect page, and extracts the summary macro body from the
     * full page.
     */
    private void checkIssueOnPerryPedia(Issue issue)
    {
        logger.info("Have to check issue {}", issue);

        WikiPage wikiPage = findFirstWikiPage(issue);
        if (wikiPage == null)
        {
            wikiPage = new WikiPage();
            wikiPage.setSeriesPrefix(issue.getSeries().getSourcePrefix());
            wikiPage.setIssueNumber(issue.getNumber());
        }

        wikiPage = wikiPageRepository.save(wikiPage);

        // Step 1: resolve "Quelle:<prefix><number>" to the full page title.
        if (!(WikiPage.getVALID().equals(wikiPage.getSourcePageValid())))
        {
            try
            {
                wikiPage.setSourcePageTitle("Quelle:" + issue.getSeries().getSourcePrefix() + issue.getNumber());
                wikiPage = wikiPageRepository.save(wikiPage);

                MediaWikiType mwt = downloadAndDecode(wikiPage.getSourcePageTitle());
                if ((mwt.getPage() != null) && (mwt.getPage().size() > 0))
                {
                    PageType page = mwt.getPage().get(0);
                    logger.info(" page: {}", page);
                    logger.info(" id: {}", page.getId());
                    logger.info(" title: {}", page.getTitle());
                    logger.info(" redir: {}", page.getRedirect().getTitle());

                    wikiPage.setSourcePageId(page.getId().toString());
                    wikiPage.setFullPageTitle(page.getRedirect().getTitle());

                    if (StringUtils.isNotEmpty(wikiPage.getSourcePageId())
                            && StringUtils.isNotEmpty(wikiPage.getSourcePageTitle())
                            && StringUtils.isNotEmpty(wikiPage.getFullPageTitle()))
                    {
                        wikiPage.setSourcePageValid(WikiPage.getVALID());
                    }

                    wikiPage = wikiPageRepository.save(wikiPage);
                }
            }
            catch (Exception e)
            {
                logger.error("While loading 'Quelle' page", e);
            }
        }

        // Step 2: download the full page and cut out the summary macro body.
        if (WikiPage.getVALID().equals(wikiPage.getSourcePageValid())
                && !(WikiPage.getVALID().equals(wikiPage.getFullPageValid())))
        {
            try
            {
                MediaWikiType mwt = downloadAndDecode(wikiPage.getFullPageTitle());
                if ((mwt.getPage() != null) && (mwt.getPage().size() > 0))
                {
                    PageType page = mwt.getPage().get(0);
                    logger.info(" page: {}", page);
                    logger.info(" id: {}", page.getId());

                    wikiPage.setFullPageId(page.getId().toString());
                    wikiPage.setFullPageTitle(page.getTitle());
                    wikiPage.setFullPageText(null);

                    // BUG FIX(review): the original condition tested getFullPageId()
                    // twice; the second operand is now the page title.
                    if (StringUtils.isNotEmpty(wikiPage.getFullPageId())
                            && StringUtils.isNotEmpty(wikiPage.getFullPageTitle())
                            && (page.getRevisionOrUpload() != null)
                            && (page.getRevisionOrUpload().size() > 0))
                    {
                        RevisionType revision = (RevisionType) page.getRevisionOrUpload().get(0);
                        String text = revision.getText().getValue();

                        // The second search runs on the (possibly truncated) text,
                        // mirroring the original sequential behaviour.
                        text = applyMacroExtraction(wikiPage, text, MACRO_PREFIX_ROMAN);
                        text = applyMacroExtraction(wikiPage, text, MACRO_PREFIX_HANDLUNGSZUSAMMENFASSUNG);
                    }

                    wikiPage = wikiPageRepository.save(wikiPage);
                }
            }
            catch (Exception e)
            {
                logger.error("While loading full page", e);
            }
        }

        String wikiPageAsString = ToStringBuilder.reflectionToString(wikiPage, ToStringStyle.MULTI_LINE_STYLE);
        logger.info("wikiPage is {}", wikiPageAsString);
    }

    /**
     * If {@code text} contains {@code macroPrefix}, truncates it to the macro
     * body, stores that body as the full page text, and marks the page valid.
     * BUG FIX(review): the original called the non-static {@code wikiPage.getValid()}
     * here instead of the {@code WikiPage.getVALID()} marker used everywhere else.
     *
     * @return the possibly-truncated text, for chaining a second macro search
     */
    private String applyMacroExtraction(WikiPage wikiPage, String text, String macroPrefix)
    {
        int startMacroPrefix = text.indexOf(macroPrefix);
        if (startMacroPrefix > -1)
        {
            text = text.substring(startMacroPrefix);
            int startMacroPostfix = text.indexOf(MACRO_POSTFIX);
            if (startMacroPostfix > -1)
            {
                text = text.substring(0, startMacroPostfix);
                wikiPage.setFullPageValid(WikiPage.getVALID());
                wikiPage.setFullPageText(text);
            }
        }
        return text;
    }

    /**
     * Returns the first stored {@link WikiPage} for the issue's series prefix
     * and number, or {@code null} if none exists yet.
     */
    private WikiPage findFirstWikiPage(Issue issue)
    {
        List<WikiPage> wikiPages =
                wikiPageRepository.findBySeriesPrefixAndIssueNumber(issue.getSeries().getSourcePrefix(),
                        issue.getNumber());
        if ((wikiPages != null) && (wikiPages.size() > 0))
        {
            return wikiPages.get(0);
        }
        return null;
    }

    // @Bean -- disabled diagnostic runner that only logs the known series.
    public CommandLineRunner seriesCalculator() throws Exception
    {
        logger.info("seriesCalculator method called...");
        logger.info("Series {}", new PerryRhodanSeries());
        logger.info("Series {}", new PerryRhodanNeoSeries());
        logger.info("Series {}", new PerryRhodanNeoStorySeries());
        logger.info("Series {}", new PerryRhodanArkonSeries());
        return null;
    }

    /**
     * Downloads the current revision of the named page via the Perrypedia
     * "Spezial:Exportieren" (Special:Export) form and unmarshals the returned
     * MediaWiki export XML.
     *
     * Example form data: catname=&pages=Quelle:PRN111&curonly=1&wpDownload=1
     * Page names must be UTF-8 form-encoded (e.g. "Leticrons+S%C3%A4ule").
     */
    private MediaWikiType downloadAndDecode(String pageName) throws Exception
    {
        logger.debug("downloadAndDecode '{}'", pageName);

        // NOTE(review): the URL literal was truncated to "http:" in this copy of
        // the source; the value below is reconstructed from the Perrypedia export
        // endpoint -- verify before release.
        final String EXPORT_URL =
                "http://www.perrypedia.proc.org/mediawiki/index.php?title=Spezial:Exportieren&action=submit";

        MediaWikiType mwt = null;

        // try-with-resources: the original leaked the CloseableHttpClient.
        try (CloseableHttpClient httpclient = HttpClients.createDefault())
        {
            HttpPost httpPost = new HttpPost(EXPORT_URL);

            List<NameValuePair> params = new ArrayList<NameValuePair>();
            params.add(new BasicNameValuePair("catname", ""));
            params.add(new BasicNameValuePair("pages", pageName));
            params.add(new BasicNameValuePair("curonly", "1"));
            params.add(new BasicNameValuePair("wpDownload", "1"));
            httpPost.setEntity(new UrlEncodedFormEntity(params, Charset.forName("UTF-8")));

            try (CloseableHttpResponse response = httpclient.execute(httpPost))
            {
                logger.debug("{}", response.getStatusLine());

                HttpEntity entity = response.getEntity();
                String data = EntityUtils.toString(entity);

                JAXBContext jaxbContext = JAXBContext.newInstance(MediaWikiType.class);
                Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
                StreamSource source = new StreamSource(new StringReader(data));
                JAXBElement<MediaWikiType> element = unmarshaller.unmarshal(source, MediaWikiType.class);
                mwt = element.getValue();

                // ensure the entity is fully consumed so the connection can be reused
                EntityUtils.consume(entity);
            }
        }

        return mwt;
    }
}
package com.gurkensalat.calendar.perrypedia.releasecalendar;

import biweekly.component.VEvent;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.apache.http.HttpEntity;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.apache.http.util.VersionInfo;
import org.joda.time.DateTime;
import org.mediawiki.xml.export_0.MediaWikiType;
import org.mediawiki.xml.export_0.PageType;
import org.mediawiki.xml.export_0.RevisionType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.orm.jpa.EntityScan;
import org.springframework.context.annotation.Bean;
import org.springframework.core.env.Environment;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.Unmarshaller;
import javax.xml.transform.stream.StreamSource;
import java.io.StringReader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

/**
 * Spring Boot entry point: computes which Perry Rhodan issues fall into a
 * release window around "now", refreshes their Perrypedia wiki-page data in
 * the local database, and exports iCalendar files (one per series plus a
 * combined calendar).
 */
@SpringBootApplication
@EntityScan
@EnableJpaRepositories
public class Application
{
    private static final Logger logger = LoggerFactory.getLogger(Application.class);

    // Markers of the wiki macros whose body is extracted as the page text.
    private static final String MACRO_PREFIX_ROMAN = "{{Roman";
    private static final String MACRO_PREFIX_HANDLUNGSZUSAMMENFASSUNG = "{{Handlungszusammenfassung";
    private static final String MACRO_POSTFIX = "}}";

    @Autowired
    private Environment environment;

    @Autowired
    private PersistenceContext persistenceContext;

    @Autowired
    private WikiPageRepository wikiPageRepository;

    @Autowired
    private EventUtil eventUtil;

    @Autowired
    private ICalendarUtil iCalendarUtil;

    // Artifact/version are combined into the HTTP User-Agent string.
    @Value("${info.build.artifact}")
    private String projectArtifact;

    @Value("${info.build.version}")
    private String projectVersion;

    public static void main(String[] args)
    {
        SpringApplication.run(Application.class);
    }

    /**
     * Collects the issues of all known series whose release date lies between
     * 14 days ago and 90 days ahead, checks each on the Perrypedia, then
     * exports the database and writes the iCal files.
     *
     * @return always {@code null}
     */
    @Bean
    public CommandLineRunner work() throws Exception
    {
        // first, calculate which issues we need to check
        DateTime start = DateTime.now().minusDays(14).withMillisOfDay(0);
        DateTime end = DateTime.now().plusDays(90).withMillisOfDay(0);

        List<Issue> issuesToCheck = new ArrayList<Issue>();

        // check Perry Rhodan Classic first
        Series perryRhodanSeries = new PerryRhodanSeries();
        Map<String, VEvent> perryRhodanEvents = new TreeMap<String, VEvent>();
        issuesToCheck.addAll(calculateIssues(perryRhodanSeries, 2860, 2999, start, end));

        // check Perry Rhodan NEO next
        Series perryRhodanNeoSeries = new PerryRhodanNeoSeries();
        Map<String, VEvent> perryRhodanNeoEvents = new TreeMap<String, VEvent>();
        issuesToCheck.addAll(calculateIssues(perryRhodanNeoSeries, 140, 180, start, end));

        // check Perry Rhodan NEO Story next
        Series perryRhodanNeoStorySeries = new PerryRhodanNeoStorySeries();
        Map<String, VEvent> perryRhodanNeoStoryEvents = new TreeMap<String, VEvent>();
        issuesToCheck.addAll(calculateIssues(perryRhodanNeoStorySeries, 1, 12, start, end));

        // check Perry Rhodan Arkon next
        Series perryRhodanArkonSeries = new PerryRhodanArkonSeries();
        Map<String, VEvent> perryRhodanArkonEvents = new TreeMap<String, VEvent>();
        issuesToCheck.addAll(calculateIssues(perryRhodanArkonSeries, 1, 12, start, end));

        // check Perry Rhodan Jupiter next
        Series perryRhodanJupiterSeries = new PerryRhodanJupiterSeries();
        Map<String, VEvent> perryRhodanJupiterEvents = new TreeMap<String, VEvent>();
        issuesToCheck.addAll(calculateIssues(perryRhodanJupiterSeries, 1, 12, start, end));

        // check Perry Rhodan Terminus next
        Series perryRhodanTerminusSeries = new PerryRhodanTerminusSeries();
        Map<String, VEvent> perryRhodanTerminusEvents = new TreeMap<String, VEvent>();
        issuesToCheck.addAll(calculateIssues(perryRhodanTerminusSeries, 1, 12, start, end));

        // Now, to the Perrypedia checks...
        Map<String, VEvent> allEvents = new TreeMap<String, VEvent>();
        for (Issue issue : issuesToCheck)
        {
            WikiPage wikiPage = checkIssueOnPerryPedia(issue);
            if (!WikiPage.getVALID().equals(wikiPage.getFullPageValid()))
            {
                continue;
            }

            VEvent event = eventUtil.convertToIcalEvent(issue, wikiPage);
            if (event == null)
            {
                continue;
            }

            // Events are keyed by series prefix + issue number.
            // BUG FIX(review): removed a dead assignment of the release date to
            // the key; it was immediately overwritten in the original.
            String prefix = issue.getSeries().getSourcePrefix();
            String key = prefix + issue.getNumber();

            allEvents.put(key, event);
            if (perryRhodanSeries.getSourcePrefix().equals(prefix))
            {
                perryRhodanEvents.put(key, event);
            }
            if (perryRhodanNeoSeries.getSourcePrefix().equals(prefix))
            {
                perryRhodanNeoEvents.put(key, event);
            }
            if (perryRhodanNeoStorySeries.getSourcePrefix().equals(prefix))
            {
                perryRhodanNeoStoryEvents.put(key, event);
            }
            if (perryRhodanArkonSeries.getSourcePrefix().equals(prefix))
            {
                perryRhodanArkonEvents.put(key, event);
            }
            if (perryRhodanJupiterSeries.getSourcePrefix().equals(prefix))
            {
                perryRhodanJupiterEvents.put(key, event);
            }
            if (perryRhodanTerminusSeries.getSourcePrefix().equals(prefix))
            {
                perryRhodanTerminusEvents.put(key, event);
            }
        }

        persistenceContext.exportDatabase();

        // Finally, create the iCal files: one combined, one per series.
        iCalendarUtil.saveIcal(allEvents, "All");
        iCalendarUtil.saveIcal(perryRhodanEvents, perryRhodanSeries.getSourcePrefix());
        iCalendarUtil.saveIcal(perryRhodanNeoEvents, perryRhodanNeoSeries.getSourcePrefix());
        iCalendarUtil.saveIcal(perryRhodanNeoStoryEvents, perryRhodanNeoStorySeries.getSourcePrefix());
        iCalendarUtil.saveIcal(perryRhodanArkonEvents, perryRhodanArkonSeries.getSourcePrefix());
        iCalendarUtil.saveIcal(perryRhodanJupiterEvents, perryRhodanJupiterSeries.getSourcePrefix());
        iCalendarUtil.saveIcal(perryRhodanTerminusEvents, perryRhodanTerminusSeries.getSourcePrefix());

        return null;
    }

    /**
     * Returns the issues of {@code series} in {@code [startIssue, endIssue]}
     * (inclusive) whose release date lies strictly between {@code start} and
     * {@code end}. Issues without a known release date are skipped.
     */
    private List<Issue> calculateIssues(Series series, int startIssue, int endIssue, DateTime start, DateTime end)
    {
        List<Issue> result = new ArrayList<Issue>();

        for (int i = startIssue; i <= endIssue; i++)
        {
            DateTime issueDate = series.getIssueReleaseDate(i);
            if ((issueDate != null) && start.isBefore(issueDate) && end.isAfter(issueDate))
            {
                result.add(new Issue(series, i));
            }
        }

        return result;
    }

    /**
     * Ensures a local {@link WikiPage} row exists for the issue, resolves its
     * "Quelle:" redirect page, and extracts the summary macro body from the
     * full page.
     *
     * @return the saved, possibly-updated wiki page (never {@code null})
     */
    private WikiPage checkIssueOnPerryPedia(Issue issue)
    {
        logger.info("Have to check issue {}", issue);

        WikiPage wikiPage = findFirstWikiPage(issue);
        if (wikiPage == null)
        {
            wikiPage = new WikiPage();
            wikiPage.setSeriesPrefix(issue.getSeries().getSourcePrefix());
            wikiPage.setIssueNumber(issue.getNumber());
        }

        wikiPage = wikiPageRepository.save(wikiPage);

        // Step 1: resolve "Quelle:<prefix><number>" to the full page title.
        if (!(WikiPage.getVALID().equals(wikiPage.getSourcePageValid())))
        {
            try
            {
                wikiPage.setSourcePageTitle("Quelle:" + issue.getSeries().getSourcePrefix() + issue.getNumber());
                wikiPage = wikiPageRepository.save(wikiPage);

                MediaWikiType mwt = downloadAndDecode(wikiPage.getSourcePageTitle());
                if ((mwt.getPage() != null) && (mwt.getPage().size() > 0))
                {
                    PageType page = mwt.getPage().get(0);
                    logger.info(" page: {}", page);
                    logger.info(" id: {}", page.getId());
                    logger.info(" title: {}", page.getTitle());
                    logger.info(" redir: {}", page.getRedirect().getTitle());

                    wikiPage.setSourcePageId(page.getId().toString());
                    wikiPage.setFullPageTitle(page.getRedirect().getTitle());

                    if (StringUtils.isNotEmpty(wikiPage.getSourcePageId())
                            && StringUtils.isNotEmpty(wikiPage.getSourcePageTitle())
                            && StringUtils.isNotEmpty(wikiPage.getFullPageTitle()))
                    {
                        wikiPage.setSourcePageValid(WikiPage.getVALID());
                    }

                    wikiPage = wikiPageRepository.save(wikiPage);
                }
            }
            catch (Exception e)
            {
                logger.error("While loading 'Quelle' page", e);
            }
        }

        // Step 2: download the full page and cut out the summary macro body.
        if (WikiPage.getVALID().equals(wikiPage.getSourcePageValid())
                && !(WikiPage.getVALID().equals(wikiPage.getFullPageValid())))
        {
            try
            {
                MediaWikiType mwt = downloadAndDecode(wikiPage.getFullPageTitle());
                if ((mwt.getPage() != null) && (mwt.getPage().size() > 0))
                {
                    PageType page = mwt.getPage().get(0);
                    logger.info(" page: {}", page);
                    logger.info(" id: {}", page.getId());

                    wikiPage.setFullPageId(page.getId().toString());
                    wikiPage.setFullPageTitle(page.getTitle());
                    wikiPage.setFullPageText(null);

                    // BUG FIX(review): the original condition tested getFullPageId()
                    // twice; the second operand is now the page title.
                    if (StringUtils.isNotEmpty(wikiPage.getFullPageId())
                            && StringUtils.isNotEmpty(wikiPage.getFullPageTitle())
                            && (page.getRevisionOrUpload() != null)
                            && (page.getRevisionOrUpload().size() > 0))
                    {
                        RevisionType revision = (RevisionType) page.getRevisionOrUpload().get(0);
                        String text = revision.getText().getValue();

                        // The second search runs on the (possibly truncated) text,
                        // mirroring the original sequential behaviour.
                        text = applyMacroExtraction(wikiPage, text, MACRO_PREFIX_ROMAN);
                        text = applyMacroExtraction(wikiPage, text, MACRO_PREFIX_HANDLUNGSZUSAMMENFASSUNG);
                    }

                    wikiPage = wikiPageRepository.save(wikiPage);
                }
            }
            catch (Exception e)
            {
                logger.error("While loading full page", e);
            }
        }

        String wikiPageAsString = ToStringBuilder.reflectionToString(wikiPage, ToStringStyle.MULTI_LINE_STYLE);
        logger.info("wikiPage is {}", wikiPageAsString);

        return wikiPage;
    }

    /**
     * If {@code text} contains {@code macroPrefix}, truncates it to the macro
     * body, stores that body as the full page text, and marks the page valid.
     *
     * @return the possibly-truncated text, for chaining a second macro search
     */
    private String applyMacroExtraction(WikiPage wikiPage, String text, String macroPrefix)
    {
        int startMacroPrefix = text.indexOf(macroPrefix);
        if (startMacroPrefix > -1)
        {
            text = text.substring(startMacroPrefix);
            int startMacroPostfix = text.indexOf(MACRO_POSTFIX);
            if (startMacroPostfix > -1)
            {
                text = text.substring(0, startMacroPostfix);
                wikiPage.setFullPageValid(WikiPage.getVALID());
                wikiPage.setFullPageText(text);
            }
        }
        return text;
    }

    /**
     * Returns the first stored {@link WikiPage} for the issue's series prefix
     * and number, or {@code null} if none exists yet.
     */
    private WikiPage findFirstWikiPage(Issue issue)
    {
        List<WikiPage> wikiPages =
                wikiPageRepository.findBySeriesPrefixAndIssueNumber(issue.getSeries().getSourcePrefix(),
                        issue.getNumber());
        if ((wikiPages != null) && (wikiPages.size() > 0))
        {
            return wikiPages.get(0);
        }
        return null;
    }

    /**
     * Downloads the current revision of the named page via the Perrypedia
     * "Spezial:Exportieren" (Special:Export) form and unmarshals the returned
     * MediaWiki export XML. Identifies itself with a build-derived User-Agent.
     *
     * Example form data: catname=&pages=Quelle:PRN111&curonly=1&wpDownload=1
     * Page names must be UTF-8 form-encoded (e.g. "Leticrons+S%C3%A4ule").
     */
    private MediaWikiType downloadAndDecode(String pageName) throws Exception
    {
        logger.debug("downloadAndDecode '{}'", pageName);

        // NOTE(review): the URL literal was truncated to "http:" in this copy of
        // the source; the value below is reconstructed from the Perrypedia export
        // endpoint -- verify before release.
        final String EXPORT_URL =
                "http://www.perrypedia.proc.org/mediawiki/index.php?title=Spezial:Exportieren&action=submit";

        String userAgent = projectArtifact + "/" + projectVersion + " "
                + VersionInfo.getUserAgent("Apache-HttpClient", "org.apache.http.client", getClass());

        MediaWikiType mwt = null;

        // try-with-resources: the original leaked the CloseableHttpClient.
        try (CloseableHttpClient httpclient = HttpClients.custom().setUserAgent(userAgent).build())
        {
            HttpPost httpPost = new HttpPost(EXPORT_URL);

            List<NameValuePair> params = new ArrayList<NameValuePair>();
            params.add(new BasicNameValuePair("catname", ""));
            params.add(new BasicNameValuePair("pages", pageName));
            params.add(new BasicNameValuePair("curonly", "1"));
            params.add(new BasicNameValuePair("wpDownload", "1"));
            httpPost.setEntity(new UrlEncodedFormEntity(params, Charset.forName("UTF-8")));

            try (CloseableHttpResponse response = httpclient.execute(httpPost))
            {
                logger.debug("{}", response.getStatusLine());

                HttpEntity entity = response.getEntity();
                String data = EntityUtils.toString(entity);

                JAXBContext jaxbContext = JAXBContext.newInstance(MediaWikiType.class);
                Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
                StreamSource source = new StreamSource(new StringReader(data));
                JAXBElement<MediaWikiType> element = unmarshaller.unmarshal(source, MediaWikiType.class);
                mwt = element.getValue();

                // ensure the entity is fully consumed so the connection can be reused
                EntityUtils.consume(entity);
            }
        }

        return mwt;
    }
}
package com.markgrand.cryptoShuffle.keyManagement;

import com.markgrand.cryptoShuffle.RandomKeyGenerator;

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.function.Consumer;

/**
 * Abstract superclass for implementations of {@link OneTimeKeyPad}.
 *
 * @author Mark Grand
 */
public abstract class AbstractOneTimeKeyPad implements OneTimeKeyPad {
    /** Do-nothing strategy; in effect until auto-generation is configured. */
    private static final Subroutine NULL_AUTOGENERATE_STRATEGY = () -> {
    };

    private Subroutine autogenerationStrategy = NULL_AUTOGENERATE_STRATEGY;

    @Override
    public Map<UUID, byte[]> generateKeys(int count, int keyLength) {
        ensureCountIsPositive(count);
        final RandomKeyGenerator rng = RandomKeyGenerator.getThreadLocalInstance();
        final Map<UUID, byte[]> generated = new HashMap<>();
        int remaining = count;
        while (remaining > 0) {
            generated.put(UUID.randomUUID(), rng.generateKey(keyLength));
            remaining -= 1;
        }
        addNewKeys(generated);
        return generated;
    }

    @Override
    public Map<UUID, byte[]> generateKeys(int count, int minKeyLength, int maxKeyLength) {
        ensureCountIsPositive(count);
        final RandomKeyGenerator rng = RandomKeyGenerator.getThreadLocalInstance();
        final Map<UUID, byte[]> generated = new HashMap<>();
        int remaining = count;
        while (remaining > 0) {
            generated.put(UUID.randomUUID(), rng.generateKey(minKeyLength, maxKeyLength));
            remaining -= 1;
        }
        addNewKeys(generated);
        return generated;
    }

    /**
     * Add the encryption keys in the given map to this {@code OneTimeKeyPad} as new keys.
     *
     * @param newKeyMap the keys to add.
     */
    protected abstract void addNewKeys(Map<UUID, byte[]> newKeyMap);

    private void ensureCountIsPositive(int count) {
        if (count < 1) {
            throw new IllegalArgumentException("count must be greater than zero but is " + count);
        }
    }

    @Override
    public void autoGenerateKeys(int count, int keyLength, Consumer<Map<UUID, byte[]>> transmitter) {
        // Capture count/length; each run creates a fresh batch and hands it to the transmitter.
        autogenerationStrategy = () -> transmitter.accept(generateKeys(count, keyLength));
    }

    @Override
    public void autoGenerateKeys(int count, int minKeyLength, int maxKeyLength,
                                 Consumer<Map<UUID, byte[]>> transmitter) {
        autogenerationStrategy = () -> transmitter.accept(generateKeys(count, minKeyLength, maxKeyLength));
    }

    @Override
    public Optional<Map.Entry<UUID, byte[]>> getUnusedKey() {
        final boolean padExhausted = (getUnusedKeyCount() == 0);
        if (padExhausted) {
            // Replenish via the configured strategy (a no-op unless configured).
            autogenerationStrategy.doIt();
        }
        return doGetUnusedKey();
    }

    /**
     * Return the next unused encryption key in this pad and its UUID.
     *
     * @return an {@link Optional} object that contains a {@link java.util.Map.Entry} whose value is the encryption key
     * and whose key is the encryption key's UUID, if there are any unused keys in the pad. If there are no unused keys,
     * returns an empty {@code Optional} object.
     * @implNote This is called by {@link #getUnusedKey()} after the strategy to {@link #autoGenerateKeys} keys has been
     * run.
     */
    protected abstract Optional<Map.Entry<UUID, byte[]>> doGetUnusedKey();

    @Override
    public void clearAutoGenerateKeys() {
        autogenerationStrategy = NULL_AUTOGENERATE_STRATEGY;
    }

    /** Minimal zero-argument action used for the key auto-generation strategy. */
    @FunctionalInterface
    private interface Subroutine {
        void doIt();
    }
}
package com.mkyong.helloworld.domain.builder.generator;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.mkyong.helloworld.domain.AbstractDomain;
import com.mkyong.helloworld.domain.KokyakuKojinTantoBushoDomain;

import freemarker.template.Configuration;
import freemarker.template.TemplateException;

/**
 * Code generator: renders a builder class source file for a domain class
 * (and its {@code AbstractDomain} ancestors) from the FreeMarker template
 * {@code BuilderTemplate.ftl} into the {@code generated/} directory.
 */
public class BuilderGenerater {

    private static final String TEMPLATE_FILE = "BuilderTemplate.ftl";
    private static final String DESTINATION = "generated/";

    /** The domain class a builder is generated for. */
    static Class<?> TARGET_CLASS = KokyakuKojinTantoBushoDomain.class;

    public static void main(String[] args) {
        // Collect the declared fields of the target class and of every
        // superclass that is still an AbstractDomain subtype.
        ArrayList<Field> allFields = new ArrayList<Field>();
        for (Field f : TARGET_CLASS.getDeclaredFields()) {
            allFields.add(f);
        }
        Class<?> parent = TARGET_CLASS.getSuperclass();
        while (parent != null) {
            if (!AbstractDomain.class.isAssignableFrom(parent)) {
                break;
            }
            for (Field f : parent.getDeclaredFields()) {
                allFields.add(f);
            }
            parent = parent.getSuperclass();
        }

        // Only private fields become builder members.
        List<Field> privateFields = new ArrayList<>();
        for (Field field : allFields) {
            if (Modifier.isPrivate(field.getModifiers())) {
                privateFields.add(field);
            }
        }

        // Template model configuration.
        DomainConfig domainConfig = new DomainConfig();
        domainConfig.setBuilderBaseClass("AbstractDomainBuilder");
        // "FooDomain" -> builder for "Foo".
        domainConfig.setDomain(TARGET_CLASS.getSimpleName().replaceAll("Domain$", ""));

        ArrayList<HashMap<String, String>> members = new ArrayList<HashMap<String, String>>();
        for (Field m : privateFields) {
            HashMap<String, String> map = new HashMap<String, String>();
            map.put("type", m.getType().getName());
            map.put("name", m.getName());
            map.put("explain", m.getName());
            members.add(map);
        }
        domainConfig.setMembers(members);

        // FreeMarker configuration; templates are loaded from the classpath root.
        Configuration fileMarkerConfig = new Configuration(Configuration.VERSION_2_3_23);
        fileMarkerConfig.setClassForTemplateLoading(BuilderGenerater.class, "/");

        try {
            freemarker.template.Template template = fileMarkerConfig.getTemplate(TEMPLATE_FILE);

            // Template data model.
            Map<String, Object> map = new HashMap<String, Object>();
            map.put("builderBaseClass", domainConfig.getBuilderBaseClass());
            map.put("domain", domainConfig.getDomain());
            map.put("members", domainConfig.getMembers());

            // Comma-separated member names without a leading comma.
            StringBuilder memberListWithComma = new StringBuilder();
            for (HashMap<String, String> m : domainConfig.getMembers()) {
                if (memberListWithComma.length() > 0) {
                    memberListWithComma.append(",");
                }
                memberListWithComma.append(m.get("name"));
            }
            map.put("memberListWithComma", memberListWithComma.toString());

            // Write the generated source file.
            File file = new File(DESTINATION + domainConfig.getDomain() + "DomainBuilder.java");
            File outputDir = file.getParentFile();
            if (outputDir != null) {
                // Robustness fix: FileWriter fails if "generated/" does not exist.
                outputDir.mkdirs();
            }
            // BUG FIX(review): the PrintWriter was never closed, so the rendered
            // output could remain unflushed; try-with-resources closes it.
            try (PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(file)))) {
                template.process(map, writer);
            }
            // BUG FIX(review): success was reported even when an exception had
            // been caught; it is now printed only after a successful run.
            System.out.println("Success!!");
        } catch (IOException | TemplateException e) {
            e.printStackTrace();
        }
    }
}
package com.openlattice.postgres.mapstores; import com.dataloom.streams.StreamUtil; import com.google.common.collect.ImmutableList; import com.google.common.collect.MapMaker; import com.hazelcast.config.MapConfig; import com.hazelcast.config.MapStoreConfig; import com.kryptnostic.rhizome.mapstores.TestableSelfRegisteringMapStore; import com.openlattice.postgres.CountdownConnectionCloser; import com.openlattice.postgres.KeyIterator; import com.openlattice.postgres.PostgresColumnDefinition; import com.openlattice.postgres.PostgresTableDefinition; import com.zaxxer.hikari.HikariDataSource; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author Matthew Tamayo-Rios &lt;matthew@openlattice.com&gt; */ public abstract class AbstractBasePostgresMapstore<K, V> implements TestableSelfRegisteringMapStore<K, V> { protected final PostgresTableDefinition table; protected final Logger logger = LoggerFactory.getLogger( getClass() ); protected final HikariDataSource hds; private final String mapName; private final String insertQuery; private final String deleteQuery; private final String selectAllKeysQuery; private final String selectByKeyQuery; private final Optional<String> oc; public AbstractBasePostgresMapstore( String mapName, PostgresTableDefinition table, HikariDataSource hds ) { this.mapName = mapName; this.table = table; this.hds = hds; this.oc = buildOnConflictQuery(); this.insertQuery = buildInsertQuery(); this.deleteQuery = buildDeleteQuery(); this.selectAllKeysQuery = buildSelectAllKeysQuery(); this.selectByKeyQuery = buildSelectByKeyQuery(); } protected Optional<String> buildOnConflictQuery() { 
return Optional.of( ( " ON CONFLICT (" + keyColumns().stream() .map( PostgresColumnDefinition::getName ) .collect( Collectors.joining( ", " ) ) + ") DO " + table.updateQuery( keyColumns(), valueColumns(), false ) ) ); } protected String buildInsertQuery() { return table.insertQuery( onConflict(), getInsertColumns() ); } protected String buildDeleteQuery() { return table.deleteQuery( keyColumns() ); } protected String buildSelectAllKeysQuery() { return table.selectQuery( keyColumns() ); } protected String buildSelectByKeyQuery() { return table.selectQuery( ImmutableList.of(), keyColumns() ); } @Override public void store( K key, V value ) { try ( Connection connection = hds.getConnection(); PreparedStatement insertRow = prepareInsert( connection ) ) { bind( insertRow, key, value ); logger.debug( "Insert query: {}", insertRow ); insertRow.execute(); } catch ( SQLException e ) { logger.error( "Error executing SQL during store for key {}.", key, e ); handleStoreFailed( key, value ); } } @Override public void storeAll( Map<K, V> map ) { K key = null; final Set<K> skip = new HashSet<>(); try ( Connection connection = hds.getConnection(); PreparedStatement insertRow = prepareInsert( connection ) ) { /* * Process all the writes. */ boolean failed = true; while ( failed ) { failed = false; for ( Entry<K, V> entry : map.entrySet() ) { key = entry.getKey(); //Skip any writes that if( !skip.contains( key ) ) { bind( insertRow, key, entry.getValue() ); try { insertRow.addBatch(); } catch ( SQLException e ) { //Reset the connection so we can keep writing in other values and log //TODO: Consider whether we should retry or whether we rethrow exception (might be able to bubble it up to Hazelcast). 
//TODO: If we rethrow and exception meter we can monitor if any mapstores are failing an unusually high number of writes connection.commit(); skip.add( key ); logger.error( "Unable to store row {} -> {}", key, entry.getValue(), e ); failed = true; } } } } insertRow.executeBatch(); } catch ( SQLException e ) { logger.error( "Error executing SQL during store all", e ); } } @Override public void delete( K key ) { try ( Connection connection = hds.getConnection(); PreparedStatement deleteRow = prepareDelete( connection ) ) { bind( deleteRow, key ); deleteRow.executeUpdate(); connection.close(); } catch ( SQLException e ) { logger.error( "Error executing SQL during delete for key {}.", key, e ); } } @Override public void deleteAll( Collection<K> keys ) { K key = null; try ( Connection connection = hds.getConnection(); PreparedStatement deleteRow = prepareDelete( connection ) ) { connection.setAutoCommit( false ); for ( K k : keys ) { key = k; bind( deleteRow, key ); deleteRow.addBatch(); } deleteRow.executeBatch(); connection.commit(); connection.setAutoCommit( true ); connection.close(); } catch ( SQLException e ) { logger.error( "Error executing SQL during delete all for key {}", key, e ); } } @Override public V load( K key ) { V val = null; try ( Connection connection = hds.getConnection(); PreparedStatement selectRow = prepareSelectByKey( connection ) ) { bind( selectRow, key ); ResultSet rs = selectRow.executeQuery(); if ( rs.next() ) { val = mapToValue( rs ); } connection.close(); logger.debug( "LOADED: {}", val ); } catch ( SQLException e ) { logger.error( "Error executing SQL during select for key {}.", key, e ); } return val; } @Override public Map<K, V> loadAll( Collection<K> keys ) { Map<K, V> result = new MapMaker().initialCapacity( keys.size() ).makeMap(); keys.parallelStream().forEach( key -> { V value = load( key ); if ( value != null ) { result.put( key, value ); } } ); return result; } @Override public Iterable<K> loadAllKeys() { logger.info( "Starting 
load all keys for Edge Mapstore" ); try { Connection connection = hds.getConnection(); Statement stmt = connection.createStatement(); connection.setAutoCommit( false ); stmt.setFetchSize( 50000 ); final ResultSet rs = stmt.executeQuery( selectAllKeysQuery ); return StreamUtil .stream( () -> new KeyIterator<K>( rs, new CountdownConnectionCloser( connection, 1 ), this::mapToKey ) ) .peek( key -> logger.debug( "Key to load: {}", key ) ) ::iterator; } catch ( SQLException e ) { logger.error( "Unable to acquire connection load all keys" ); return null; } } @Override public String getMapName() { return mapName; } @Override public String getTable() { return table.getName(); } protected Optional<String> onConflict() { return oc; } protected List<PostgresColumnDefinition> getInsertColumns() { //An empty list of columns means all return ImmutableList.of(); } protected String getInsertQuery() { return insertQuery; } protected PreparedStatement prepareInsert( Connection connection ) throws SQLException { return connection.prepareStatement( getInsertQuery() ); } protected String getDeleteQuery() { return deleteQuery; } protected PreparedStatement prepareDelete( Connection connection ) throws SQLException { return connection.prepareStatement( deleteQuery ); } protected String getSelecAllKeysQuery() { return selectAllKeysQuery; } protected PreparedStatement prepareSelectAllKeys( Connection connection ) throws SQLException { return connection.prepareStatement( selectAllKeysQuery ); } protected String selectByKeyQuery() { return selectByKeyQuery; } protected PreparedStatement prepareSelectByKey( Connection connection ) throws SQLException { return connection.prepareStatement( selectByKeyQuery ); } @Override public MapStoreConfig getMapStoreConfig() { return new MapStoreConfig() .setImplementation( this ) .setEnabled( true ) .setWriteDelaySeconds( 0 ); } @Override public MapConfig getMapConfig() { return new MapConfig( getMapName() ) .setMapStoreConfig( getMapStoreConfig() ); } 
protected void handleStoreFailed( K key, V value ) { //Do nothing by default } protected List<PostgresColumnDefinition> keyColumns() { return ImmutableList.copyOf( table.getPrimaryKey() ); } protected List<PostgresColumnDefinition> valueColumns() { return ImmutableList.copyOf( table.getColumns() ); } /** * You must bind update parameters as well as insert parameters */ protected abstract void bind( PreparedStatement ps, K key, V value ) throws SQLException; protected abstract void bind( PreparedStatement ps, K key ) throws SQLException; protected abstract V mapToValue( ResultSet rs ) throws SQLException; protected abstract K mapToKey( ResultSet rs ) throws SQLException; }
package de.digitalcollections.iiif.bookshelf.config; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.mongodb.MongoClient; import com.mongodb.ReadPreference; import com.mongodb.ServerAddress; import com.mongodb.WriteConcern; import de.digitalcollections.iiif.presentation.config.SpringConfigBackendPresentation; import de.digitalcollections.iiif.presentation.model.impl.jackson.v2.IiifPresentationApiObjectMapper; import java.util.ArrayList; import java.util.List; import org.mongeez.MongeezRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; import org.springframework.context.support.PropertySourcesPlaceholderConfigurer; import org.springframework.core.io.ClassPathResource; import org.springframework.data.mongodb.config.AbstractMongoConfiguration; import org.springframework.data.mongodb.config.EnableMongoAuditing; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; import org.springframework.data.web.config.EnableSpringDataWebSupport; @Configuration @ComponentScan(basePackages = { "de.digitalcollections.iiif.bookshelf.backend.repository.impl" }) @PropertySource(value = { "classpath:de/digitalcollections/iiif/bookshelf/config/SpringConfigBackend-${spring.profiles.active:PROD}.properties" }) @EnableMongoRepositories(basePackages = {"de.digitalcollections.iiif.bookshelf.backend.repository"}) @EnableMongoAuditing @EnableSpringDataWebSupport @Import(SpringConfigBackendPresentation.class) public class 
SpringConfigBackend extends AbstractMongoConfiguration { private static final Logger LOGGER = LoggerFactory.getLogger(SpringConfigBackend.class); @Value("${mongo.host}") private String mongoHost; @Value("${mongo.port}") private int mongoPort; @Bean public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() { return new PropertySourcesPlaceholderConfigurer(); } @Override protected String getDatabaseName() { return "iiif-bookshelf"; } /* * Factory bean that creates the com.mongodb.Mongo instance */ @Override @Bean public MongoClient mongo() throws Exception { MongoClient client; if (mongoHost.contains(",")) { List<ServerAddress> addresses = new ArrayList<>(); for (String host : mongoHost.split(",")) { addresses.add(new ServerAddress(host, mongoPort)); } client = new MongoClient(addresses); client.setReadPreference(ReadPreference.secondaryPreferred()); } else { client = new MongoClient(mongoHost, mongoPort); } client.setWriteConcern(WriteConcern.ACKNOWLEDGED); return client; } @Override protected String getMappingBasePackage() { return "de.digitalcollections.iiif.bookshelf.model"; } @Bean @Override @DependsOn(value = "mongeezRunner") public MongoTemplate mongoTemplate() throws Exception { return new MongoTemplate(mongo(), getDatabaseName()); } @Bean public ObjectMapper objectMapper() { ObjectMapper objectMapper = new IiifPresentationApiObjectMapper(); objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); objectMapper.enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY); // define which fields schould be ignored with Filter-classes: // objectMapper.addMixIn(User.class, UserJsonFilter.class); // objectMapper.addMixIn(GrantedAuthority.class, GrantedAuthorityJsonFilter.class); return objectMapper; } @Bean public MongeezRunner mongeezRunner() throws Exception { MongeezRunner mongeezRunner = new MongeezRunner(); mongeezRunner.setMongo(mongo()); mongeezRunner.setExecuteEnabled(true); 
mongeezRunner.setDbName(getDatabaseName()); mongeezRunner.setFile(new ClassPathResource("/de/digitalcollections/iiif/bookshelf/mongeez/mongeez.xml")); return mongeezRunner; } }
package de.fernunihagen.dna.jkn.scalephant.distribution; import java.io.IOException; import java.util.Collection; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.ZooKeeper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import de.fernunihagen.dna.jkn.scalephant.Lifecycle; public class ZookeeperClient implements Lifecycle, Watcher { /** * The list of the zookeeper hosts */ protected final Collection<String> zookeeperHosts; /** * The zookeeper client instance */ protected ZooKeeper zookeeper; /** * The timeout for the zookeeper session */ protected final static int DEFAULT_TIMEOUT = 3000; /** * The logger */ private final static Logger logger = LoggerFactory.getLogger(ZookeeperClient.class); public ZookeeperClient(final Collection<String> zookeeperHosts) { super(); this.zookeeperHosts = zookeeperHosts; } /** * Connect to zookeeper */ @Override public void init() { try { zookeeper = new ZooKeeper(generateConnectString(), DEFAULT_TIMEOUT, this); } catch (IOException e) { logger.warn("Got exception while connecting to zookeeper", e); } } /** * Disconnect from zookeeper */ @Override public void shutdown() { if(zookeeper != null) { try { zookeeper.close(); } catch (InterruptedException e) { logger.warn("Got exception while closing zookeeper connection", e); } zookeeper = null; } } /** * Build a comma separated list of the zookeeper nodes * @return */ protected String generateConnectString() { final StringBuilder sb = new StringBuilder(); for(final String zookeeperHost : zookeeperHosts) { boolean wasEmpty = (sb.length() == 0); sb.append(zookeeperHost); if(wasEmpty) { sb.append(", "); } } return sb.toString(); } /** * Zookeeper watched event */ @Override public void process(final WatchedEvent watchedEvent) { } /** * Register this instance of the scalephant * @param clustername * @param 
ownInstanceName */ public boolean registerScalephantInstance(final String clustername, final String ownInstanceName) { if(zookeeper == null) { logger.warn("Register called but not connected to zookeeper"); return false; } try { registerClusternameIfNeeded(clustername); registerInstance(clustername, ownInstanceName); } catch (KeeperException | InterruptedException e) { logger.warn("Got exception while reigster to zookeeper", e); return false; } return true; } /** * Register the scalephant instance * @param clustername * @param ownInstanceName * @throws InterruptedException * @throws KeeperException */ protected void registerInstance(final String clustername, final String ownInstanceName) throws KeeperException, InterruptedException { final String instanceZookeeperPath = getClusterPath(clustername) + "/" + ownInstanceName; logger.info("Register instance on: " + instanceZookeeperPath); zookeeper.create(instanceZookeeperPath, "".getBytes(), ZooDefs.Ids.READ_ACL_UNSAFE, CreateMode.EPHEMERAL); } /** * Register the name of the cluster in the zookeeper directory * @param clustername * @throws KeeperException * @throws InterruptedException */ protected void registerClusternameIfNeeded(final String clustername) throws KeeperException, InterruptedException { final String clusterPath = getClusterPath(clustername); if(zookeeper.exists(clusterPath, this) == null) { logger.info(clusterPath + " not found, creating"); zookeeper.create(clusterPath, "".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); } } /** * Get the path of the zookeeper clustername * @param clustername * @return */ protected String getClusterPath(final String clustername) { return "/" + clustername; } }
package de.is24.deadcode4j.analyzer;

import de.is24.deadcode4j.Analyzer;
import de.is24.deadcode4j.CodeContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;

import javax.annotation.Nonnull;
import java.io.File;

import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.isEmpty;

/**
 * Analyzes both <code>web.xml</code> and class files: looks for implementations of
 * {@link javax.servlet.ServletContainerInitializer} if the <code>metadata-complete</code> attribute of the
 * <code>web-app</code> element is missing or set to "false".
 *
 * @since 1.5
 */
public class ServletContainerInitializerAnalyzer implements Analyzer {

    private final Logger logger = LoggerFactory.getLogger(getClass());
    // Intercepting context shared by both sub-analyzers; it collects the
    // metadata-complete flag and the found initializer implementations.
    private final ServletContainerInitializerCodeContext context = new ServletContainerInitializerCodeContext();
    // Finds classes implementing the initializer interface (set up in the constructor).
    private final Analyzer classFinder;
    // Identifier reported as the depending entity when registering dependencies.
    private final String depender;
    // Reads only the root <web-app> element of web.xml to check metadata-complete,
    // then aborts parsing via StopParsing.
    private final Analyzer webXmlAnalyzer = new XmlAnalyzer("web.xml") {
        @Nonnull
        @Override
        protected DefaultHandler createHandlerFor(@Nonnull final CodeContext codeContext) {
            return new DefaultHandler() {
                @Override
                public void startElement(String uri, String localName, String qName, Attributes attributes)
                        throws StopParsing {
                    // Only the metadata-complete attribute of the root element matters;
                    // parsing is stopped unconditionally after inspecting it.
                    if ("web-app".equals(localName) && "true".equals(attributes.getValue("metadata-complete"))) {
                        ((ServletContainerInitializerCodeContext) codeContext).setMetadataComplete();
                    }
                    throw new StopParsing();
                }
            };
        }
    };

    /**
     * Creates a new instance of <code>ServletContainerInitializerAnalyzer</code>.
     *
     * @param dependerId a description of the <i>depending entity</i> with which to
     *                   call {@link de.is24.deadcode4j.CodeContext#addDependencies(String, Iterable)}
     * @param fqcnOfInitializerInterface the fqcn of the interface whose implementations represent a
     *                                   <code>ServletContainerInitializer</code> or something comparable
     */
    protected ServletContainerInitializerAnalyzer(String dependerId, String fqcnOfInitializerInterface) {
        this.depender = dependerId;
        this.classFinder = new InterfacesAnalyzer("ServletContainerInitializer-implementation",
                fqcnOfInitializerInterface) {
        };
    }

    /** Default setup for the standard JEE ServletContainerInitializer interface. */
    public ServletContainerInitializerAnalyzer() {
        this("JEE-ServletContainerInitializer", "javax.servlet.ServletContainerInitializer");
    }

    @Override
    public void doAnalysis(@Nonnull CodeContext codeContext, @Nonnull File fileName) {
        // Route both sub-analyzers through the intercepting context so their
        // findings can be combined in finishAnalysis().
        this.context.setOriginalContext(codeContext);
        this.webXmlAnalyzer.doAnalysis(this.context, fileName);
        this.classFinder.doAnalysis(this.context, fileName);
    }

    @Override
    public void finishAnalysis(@Nonnull CodeContext codeContext) {
        if (this.context.isMetadataComplete()) {
            // With metadata-complete="true" the container ignores initializers,
            // so implementations are not registered as live code.
            logger.debug("Found web.xml with completed metadata; " +
                    "ServletContainerInitializer implementations are treated as dead code");
            return;
        }
        // The classFinder recorded its findings as dependencies on the intercepting
        // context; re-register them against the real context under our depender id.
        Iterable<String> initializerClasses = concat(this.context.getAnalyzedCode().getCodeDependencies().values());
        if (!isEmpty(initializerClasses)) {
            codeContext.addDependencies(depender, initializerClasses);
        }
    }

    /**
     * CodeContext decorator: forwards analyzed classes to the original context while
     * retaining dependency registrations locally (addDependencies is intentionally
     * NOT overridden) and tracking the metadata-complete flag.
     */
    private static class ServletContainerInitializerCodeContext extends CodeContext {
        private CodeContext originalContext;
        private boolean metadataComplete = false;

        @Override
        public void addAnalyzedClass(@Nonnull String clazz) {
            this.originalContext.addAnalyzedClass(clazz);
        }

        public void setMetadataComplete() {
            this.metadataComplete = true;
        }

        public boolean isMetadataComplete() {
            return metadataComplete;
        }

        public void setOriginalContext(CodeContext codeContext) {
            this.originalContext = codeContext;
        }
    }
}
package eu.dzhw.fdz.metadatamanagement.config; import java.util.ArrayList; import java.util.List; import javax.inject.Inject; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; import org.springframework.context.annotation.Profile; import org.springframework.core.convert.converter.Converter; import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.config.AbstractMongoConfiguration; import org.springframework.data.mongodb.core.convert.CustomConversions; import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean; import com.mongodb.Mongo; import eu.dzhw.fdz.metadatamanagement.config.oauth2.OAuth2AuthenticationReadConverter; import eu.dzhw.fdz.metadatamanagement.domain.util.JSR310DateConverters.DateToLocalDateConverter; import eu.dzhw.fdz.metadatamanagement.domain.util.JSR310DateConverters.DateToLocalDateTimeConverter; import eu.dzhw.fdz.metadatamanagement.domain.util.JSR310DateConverters.DateToZonedDateTimeConverter; import eu.dzhw.fdz.metadatamanagement.domain.util.JSR310DateConverters.LocalDateTimeToDateConverter; import eu.dzhw.fdz.metadatamanagement.domain.util.JSR310DateConverters.LocalDateToDateConverter; import eu.dzhw.fdz.metadatamanagement.domain.util.JSR310DateConverters.ZonedDateTimeToDateConverter; @Configuration @EnableMongoRepositories("eu.dzhw.fdz.metadatamanagement.repository") @Profile(Constants.SPRING_PROFILE_CLOUD) @DependsOn("cloudDatabaseConfiguration") public class CloudMongoDbConfiguration extends AbstractMongoConfiguration { // private final Logger log = LoggerFactory.getLogger(CloudDatabaseConfiguration.class); @Inject private MongoDbFactory mongoDbFactory; @Bean public ValidatingMongoEventListener 
validatingMongoEventListener() { return new ValidatingMongoEventListener(validator()); } @Bean public LocalValidatorFactoryBean validator() { return new LocalValidatorFactoryBean(); } @Bean public CustomConversions customConversions() { List<Converter<?, ?>> converterList = new ArrayList<>(); converterList.add(new OAuth2AuthenticationReadConverter()); converterList.add(DateToZonedDateTimeConverter.INSTANCE); converterList.add(ZonedDateTimeToDateConverter.INSTANCE); converterList.add(DateToLocalDateConverter.INSTANCE); converterList.add(LocalDateToDateConverter.INSTANCE); converterList.add(DateToLocalDateTimeConverter.INSTANCE); converterList.add(LocalDateTimeToDateConverter.INSTANCE); return new CustomConversions(converterList); } @Override protected String getDatabaseName() { return this.mongoDbFactory.getDb().getName(); } @Override public Mongo mongo() throws Exception { return this.mongoDbFactory.getDb().getMongo(); } }
package liquibase.ext.spatial.sqlgenerator; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Collections; import java.util.HashMap; import java.util.Map; import liquibase.database.Database; import liquibase.database.core.OracleDatabase; import liquibase.database.jvm.JdbcConnection; import liquibase.exception.UnexpectedLiquibaseException; import liquibase.statement.core.InsertStatement; /** * The <code>SpatialInsertGeneratorOracle</code> generates the SQL for <code>INSERT</code>ing * geometries into Oracle. */ public class SpatialInsertGeneratorOracle extends AbstractSpatialInsertGenerator { /** The mapping of EPSG SRID to Oracle SRID. */ private final Map<String, String> epsgToOracleMap = Collections .synchronizedMap(new HashMap<String, String>()); /** * Verifies that the <code>InsertStatement</code> has WKT or EWKT. */ @Override public boolean supports(final InsertStatement statement, final Database database) { return database instanceof OracleDatabase; } /** * Returns the name of the function that converts Well-Known Text to a database-specific * geometry. * * @return the name of the function that converts WKT to a geometry. */ @Override public String getGeomFromWktFunction() { return "SDO_GEOMETRY"; } /** * Handles the Well-Known Text and SRID for Oracle. */ @Override public String convertToFunction(final String wkt, final String srid, final Database database) { final String oracleWkt; // Strings longer than 4000 characters need to be converted to CLOBs. 
if (wkt.length() > 4000) { int index = 4000; final StringBuilder clobs = new StringBuilder("TO_CLOB('").append(wkt.substring(0, index)) .append("')"); while (index < wkt.length()) { final int endIndex = Math.min(index + 4000, wkt.length()); clobs.append(" || TO_CLOB('").append(wkt.substring(index, endIndex)).append("')"); index = endIndex; } oracleWkt = clobs.toString(); } else { oracleWkt = wkt; } final String oracleSrid; if (srid != null && !srid.equals("")) { if (this.epsgToOracleMap.containsKey(srid)) { oracleSrid = this.epsgToOracleMap.get(srid); } else { oracleSrid = getOracleSrid(srid, database); this.epsgToOracleMap.put(srid, oracleSrid); } } else { oracleSrid = null; } return super.convertToFunction(oracleWkt, oracleSrid, database); } /** * Queries to the database to convert the given EPSG SRID to the corresponding Oracle SRID. * * @param srid * the EPSG SRID. * @param database * the database instance. * @return the corresponding Oracle SRID. */ protected String getOracleSrid(final String srid, final Database database) { final String oracleSrid; final JdbcConnection jdbcConnection = (JdbcConnection) database.getConnection(); final Connection connection = jdbcConnection.getUnderlyingConnection(); Statement statement; ResultSet resultSet; try { statement = connection.createStatement(); resultSet = statement.executeQuery("SELECT SDO_CS.MAP_EPSG_TO_ORACLE(" + srid + ") FROM dual"); resultSet.next(); oracleSrid = resultSet.getString(1); statement.close(); } catch (final SQLException e) { throw new UnexpectedLiquibaseException("Failed to find the Oracle SRID for EPSG:" + srid, e); } finally { try { connection.close(); } catch (final SQLException ignore) { } } return oracleSrid; } }
package me.sharpjaws.sharpSK.hooks.PermissionsEx;

import javax.annotation.Nullable;

import org.bukkit.event.Event;

import ch.njol.skript.classes.Changer;
import ch.njol.skript.lang.Expression;
import ch.njol.skript.lang.SkriptParser;
import ch.njol.skript.lang.util.SimpleExpression;
import ch.njol.util.Kleenean;
import ch.njol.util.coll.CollectionUtils;
import ru.tehkode.permissions.bukkit.PermissionsEx;

/**
 * Skript expression exposing the PermissionsEx rank ladder of a named group.
 * Supports reading the ladder and setting it via Skript's SET change mode.
 */
public class ExprPexGroupRankLadder extends SimpleExpression<String> {

    // The group-name expression parsed from the script ("%string%" in the pattern).
    private Expression<String> s;

    @Override
    public boolean isSingle() {
        // A group has exactly one rank ladder.
        return true;
    }

    @Override
    public Class<? extends String> getReturnType() {
        return String.class;
    }

    @SuppressWarnings("unchecked")
    @Override
    public boolean init(Expression<?>[] expr, int matchedPattern, Kleenean paramKleenean,
            SkriptParser.ParseResult paramParseResult) {
        // Capture the group-name expression; always reports successful parsing.
        s = (Expression<String>) expr[0];
        return true;
    }

    @Override
    public String toString(@Nullable Event e, boolean paramBoolean) {
        return "rank[]ladder of [the] group %string%";
    }

    @Override
    @Nullable
    protected String[] get(Event e) {
        // NOTE(review): if the group does not exist, getGroup(...) presumably yields
        // null/throws here and this propagates an NPE -- confirm against PermissionsEx API.
        return new String[] { PermissionsEx.getPermissionManager().getGroup(s.getSingle(e)).getRankLadder() };
    }

    @Override
    public void change(Event e, Object[] delta, Changer.ChangeMode mode) {
        if (mode == Changer.ChangeMode.SET) {
            try {
                PermissionsEx.getPermissionManager().getGroup(s.getSingle(e)).setRankLadder((String) delta[0]);
            } catch (NullPointerException ex) {
                // Deliberately ignored: setting the ladder on a missing group is a no-op
                // (best-effort behavior; silent-swallow is presumably intentional -- TODO confirm).
            }
        }
    }

    @Override
    public Class<?>[] acceptChange(Changer.ChangeMode mode) {
        // Only SET is supported, with a single String value.
        if (mode == Changer.ChangeMode.SET)
            return CollectionUtils.array(new Class[] { String.class });
        return null;
    }
}
package org.avaje.ebean.typequery.generator.write; import org.avaje.ebean.typequery.generator.GenerationMetaData; import org.avaje.ebean.typequery.generator.GeneratorConfig; import org.avaje.ebean.typequery.generator.asm.tree.FieldNode; import org.avaje.ebean.typequery.generator.read.EntityBeanPropertyReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.TreeSet; /** * A simple implementation that generates and writes query beans. */ public class SimpleQueryBeanWriter { protected static final Logger logger = LoggerFactory.getLogger(SimpleQueryBeanWriter.class); public static final String NEWLINE = "\n"; protected final GeneratorConfig config; protected final EntityBeanPropertyReader classMeta; protected final GenerationMetaData generationMetaData; protected boolean writingAssocBean; protected String destPackage; protected String shortName; protected String origShortName; protected FileWriter writer; protected Set<String> importTypes = new TreeSet<>(); protected List<PropertyMeta> properties = new ArrayList<>(); public SimpleQueryBeanWriter(GeneratorConfig config, EntityBeanPropertyReader classMeta, GenerationMetaData generationMetaData) { this.config = config; this.classMeta = classMeta; this.generationMetaData = generationMetaData; destPackage = config.getDestPackage(); shortName = deriveShortName(classMeta.name); } protected void gatherPropertyDetails() { importTypes.add(asDotNotation(classMeta.name)); importTypes.add("org.avaje.ebean.typequery.TQRootBean"); importTypes.add("org.avaje.ebean.typequery.TypeQueryBean"); importTypes.add("com.avaje.ebean.EbeanServer"); addClassProperties(classMeta); } /** * Recursively add properties from the inheritance hierarchy. * <p> * Includes properties from mapped super classes and usual inheritance. 
* </p> */ protected void addClassProperties(EntityBeanPropertyReader classMetaData) { List<FieldNode> allProperties = classMetaData.getAllProperties(generationMetaData); for (FieldNode field : allProperties) { PropertyType type = generationMetaData.getPropertyType(field, classMeta); if (type == null) { logger.warn("No support for field [" + field.name + "] desc[" + field.desc + "] signature [" + field.signature + "]"); } else { type.addImports(importTypes); properties.add(new PropertyMeta(field.name, type)); } } } /** * Write the type query bean (root bean). */ public void writeRootBean() throws IOException { gatherPropertyDetails(); if (classMeta.isEntity()) { writer = createFileWriter(); writePackage(); writeImports(); writeClass(); writeAlias(); writeFields(); writeConstructors(); writeClassEnd(); writer.flush(); writer.close(); } } /** * Write the type query assoc bean. */ public void writeAssocBean() throws IOException { writingAssocBean = true; destPackage = destPackage+".assoc"; origShortName = shortName; shortName = "Assoc"+shortName; prepareAssocBeanImports(); writer = createFileWriter(); writePackage(); writeImports(); writeClass(); writeFields(); writeConstructors(); writeClassEnd(); writer.flush(); writer.close(); } /** * Prepare the imports for writing assoc bean. */ protected void prepareAssocBeanImports() { importTypes.remove(asDotNotation(classMeta.name)); importTypes.remove("org.avaje.ebean.typequery.TQRootBean"); importTypes.remove("com.avaje.ebean.EbeanServer"); importTypes.add("org.avaje.ebean.typequery.TQAssocBean"); importTypes.add("org.avaje.ebean.typequery.TQProperty"); importTypes.add(config.getEntityBeanPackage() + "." 
+ origShortName); if (!config.isAopStyle()) { importTypes.add("org.avaje.ebean.typequery.TQPath"); } // remove imports for the same package Iterator<String> importsIterator = importTypes.iterator(); while (importsIterator.hasNext()){ String importType = importsIterator.next(); // there are no subpackages so just use startsWith(destPackage) if (importType.startsWith(destPackage)) { importsIterator.remove(); } } } /** * Write constructors. */ protected void writeConstructors() throws IOException { if (writingAssocBean) { writeAssocBeanFetch(); writeAssocBeanConstructor(); } else { writeRootBeanConstructor(); } } /** * Write the constructors for 'root' type query bean. */ protected void writeRootBeanConstructor() throws IOException { writer.append(NEWLINE); writer.append(" /**").append(NEWLINE); writer.append(" * Construct with a given EbeanServer.").append(NEWLINE); writer.append(" */").append(NEWLINE); writer.append(" public Q").append(shortName).append("(EbeanServer server) {").append(NEWLINE); writer.append(" super(").append(shortName).append(".class, server);").append(NEWLINE); writer.append(" }").append(NEWLINE); writer.append(NEWLINE); if (config.isAopStyle()) { writer.append(" /**").append(NEWLINE); writer.append(" * Construct using the default EbeanServer.").append(NEWLINE); writer.append(" */").append(NEWLINE); writer.append(" public Q").append(shortName).append("() {").append(NEWLINE); writer.append(" super(").append(shortName).append(".class);").append(NEWLINE); writer.append(" }").append(NEWLINE); } else { // verbose manual style requiring manual depth control (non-AOP) writer.append(" public Q").append(shortName).append("() {").append(NEWLINE); writer.append(" this(").append(String.valueOf(config.getMaxPathTraversalDepth())).append(");").append(NEWLINE); writer.append(" }").append(NEWLINE); writer.append(" public Q").append(shortName).append("(int maxDepth) {").append(NEWLINE); writer.append(" 
super(").append(shortName).append(".class);").append(NEWLINE); writer.append(" setRoot(this);").append(NEWLINE); for (PropertyMeta property : properties) { property.writeConstructorSimple(writer, shortName, false); } for (PropertyMeta property : properties) { property.writeConstructorAssoc(writer, shortName, false); } writer.append(" }").append(NEWLINE); } writer.append(NEWLINE); writer.append(" /**").append(NEWLINE); writer.append(" * Construct for Alias.").append(NEWLINE); writer.append(" */").append(NEWLINE); writer.append(" private Q").append(shortName).append("(boolean dummy) {").append(NEWLINE); writer.append(" super(dummy);").append(NEWLINE); writer.append(" }").append(NEWLINE); } protected void writeAssocBeanFetch() throws IOException { writer.append(" /**").append(NEWLINE); writer.append(" * Eagerly fetch this association loading the specified properties.").append(NEWLINE); writer.append(" */").append(NEWLINE); writer.append(" @SafeVarargs").append(NEWLINE); writer.append(" public final R fetch(TQProperty<Q").append(shortName).append(">... properties) {").append(NEWLINE); writer.append(" return fetchProperties(properties);").append(NEWLINE); writer.append(" }").append(NEWLINE); writer.append(NEWLINE); } /** * Write constructor for 'assoc' type query bean. 
*/ protected void writeAssocBeanConstructor() throws IOException { if (config.isAopStyle()) { // minimal constructor writer.append(" public Q").append(shortName).append("(String name, R root) {").append(NEWLINE); writer.append(" super(name, root);").append(NEWLINE); writer.append(" }").append(NEWLINE); } else { // generate the constructor for non-AOP manual/verbose style writer.append(" public Q").append(shortName).append("(String name, R root, int depth) {").append(NEWLINE); writer.append(" this(name, root, null, depth);").append(NEWLINE); writer.append(" }").append(NEWLINE); writer.append(" public Q").append(shortName).append("(String name, R root, String prefix, int depth) {").append(NEWLINE); writer.append(" super(name, root, prefix);").append(NEWLINE); writer.append(" String path = TQPath.add(prefix, name);").append(NEWLINE); for (PropertyMeta property : properties) { property.writeConstructorSimple(writer, shortName, true); } if (hasAssocProperties()) { writer.append(" if (--depth > 0) {").append(NEWLINE); for (PropertyMeta property : properties) { property.writeConstructorAssoc(writer, shortName, true); } writer.append(" }").append(NEWLINE); } writer.append(" }").append(NEWLINE); } } /** * Return true if this has at least one 'assoc' property. */ protected boolean hasAssocProperties() { for (PropertyMeta property : properties) { if (property.isAssociation()) { return true; } } return false; } /** * Write all the fields. */ protected void writeFields() throws IOException { for (PropertyMeta property : properties) { property.writeFieldDefn(writer, shortName, writingAssocBean); writer.append(NEWLINE); } writer.append(NEWLINE); } /** * Write the class definition. 
*/ protected void writeClass() throws IOException { if (writingAssocBean) { writer.append("/**").append(NEWLINE); writer.append(" * Association query bean for ").append(shortName).append(".").append(NEWLINE); writer.append(" */").append(NEWLINE); //public class QAssocContact<R> writer.append("@TypeQueryBean").append(NEWLINE); writer.append("public class ").append("Q").append(shortName); writer.append("<R> extends TQAssocBean<").append(origShortName).append(",R> {").append(NEWLINE); } else { writer.append("/**").append(NEWLINE); writer.append(" * Query bean for ").append(shortName).append(".").append(NEWLINE); writer.append(" */").append(NEWLINE); // public class QContact extends TQRootBean<Contact,QContact> { writer.append("@TypeQueryBean").append(NEWLINE); writer.append("public class ").append("Q").append(shortName) .append(" extends TQRootBean<").append(shortName).append(",Q").append(shortName).append("> {").append(NEWLINE); } writer.append(NEWLINE); } protected void writeAlias() throws IOException { if (!writingAssocBean) { writer.append(" private static final Q").append(shortName).append(" _alias = new Q"); writer.append(shortName).append("(true);").append(NEWLINE); writer.append(NEWLINE); writer.append(" /**").append(NEWLINE); writer.append(" * Return the shared 'Alias' instance used to provide properties to ").append(NEWLINE); writer.append(" * <code>select()</code> and <code>fetch()</code> ").append(NEWLINE); writer.append(" */").append(NEWLINE); writer.append(" public static Q").append(shortName).append(" alias() {").append(NEWLINE); writer.append(" return _alias;").append(NEWLINE); writer.append(" }").append(NEWLINE); writer.append(NEWLINE); } } protected void writeClassEnd() throws IOException { writer.append("}").append(NEWLINE); } /** * Write all the imports. 
*/ protected void writeImports() throws IOException { for (String importType : importTypes) { writer.append("import ").append(importType).append(";").append(NEWLINE); } writer.append(NEWLINE); } protected void writePackage() throws IOException { writer.append("package ").append(destPackage).append(";").append(NEWLINE).append(NEWLINE); } protected FileWriter createFileWriter() throws IOException { String destDirectory = config.getDestDirectory(); File destDir = new File(destDirectory); String packageAsDir = asSlashNotation(destPackage); File packageDir = new File(destDir, packageAsDir); if (!packageDir.exists() && !packageDir.mkdirs()) { logger.error("Failed to create directory [{}] for generated code", packageDir.getAbsoluteFile()); } String fileName = "Q"+shortName+".java"; File dest = new File(packageDir, fileName); logger.info("writing {}", dest.getAbsolutePath()); return new FileWriter(dest); } protected String asDotNotation(String path) { return path.replace('/', '.'); } protected String asSlashNotation(String path) { return path.replace('.', '/'); } protected String deriveShortName(String name) { int startPos = name.lastIndexOf('/'); if (startPos == -1) { return name; } return name.substring(startPos + 1); } }
package org.chocosolver.solver.search.strategy.strategy; import org.chocosolver.solver.search.strategy.decision.Decision; import org.chocosolver.solver.variables.Variable; public class GreedyBranching extends AbstractStrategy { private AbstractStrategy mainSearch; public GreedyBranching(AbstractStrategy mainSearch){ super(mainSearch.getVariables()); this.mainSearch = mainSearch; } @Override public boolean init() { return mainSearch.init(); } @Override public Decision getDecision() { Decision d = mainSearch.getDecision(); if (d != null) { d.setRefutable(false); } return d; } @Override public Decision computeDecision(Variable variable) { Decision d = mainSearch.computeDecision(variable); if (d != null) { d.setRefutable(false); } return d; } }
package nl.mpi.arbil;

import nl.mpi.arbil.importexport.ImportExportDialog;
import nl.mpi.arbil.data.ImdiTreeObject;
import java.awt.Container;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.dnd.DropTarget;
import java.awt.dnd.DropTargetAdapter;
import java.awt.dnd.DropTargetDragEvent;
import java.awt.dnd.DropTargetDropEvent;
import java.util.Hashtable;
import java.util.Vector;
import javax.swing.JComponent;
import javax.swing.JDesktopPane;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.JTable;
import javax.swing.JTree;
import javax.swing.TransferHandler;
import javax.swing.tree.DefaultMutableTreeNode;
import nl.mpi.arbil.data.ImdiSchema;

/**
 * Central drag-and-drop support for the Arbil UI. A single shared
 * {@link ImdiObjectSelection} instance acts simultaneously as the
 * TransferHandler for every drag source and drop target, and as the
 * Transferable itself, so that the dragged ImdiTreeObject array is shared
 * between source and target within the same JVM.
 */
public class ArbilDragDrop {

    // There are numerous limitations of drag and drop in 1.5 and to overcome the resulting issues we need to share the same transferable object on both the drag source and the drop target
    public DataFlavor imdiObjectFlavour = new DataFlavor(ImdiTreeObject.class, "ImdiTreeObject");
    public ImdiObjectSelection imdiObjectSelection = new ImdiObjectSelection();

    // Enables dragging from a table using the shared transfer handler.
    public void addDrag(JTable tableSource) {
        tableSource.setDragEnabled(true);
        tableSource.setTransferHandler(imdiObjectSelection);
    }

    // Enables dragging from a tree; also listens for selection changes and
    // accepts/rejects in-progress drags based on the current drop target node.
    public void addDrag(JTree treeSource) {
        treeSource.setDragEnabled(true);
        treeSource.setTransferHandler(imdiObjectSelection);
        treeSource.addTreeSelectionListener(imdiObjectSelection);
        DropTarget target = treeSource.getDropTarget();
        try {
            target.addDropTargetListener(new DropTargetAdapter() {

                public void dragOver(DropTargetDragEvent dtdEvent) {
                    // Accept or reject based on the last canDropToTarget() evaluation.
                    System.out.println("imdiObjectSelection.dropAllowed: " + imdiObjectSelection.dropAllowed);
                    if (imdiObjectSelection.dropAllowed) {
                        dtdEvent.acceptDrag(dtdEvent.getDropAction());
                    } else {
                        dtdEvent.rejectDrag();
                    }
                }

                public void drop(DropTargetDropEvent e) {
                    // handled by the TransferHandler
                }
            });
        } catch (java.util.TooManyListenersException ex) {
            GuiHelper.linorgBugCatcher.logError(ex);
        }
    }

    // Enables dragging from a list using the shared transfer handler.
    public void addDrag(JList listSource) {
        listSource.setDragEnabled(true);
        listSource.setTransferHandler(imdiObjectSelection);
    }

    // Registers a component as a drop target only (no drag source behaviour).
    public void addTransferHandler(JComponent targetComponent) {
        //targetComponent.setDragEnabled(true);
        targetComponent.setTransferHandler(imdiObjectSelection);
    }

    /**
     * Shared TransferHandler/Transferable. Holds the currently dragged nodes
     * and a set of boolean flags classifying the dragged selection; the flags
     * drive all of the drop-permission checks below.
     */
    public class ImdiObjectSelection extends TransferHandler implements Transferable, javax.swing.event.TreeSelectionListener {

        long dragStartMilliSeconds; // time the transferable was created; used to reject too-quick drops
        DataFlavor flavors[] = {imdiObjectFlavour};
        ImdiTreeObject[] draggedImdiObjects; // the dragged payload (may contain nulls for non-Imdi selections)
        DefaultMutableTreeNode[] draggedTreeNodes; // tree nodes matching draggedImdiObjects when dragged from a tree
        // Classification flags for the current dragged selection; set in classifyTransferableContents().
        public boolean selectionContainsArchivableLocalFile = false;
        public boolean selectionContainsLocalFile = false;
        public boolean selectionContainsLocalDirectory = false;
        public boolean selectionContainsImdiResource = false;
        public boolean selectionContainsImdiCorpus = false;
        public boolean selectionContainsImdiInCache = false;
        public boolean selectionContainsImdiCatalogue = false;
        public boolean selectionContainsImdiSession = false;
        public boolean selectionContainsImdiChild = false;
        public boolean selectionContainsLocal = false;
        public boolean selectionContainsRemote = false;
        public boolean selectionContainsFavourite = false;
        private JComponent currentDropTarget = null; // set in canImport() when hovering over a tree
        public boolean dropAllowed = false; // consulted by the DropTargetAdapter in addDrag(JTree)

        // Re-evaluates drop permission when the selection of the hovered tree changes.
        public void valueChanged(javax.swing.event.TreeSelectionEvent evt) {
            if (evt.getSource() == currentDropTarget) {
                System.out.println("Drag target selection change: " + evt.getSource().toString());
                if (evt.getSource() instanceof ImdiTree) {
                    dropAllowed = canDropToTarget((ImdiTree) evt.getSource());
                    // DropTarget dropTarget = dropTree.getDropTarget();
                }
            }
        }

        // Decides whether the current dragged selection may be dropped onto the
        // lead-selected node of the given tree (or onto its root when nothing is selected).
        private boolean canDropToTarget(ImdiTree dropTree) {
            ImdiTreeObject currentLeadSelection = dropTree.getLeadSelectionNode();
            if (currentLeadSelection == null) {
                // this check is for the root node of the trees
                if (TreeHelper.getSingleInstance().componentIsTheFavouritesTree(currentDropTarget)) {
                    // allow drop to the favourites tree even when no selection is made
                    // allow drop to only the root node of the favourites tree
                    System.out.println("favourites tree check");
                    return !selectionContainsFavourite;
                } else if (TreeHelper.getSingleInstance().componentIsTheLocalCorpusTree(currentDropTarget)) {
                    //if (dropTree.getSelectionPath().getPathCount() == 1) {
                    // allow import to local tree if no nodes are selected
                    // allow drop to the root node if it is an import
                    System.out.println("local corpus tree check");
                    // todo: enable drag to rootnode from favourites but this change also needs to be done in the context menu
                    return (!selectionContainsFavourite && (selectionContainsImdiCorpus || selectionContainsImdiCatalogue || selectionContainsImdiSession));
                }
                System.out.println("no tree check");
                return false;
            } else {
                // this check is for the child nodes of the trees
                System.out.println("currentLeadSelection: " + currentLeadSelection.toString());
                // todo: prevent dragging to self but allow dragging to other branch of parent session
                // todo: look for error dragging actor from favourites
                // todo: look for error in field triggers when merging from favourite (suppress trtiggeres when merging)
                if (TreeHelper.getSingleInstance().componentIsTheLocalCorpusTree(currentDropTarget)) {
                    if (currentLeadSelection.isCmdiMetaDataNode()) {
                        if (currentLeadSelection.getParentDomNode().nodeTemplate == null) {
                            System.out.println("no template for drop target node");
                            return false;
                        }
                        System.out.println("Drop to CMDI: " + currentLeadSelection.getURI().getFragment());
                        return (currentLeadSelection.getParentDomNode().nodeTemplate.pathCanHaveResource(currentLeadSelection.getURI().getFragment()));
                    } else if (currentLeadSelection.isDirectory) {
                        return false; // nothing can be dropped to a directory
                    } else if (currentLeadSelection.isCorpus()) {
                        if (selectionContainsImdiCorpus || selectionContainsImdiCatalogue || selectionContainsImdiSession) {
                            return true;
                        }
                    } else if (currentLeadSelection.isCatalogue()) {
                        return false; // nothing can be dropped to a catalogue
                    } else if (currentLeadSelection.isSession()) {
                        if (selectionContainsArchivableLocalFile || (selectionContainsImdiChild && selectionContainsFavourite)) {
                            return true;
                        }
                    } else if (currentLeadSelection.isImdiChild()) {
                        // TODO: in this case we should loop over the dragged nodes and check each one for compatability
                        if (selectionContainsLocalFile || (selectionContainsImdiChild && selectionContainsFavourite)) {
                            // TODO: allow drag drop of appropriate imdi child nodes to sessions and compatable subnodes
                            return true;
                        }
                    }
                    // public boolean selectionContainsArchivableLocalFile = false;
                    // public boolean selectionContainsLocalFile = false;
                    // public boolean selectionContainsLocalDirectory = false;
                    // public boolean selectionContainsImdiResource = false;
                    // public boolean selectionContainsImdiCorpus = false;
                    // public boolean selectionContainsImdiInCache = false;
                    // public boolean selectionContainsImdiCatalogue = false;
                    // public boolean selectionContainsImdiSession = false;
                    // public boolean selectionContainsImdiChild = false;
                    // public boolean selectionContainsLocal = false;
                    // public boolean selectionContainsRemote = false;
                    return false;
                } else {
                    return false;
                }
            }
        }

        // Copies the current selection to the clipboard; trees copy node URLs,
        // tables copy the selected rows, anything else falls back to the default.
        @Override
        public void exportToClipboard(JComponent comp, Clipboard clip, int action) throws IllegalStateException {
            System.out.println("exportToClipboard: " + comp);
            createTransferable(null); // clear the transfer objects
            if (comp instanceof ImdiTree) {
                ImdiTree sourceTree = (ImdiTree) comp;
                ImdiTreeObject[] selectedImdiNodes = sourceTree.getSelectedNodes();
                if (selectedImdiNodes != null) {
                    sourceTree.copyNodeUrlToClipboard(selectedImdiNodes);
                }
            } else if (comp instanceof ImdiTable) {
                ImdiTable sourceTable = (ImdiTable) comp;
                sourceTable.copySelectedTableRowsToClipBoard();
            } else {
                super.exportToClipboard(comp, clip, action);
            }
        }

        // Only COPY is ever offered; trees additionally require a non-root selection.
        @Override
        public int getSourceActions(JComponent c) {
            System.out.println("getSourceActions");
            if ((c instanceof JTree)) {
                JTree jTree = (JTree) c;
                // allow drag providing that the root node is not the only node selected
                if (jTree.getSelectionCount() > 1 || (jTree.getSelectionCount() == 1 && jTree.getSelectionPath().getPathCount() > 1)) {
                    // must have a selection and not be the root node which is never an imdi node
                    // no selection will only occur on some java runtimes but must be handled here
                    return TransferHandler.COPY;
                }
            } else if (c instanceof JTable) {
                return TransferHandler.COPY;
            } else if (c instanceof JList) {
                return TransferHandler.COPY;
            }
            return TransferHandler.NONE;
        }

        // Checks whether the dragged selection may be imported into the given
        // component; for trees this also records currentDropTarget and dropAllowed.
        @Override
        public boolean canImport(JComponent comp, DataFlavor flavor[]) {
            System.out.println("canImport: " + comp);
            currentDropTarget = null;
            dropAllowed = false;
            if (comp instanceof JTree) {
                if (TreeHelper.getSingleInstance().componentIsTheLocalCorpusTree(comp)) {
                    System.out.println("localcorpustree so can drop here");
                    if (selectionContainsArchivableLocalFile ||
                            //selectionContainsLocalFile ||
                            //selectionContainsLocalDirectory ||
                            //selectionContainsImdiResource ||
                            //selectionContainsLocal ||
                            //selectionContainsRemote ||
                            selectionContainsImdiCorpus || selectionContainsImdiCatalogue || selectionContainsImdiSession || selectionContainsImdiChild) {
                        System.out.println("dragged contents are acceptable");
                        currentDropTarget = comp; // store the source component for the tree node sensitive drop
                        dropAllowed = canDropToTarget((ImdiTree) comp);
                        return true;
                    }
                }
                if (TreeHelper.getSingleInstance().componentIsTheFavouritesTree(comp)) {
                    System.out.println("favourites tree so can drop here");
                    if (//selectionContainsArchivableLocalFile &&
                            //selectionContainsLocalFile ||
                            //selectionContainsLocalDirectory &&
                            //selectionContainsImdiResource ||
                            //selectionContainsLocal ||
                            //selectionContainsRemote ||
                            //selectionContainsImdiCorpus ||
                            selectionContainsImdiCatalogue || selectionContainsImdiSession || selectionContainsImdiChild) {
                        System.out.println("dragged contents are acceptable");
                        currentDropTarget = comp; // store the source component for the tree node sensitive drop
                        dropAllowed = canDropToTarget((ImdiTree) comp);
                        return true;
                    }
                }
            } else {
                // search through al the parent nodes to see if we can find a drop target
                dropAllowed = (null != findImdiDropableTarget(comp));
                System.out.println("dropAllowed: " + dropAllowed);
                return dropAllowed;
            }
            System.out.println("canImport false");
            return false;
        }

        // Walks up the container hierarchy looking for a LinorgSplitPanel or
        // a JDesktopPane; returns null when neither is an ancestor.
        private Container findImdiDropableTarget(Container tempCom) {
            while (tempCom != null) {
                if (tempCom instanceof LinorgSplitPanel || tempCom instanceof JDesktopPane) {
                    System.out.println("canImport true");
                    return tempCom;
                }
                tempCom = tempCom.getParent();
            }
            return null;
        }

        // Builds the shared transferable from the given source component.
        // Called with null (e.g. from importData) purely to reset all state.
        @Override
        public Transferable createTransferable(JComponent comp) {
            dragStartMilliSeconds = System.currentTimeMillis();
            // Reset the payload and every classification flag before (re)filling them.
            draggedImdiObjects = null;
            draggedTreeNodes = null;
            selectionContainsArchivableLocalFile = false;
            selectionContainsLocalFile = false;
            selectionContainsLocalDirectory = false;
            selectionContainsImdiResource = false;
            selectionContainsImdiCorpus = false;
            selectionContainsImdiCatalogue = false;
            selectionContainsImdiSession = false;
            selectionContainsImdiChild = false;
            selectionContainsLocal = false;
            selectionContainsRemote = false;
            selectionContainsFavourite = false;
            // if (comp != null) { System.out.println("createTransferable: " + comp.toString()); }
            if (comp instanceof ImdiTree) {
                ImdiTree draggedTree = (ImdiTree) comp;
                //System.out.println("selectedCount: " + draggedTree.getSelectionCount());
                draggedImdiObjects = new ImdiTreeObject[draggedTree.getSelectionCount()];
                draggedTreeNodes = new DefaultMutableTreeNode[draggedTree.getSelectionCount()];
                for (int selectedCount = 0; selectedCount < draggedTree.getSelectionCount(); selectedCount++) {
                    DefaultMutableTreeNode parentNode = (DefaultMutableTreeNode) draggedTree.getSelectionPaths()[selectedCount].getLastPathComponent();
                    //System.out.println("parentNode: " + parentNode.toString());
                    if (parentNode.getUserObject() instanceof ImdiTreeObject) {
                        //System.out.println("DraggedImdi: " + parentNode.getUserObject().toString());
                        draggedImdiObjects[selectedCount] = (ImdiTreeObject) (parentNode.getUserObject());
                        draggedTreeNodes[selectedCount] = parentNode;
                    } else {
                        // Non-Imdi selections leave null holes in both arrays.
                        draggedImdiObjects[selectedCount] = null;
                        draggedTreeNodes[selectedCount] = null;
                    }
                }
                classifyTransferableContents();
                return this;
            } else if (comp instanceof ImdiTable) {
                draggedImdiObjects = ((ImdiTable) comp).getSelectedRowsFromTable();
                classifyTransferableContents();
                return this;
            } else if (comp instanceof JList) {
                Object[] selectedValues = ((JList) comp).getSelectedValues();
                //System.out.println("selectedValues: " + selectedValues);
                draggedImdiObjects = new ImdiTreeObject[selectedValues.length];
                for (int selectedNodeCounter = 0; selectedNodeCounter < selectedValues.length; selectedNodeCounter++) {
                    if (selectedValues[selectedNodeCounter] instanceof ImdiTreeObject) {
                        draggedImdiObjects[selectedNodeCounter] = (ImdiTreeObject) selectedValues[selectedNodeCounter];
                    }
                }
                classifyTransferableContents();
                return this;
            }
            // comp was null or an unsupported component; state stays cleared.
            return null;
        }

        // Sets the selectionContains* flags by inspecting every dragged object.
        private void classifyTransferableContents() {
            System.out.println("classifyTransferableContents");
            // classify the draggable bundle to help matching drop targets
            for (ImdiTreeObject currentDraggedObject : draggedImdiObjects) {
                if (currentDraggedObject.isLocal()) {
                    selectionContainsLocal = true;
                    System.out.println("selectionContainsLocal");
                    if (currentDraggedObject.isDirectory()) {
                        selectionContainsLocalDirectory = true;
                        System.out.println("selectionContainsLocalDirectory");
                    } else {
                        if (!currentDraggedObject.isMetaDataNode()) {
                            selectionContainsLocalFile = true;
                            System.out.println("selectionContainsLocalFile");
                            if (currentDraggedObject.isArchivableFile()) {
                                selectionContainsArchivableLocalFile = true;
                                System.out.println("selectionContainsArchivableLocalFile");
                            }
                        }
                    }
                } else {
                    selectionContainsRemote = true;
                    System.out.println("selectionContainsRemote");
                }
                if (currentDraggedObject.isMetaDataNode()) {
                    if (currentDraggedObject.isLocal() && LinorgSessionStorage.getSingleInstance().pathIsInsideCache(currentDraggedObject.getFile())) {
                        selectionContainsImdiInCache = true;
                        System.out.println("selectionContainsImdiInCache");
                    }
                    if (currentDraggedObject.isImdiChild()) {
                        selectionContainsImdiChild = true;
                        System.out.println("selectionContainsImdiChild");
                        // only an imdichild will contain a resource
                        if (currentDraggedObject.hasResource()) {
                            selectionContainsImdiResource = true;
                            System.out.println("selectionContainsImdiResource");
                        }
                    } else if (currentDraggedObject.isSession()) {
                        selectionContainsImdiSession = true;
                        System.out.println("selectionContainsImdiSession");
                    } else if (currentDraggedObject.isCatalogue()) {
                        selectionContainsImdiCatalogue = true;
                        System.out.println("selectionContainsImdiCatalogue");
                    } else if (currentDraggedObject.isCorpus()) {
                        selectionContainsImdiCorpus = true;
                        System.out.println("selectionContainsImdiCorpus");
                    }
                    if (currentDraggedObject.isFavorite()) {
                        selectionContainsFavourite = true;
                        System.out.println("selectionContainsFavourite");
                    }
                }
            }
        }

        // Performs the actual drop: clipboard paste into tables, toggling
        // favourites, adding resources/corpus links into the local corpus tree,
        // importing remote nodes, or opening the dragged nodes in a table/window.
        // Returns true only when the drop has been fully handled.
        @Override
        public boolean importData(JComponent comp, Transferable t) {
            // due to the swing api being far to keen to do a drag drop action on the windows platform users frequently loose nodes by dragging them into random locations
            // so to avoid this we check the date time from when the transferable was created and if less than x seconds reject the drop
            if (System.currentTimeMillis() - dragStartMilliSeconds < (100 * 1)) {
                // todo: (has beed reduced to 100 * 1 from 100 * 3) this may be too agressive and preventing valid drag events, particularly since "improveddraggesture" property is now set.
                return false;
            }
            try {
                System.out.println("importData: " + comp.toString());
                if (comp instanceof ImdiTable && draggedImdiObjects == null) {
                    // No in-JVM payload: treat as a clipboard paste into the table.
                    ((ImdiTable) comp).pasteIntoSelectedTableRowsFromClipBoard();
                } else {
                    //System.out.println("draggedImdiObjects: " + draggedImdiObjects);
                    if (draggedImdiObjects != null) {
                        if (comp instanceof JTree && canDropToTarget((ImdiTree) comp)) {
                            System.out.println("comp: " + comp.getName());
                            for (int draggedCounter = 0; draggedCounter < draggedImdiObjects.length; draggedCounter++) {
                                System.out.println("dragged: " + draggedImdiObjects[draggedCounter].toString());
                            }
                            if (TreeHelper.getSingleInstance().componentIsTheFavouritesTree(currentDropTarget)) {
                                // Drop onto the favourites tree: add the nodes to the favourites list.
                                boolean resultValue = LinorgFavourites.getSingleInstance().toggleFavouritesList(draggedImdiObjects, true);
                                createTransferable(null); // clear the transfer objects
                                return resultValue;
                            } else {
                                JTree dropTree = (JTree) comp;
                                DefaultMutableTreeNode targetNode = TreeHelper.getSingleInstance().getLocalCorpusTreeSingleSelection();
                                TreeHelper.getSingleInstance().addToSortQueue(targetNode);
                                Object dropTargetUserObject = targetNode.getUserObject();
                                Vector<ImdiTreeObject> importNodeList = new Vector<ImdiTreeObject>();
                                Hashtable<ImdiTreeObject, Vector> imdiNodesDeleteList = new Hashtable<ImdiTreeObject, Vector>();
                                System.out.println("to: " + dropTargetUserObject.toString());
                                // TODO: add drag to local corpus tree
                                // TODO: consider adding a are you sure you want to move that node into this node ...
                                // TODO: must prevent parent nodes being dragged into lower branches of itself
                                if (dropTargetUserObject instanceof ImdiTreeObject) {
                                    //TODO: this should also allow drop to the root node
                                    // if (((ImdiTreeObject) dropTargetUserObject).isImdiChild()) {
                                    // dropTargetUserObject = ((ImdiTreeObject) dropTargetUserObject).getParentDomNode();
                                    if (((ImdiTreeObject) dropTargetUserObject).getParentDomNode().isCmdiMetaDataNode() || ((ImdiTreeObject) dropTargetUserObject).getParentDomNode().isSession()/* || ((ImdiTreeObject) dropTargetUserObject).isImdiChild()*/) {
                                        //TODO: for now we do not allow drag on to imdi child nodes
                                        // Case: dropping archivable local files into a session/CMDI node.
                                        if (selectionContainsArchivableLocalFile == true
                                                && selectionContainsLocalFile == true
                                                && selectionContainsLocalDirectory == false
                                                && selectionContainsImdiResource == false
                                                && selectionContainsImdiCorpus == false
                                                && selectionContainsImdiSession == false
                                                && selectionContainsImdiChild == false
                                                && selectionContainsLocal == true
                                                && selectionContainsRemote == false) {
                                            System.out.println("ok to add local file");
                                            for (int draggedCounter = 0; draggedCounter < draggedImdiObjects.length; draggedCounter++) {
                                                System.out.println("dragged: " + draggedImdiObjects[draggedCounter].toString());
                                                ((ImdiTreeObject) dropTargetUserObject).requestAddNode("Resource", draggedImdiObjects[draggedCounter]);
                                            }
                                            createTransferable(null); // clear the transfer objects
                                            return true; // we have achieved the drag so return true
                                        }
                                    }
                                }
                                // allow drop to the root node wich will not be an imditreeobject
                                // if (!(dropTargetUserObject instanceof ImdiTreeObject) || ((ImdiTreeObject) dropTargetUserObject).isCorpus()) {
                                // Case: moving/importing metadata nodes (corpus/catalogue/session/child).
                                if (selectionContainsArchivableLocalFile == false
                                        // selectionContainsLocalFile == true &&
                                        && selectionContainsLocalDirectory == false
                                        && selectionContainsImdiResource == false
                                        && (selectionContainsImdiCorpus == false || selectionContainsImdiSession == false)
                                        //(selectionContainsImdiChild == false || GuiHelper.imdiSchema.nodeCanExistInNode((ImdiTreeObject) dropTargetUserObject, (ImdiTreeObject) draggedImdiObjects[draggedCounter]))// &&
                                        // selectionContainsLocal == true &&
                                        // selectionContainsRemote == false
                                        ) {
                                    System.out.println("ok to move local IMDI");
                                    for (int draggedCounter = 0; draggedCounter < draggedImdiObjects.length; draggedCounter++) {
                                        System.out.println("dragged: " + draggedImdiObjects[draggedCounter].toString());
                                        if (!((ImdiTreeObject) draggedImdiObjects[draggedCounter]).isImdiChild() || ImdiSchema.getSingleInstance().nodeCanExistInNode((ImdiTreeObject) dropTargetUserObject, (ImdiTreeObject) draggedImdiObjects[draggedCounter])) {
                                            //((ImdiTreeObject) dropTargetUserObject).requestAddNode(GuiHelper.imdiSchema.getNodeTypeFromMimeType(draggedImdiObjects[draggedCounter].mpiMimeType), "Resource", null, draggedImdiObjects[draggedCounter].getUrlString(), draggedImdiObjects[draggedCounter].mpiMimeType);
                                            // check that the node has not been dragged into itself
                                            boolean draggedIntoSelf = false;
                                            DefaultMutableTreeNode ancestorNode = targetNode;
                                            while (ancestorNode != null) {
                                                if (draggedTreeNodes[draggedCounter].equals(ancestorNode)) {
                                                    draggedIntoSelf = true;
                                                    System.out.println("found ancestor: " + draggedTreeNodes[draggedCounter] + ":" + ancestorNode);
                                                }
                                                // System.out.println("checking: " + draggedTreeNodes[draggedCounter] + ":" + ancestorNode);
                                                ancestorNode = (DefaultMutableTreeNode) ancestorNode.getParent();
                                            }
                                            // todo: test for dragged to parent session
                                            //System.out.println("found ancestor: " + draggedTreeNodes[draggedCounter] + ":" + ancestorNode);
                                            if (!draggedIntoSelf) {
                                                if (((ImdiTreeObject) draggedImdiObjects[draggedCounter]).isFavorite()) {
                                                    // todo: this does not allow the adding of favourites to the root node, note that that would need to be changed in the add menu also
                                                    ((ImdiTreeObject) dropTargetUserObject).requestAddNode(((ImdiTreeObject) draggedImdiObjects[draggedCounter]).toString(), ((ImdiTreeObject) draggedImdiObjects[draggedCounter]));
                                                } else if (!(((ImdiTreeObject) draggedImdiObjects[draggedCounter]).isLocal() && LinorgSessionStorage.getSingleInstance().pathIsInsideCache(((ImdiTreeObject) draggedImdiObjects[draggedCounter]).getFile()))) {
                                                    // Not in the local cache: queue for import further below.
                                                    importNodeList.add((ImdiTreeObject) draggedImdiObjects[draggedCounter]);
                                                } else {
                                                    // Already in the cache: confirm with the user and move the corpus link.
                                                    String targetNodeName;
                                                    if (dropTargetUserObject instanceof ImdiTreeObject) {
                                                        targetNodeName = targetNode.getUserObject().toString();
                                                    } else {
                                                        targetNodeName = ((JLabel) targetNode.getUserObject()).getText();
                                                    }
                                                    // if (draggedTreeNodes[draggedCounter].getUserObject())
                                                    int detailsOption = JOptionPane.showOptionDialog(LinorgWindowManager.getSingleInstance().linorgFrame, "Move " + draggedTreeNodes[draggedCounter].getUserObject().toString() + /*" from " + ((DefaultMutableTreeNode) ancestorNode.getParent()).getUserObject().toString() +*/ " to " + targetNodeName, "Arbil", JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE, null, new Object[]{"Move", "Cancel"}, "Cancel");
                                                    if (detailsOption == 0) {
                                                        boolean addNodeResult = true;
                                                        if (dropTargetUserObject instanceof ImdiTreeObject) {
                                                            addNodeResult = ((ImdiTreeObject) dropTargetUserObject).addCorpusLink(draggedImdiObjects[draggedCounter]);
                                                        } else {
                                                            addNodeResult = TreeHelper.getSingleInstance().addLocation(draggedImdiObjects[draggedCounter].getURI());
                                                        }
                                                        if (addNodeResult) {
                                                            if (draggedTreeNodes[draggedCounter] != null) {
                                                                if (draggedTreeNodes[draggedCounter].getParent().equals(draggedTreeNodes[draggedCounter].getRoot())) {
                                                                    System.out.println("dragged from root");
                                                                    TreeHelper.getSingleInstance().removeLocation(draggedImdiObjects[draggedCounter]);
                                                                    TreeHelper.getSingleInstance().applyRootLocations();
                                                                } else {
                                                                    ImdiTreeObject parentImdi = (ImdiTreeObject) ((DefaultMutableTreeNode) draggedTreeNodes[draggedCounter].getParent()).getUserObject();
                                                                    System.out.println("removeing from parent: " + parentImdi);
                                                                    // add the parent and the child node to the deletelist
                                                                    if (!imdiNodesDeleteList.containsKey(parentImdi)) {
                                                                        imdiNodesDeleteList.put(parentImdi, new Vector());
                                                                    }
                                                                    imdiNodesDeleteList.get(parentImdi).add(draggedImdiObjects[draggedCounter]);
                                                                    // System.out.println("delete list: " + imdiNodesDeleteList.get(parentImdi).size());
                                                                }
                                                            }
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                    if (importNodeList.size() > 0) {
                                        // TODO: finish this import code
                                        try {
                                            ImportExportDialog importExportDialog = new ImportExportDialog(dropTree);
                                            if (dropTargetUserObject instanceof ImdiTreeObject) {
                                                importExportDialog.setDestinationNode(((ImdiTreeObject) dropTargetUserObject));
                                            }
                                            importExportDialog.copyToCache(importNodeList);
                                        } catch (Exception e) {
                                            System.out.println(e.getMessage());
                                        }
                                    }
                                    // Remove the moved nodes from their previous parents.
                                    for (ImdiTreeObject currentParent : imdiNodesDeleteList.keySet()) {
                                        System.out.println("deleting by corpus link");
                                        currentParent.deleteCorpusLink(((Vector<ImdiTreeObject>) imdiNodesDeleteList.get(currentParent)).toArray(new ImdiTreeObject[]{}));
                                    }
                                    if (dropTargetUserObject instanceof ImdiTreeObject) {
                                        // TODO: this save is required to prevent user data loss, but the save and reload process may not really be required here
                                        ((ImdiTreeObject) dropTargetUserObject).saveChangesToCache(false);
                                        ((ImdiTreeObject) dropTargetUserObject).reloadNode();
                                    } else {
                                        TreeHelper.getSingleInstance().applyRootLocations();
                                    }
                                    createTransferable(null); // clear the transfer objects
                                    return true; // we have achieved the drag so return true
                                }
                            }
                        } else {
                            // Not a tree drop: show the dragged nodes in a split panel table
                            // or in a new floating table on the desktop pane.
                            Container imdiSplitPanel = findImdiDropableTarget(comp);
                            if (imdiSplitPanel instanceof LinorgSplitPanel) {
                                LinorgSplitPanel targetPanel = (LinorgSplitPanel) imdiSplitPanel;
                                ImdiTableModel dropTableModel = (ImdiTableModel) targetPanel.imdiTable.getModel();
                                dropTableModel.addImdiObjects(draggedImdiObjects);
                                createTransferable(null); // clear the transfer objects
                                return true; // we have achieved the drag so return true
                            } else if (imdiSplitPanel instanceof JDesktopPane) {
                                LinorgWindowManager.getSingleInstance().openFloatingTableOnce(draggedImdiObjects, null);
                                createTransferable(null); // clear the transfer objects
                                return true; // we have achieved the drag so return true
                            }
                        }
                    }
                }
            } catch (Exception ex) {
                GuiHelper.linorgBugCatcher.logError(ex);
            }
            createTransferable(null); // clear the transfer objects
            return false;
        }

        // Transferable: returns the dragged node array for the supported flavour.
        public Object getTransferData(DataFlavor flavor) {
            System.out.println("getTransferData");
            if (isDataFlavorSupported(flavor)) {
                return draggedImdiObjects;
            }
            return null;
        }

        // Transferable: only the single ImdiTreeObject flavour is offered.
        public DataFlavor[] getTransferDataFlavors() {
            System.out.println("getTransferDataFlavors");
            return flavors;
        }

        // Transferable: true only for the ImdiTreeObject flavour.
        public boolean isDataFlavorSupported(DataFlavor flavor) {
            System.out.println("isDataFlavorSupported");
            return flavors[0].equals(flavor);
        }
    } // end ImdiObjectSelection
} // end ArbilDragDrop
package org.neo4j.kernel.impl.transaction.xaframework;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;

import javax.transaction.xa.XAException;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;

import org.neo4j.kernel.impl.util.ArrayMap;

// make package access?
/**
 * Tracks the state of every XA transaction (enlisted resources and
 * prepare/commit/rollback progress) for one XA data source, and drives the
 * completion of transactions re-created during recovery.
 * <p>
 * Recovery bookkeeping: {@link #injectStart} increments
 * {@code recoveredTxCount} for each transaction read back from the logical
 * log; every path that completes such a transaction decrements it and calls
 * {@link #checkIfRecoveryComplete()}, which signals the transaction factory
 * once the log scan is done and the count reaches zero.
 * <p>
 * All methods are synchronized on this instance.
 */
public class XaResourceManager
{
    private final ArrayMap<XAResource,Xid> xaResourceMap =
        new ArrayMap<XAResource,Xid>();
    private final ArrayMap<Xid,XidStatus> xidMap =
        new ArrayMap<Xid,XidStatus>();
    // Number of recovered transactions still awaiting completion.
    private int recoveredTxCount = 0;

    private XaLogicalLog log = null;
    private final XaTransactionFactory tf;
    private final String name;

    XaResourceManager( XaTransactionFactory tf, String name )
    {
        this.tf = tf;
        this.name = name;
    }

    synchronized void setLogicalLog( XaLogicalLog log )
    {
        this.log = log;
    }

    /**
     * Returns the transaction the given resource is currently enlisted in.
     *
     * @throws XAException if the resource is not enlisted
     */
    synchronized XaTransaction getXaTransaction( XAResource xaRes )
        throws XAException
    {
        XidStatus status = xidMap.get( xaResourceMap.get( xaRes ) );
        if ( status == null )
        {
            throw new XAException( "Resource[" + xaRes + "] not enlisted" );
        }
        return status.getTransactionStatus().getTransaction();
    }

    /**
     * Enlists the resource in the transaction identified by {@code xid},
     * creating the transaction (and writing a start record to the logical
     * log) if it does not exist yet.
     */
    synchronized void start( XAResource xaResource, Xid xid )
        throws XAException
    {
        if ( xaResourceMap.get( xaResource ) != null )
        {
            throw new XAException( "Resource[" + xaResource
                + "] already enlisted or suspended" );
        }
        xaResourceMap.put( xaResource, xid );
        if ( xidMap.get( xid ) == null )
        {
            int identifier = log.start( xid );
            XaTransaction xaTx = tf.create( identifier );
            xidMap.put( xid, new XidStatus( xaTx ) );
        }
    }

    // Called from recovery: registers a transaction read back from the log.
    synchronized void injectStart( Xid xid, XaTransaction tx )
        throws IOException
    {
        if ( xidMap.get( xid ) != null )
        {
            throw new IOException( "Inject start failed, xid: " + xid
                + " already injected" );
        }
        xidMap.put( xid, new XidStatus( tx ) );
        recoveredTxCount++;
    }

    synchronized void resume( Xid xid ) throws XAException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        if ( status.getActive() )
        {
            throw new XAException( "Xid [" + xid + "] not suspended" );
        }
        status.setActive( true );
    }

    synchronized void join( XAResource xaResource, Xid xid )
        throws XAException
    {
        if ( xidMap.get( xid ) == null )
        {
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        if ( xaResourceMap.get( xaResource ) != null )
        {
            throw new XAException( "Resource[" + xaResource
                + "] already enlisted" );
        }
        xaResourceMap.put( xaResource, xid );
    }

    synchronized void end( XAResource xaResource, Xid xid )
        throws XAException
    {
        Xid xidEntry = xaResourceMap.remove( xaResource );
        if ( xidEntry == null )
        {
            throw new XAException( "Resource[" + xaResource
                + "] not enlisted" );
        }
    }

    synchronized void suspend( Xid xid ) throws XAException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        if ( !status.getActive() )
        {
            throw new XAException( "Xid[" + xid + "] already suspended" );
        }
        status.setActive( false );
    }

    // Delists the failed resource and marks the transaction rollback-only.
    synchronized void fail( XAResource xaResource, Xid xid )
        throws XAException
    {
        if ( xidMap.get( xid ) == null )
        {
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        Xid xidEntry = xaResourceMap.remove( xaResource );
        if ( xidEntry == null )
        {
            throw new XAException( "Resource[" + xaResource
                + "] not enlisted" );
        }
        XidStatus status = xidMap.get( xid );
        status.getTransactionStatus().markAsRollback();
    }

    synchronized void validate( XAResource xaResource ) throws XAException
    {
        XidStatus status = xidMap.get( xaResourceMap.get( xaResource ) );
        if ( status == null )
        {
            throw new XAException( "Resource[" + xaResource
                + "] not enlisted" );
        }
        if ( !status.getActive() )
        {
            throw new XAException( "Resource[" + xaResource + "] suspended" );
        }
    }

    // TODO: check so we're not currently committing on the resource
    synchronized void destroy( XAResource xaResource )
    {
        xaResourceMap.remove( xaResource );
    }

    // Per-xid state: suspension flag plus the transaction's progress record.
    private static class XidStatus
    {
        private boolean active = true;
        private TransactionStatus txStatus;

        XidStatus( XaTransaction xaTransaction )
        {
            txStatus = new TransactionStatus( xaTransaction );
        }

        void setActive( boolean active )
        {
            this.active = active;
        }

        boolean getActive()
        {
            return this.active;
        }

        TransactionStatus getTransactionStatus()
        {
            return txStatus;
        }
    }

    // Progress flags for a single transaction (prepared / commit started /
    // marked for rollback) plus the transaction itself.
    private static class TransactionStatus
    {
        private boolean prepared = false;
        private boolean commitStarted = false;
        private boolean rollback = false;
        private final XaTransaction xaTransaction;

        TransactionStatus( XaTransaction xaTransaction )
        {
            this.xaTransaction = xaTransaction;
        }

        void markAsPrepared()
        {
            prepared = true;
        }

        void markAsRollback()
        {
            rollback = true;
        }

        void markCommitStarted()
        {
            commitStarted = true;
        }

        boolean prepared()
        {
            return prepared;
        }

        boolean rollback()
        {
            return rollback;
        }

        boolean commitStarted()
        {
            return commitStarted;
        }

        XaTransaction getTransaction()
        {
            return xaTransaction;
        }

        @Override
        public String toString()
        {
            return "TransactionStatus[" + xaTransaction.getIdentifier()
                + ", prepared=" + prepared + ", commitStarted="
                + commitStarted + ", rolledback=" + rollback + "]";
        }
    }

    /**
     * Prepares the transaction. Read-only transactions are completed right
     * away and {@link XAResource#XA_RDONLY} is returned; otherwise a prepare
     * record is written and {@link XAResource#XA_OK} is returned.
     */
    synchronized int prepare( Xid xid ) throws XAException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        TransactionStatus txStatus = status.getTransactionStatus();
        XaTransaction xaTransaction = txStatus.getTransaction();
        if ( xaTransaction.isReadOnly() )
        {
            log.done( xaTransaction.getIdentifier() );
            xidMap.remove( xid );
            if ( xaTransaction.isRecovered() )
            {
                // FIX: the decrement operator had been dropped here, leaving
                // an invalid statement and never completing recovery.
                recoveredTxCount--;
                checkIfRecoveryComplete();
            }
            return XAResource.XA_RDONLY;
        }
        else
        {
            xaTransaction.prepare();
            log.prepare( xaTransaction.getIdentifier() );
            txStatus.markAsPrepared();
            return XAResource.XA_OK;
        }
    }

    // called from XaResource internal recovery
    // returns true if read only and should be removed...
    synchronized boolean injectPrepare( Xid xid ) throws IOException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            throw new IOException( "Unknown xid[" + xid + "]" );
        }
        TransactionStatus txStatus = status.getTransactionStatus();
        XaTransaction xaTransaction = txStatus.getTransaction();
        if ( xaTransaction.isReadOnly() )
        {
            xidMap.remove( xid );
            if ( xaTransaction.isRecovered() )
            {
                recoveredTxCount--;
                checkIfRecoveryComplete();
            }
            return true;
        }
        else
        {
            // Remember log order so checkXids() can complete transactions
            // in the sequence they appeared in the logical log.
            txOrderMap.put( xid, nextTxOrder++ );
            txStatus.markAsPrepared();
            return false;
        }
    }

    private Map<Xid,Integer> txOrderMap = new HashMap<Xid,Integer>();
    private int nextTxOrder = 0;

    // called during recovery
    // if not read only transaction will be commited.
    synchronized void injectOnePhaseCommit( Xid xid ) throws XAException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        TransactionStatus txStatus = status.getTransactionStatus();
        txOrderMap.put( xid, nextTxOrder++ );
        txStatus.markAsPrepared();
        txStatus.markCommitStarted();
        XaTransaction xaTransaction = txStatus.getTransaction();
        xaTransaction.commit();
    }

    synchronized void injectTwoPhaseCommit( Xid xid ) throws XAException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        TransactionStatus txStatus = status.getTransactionStatus();
        txOrderMap.put( xid, nextTxOrder++ );
        txStatus.markAsPrepared();
        txStatus.markCommitStarted();
        XaTransaction xaTransaction = txStatus.getTransaction();
        xaTransaction.commit();
    }

    /**
     * Commits the transaction, writing the appropriate 1PC/2PC log records
     * for non-recovered transactions and finishing recovery bookkeeping for
     * recovered ones.
     *
     * @throws XAException if the xid is unknown, or the transaction is not
     *             prepared or is marked rollback-only
     */
    synchronized XaTransaction commit( Xid xid, boolean onePhase )
        throws XAException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        TransactionStatus txStatus = status.getTransactionStatus();
        XaTransaction xaTransaction = txStatus.getTransaction();
        if ( onePhase )
        {
            if ( !xaTransaction.isReadOnly() )
            {
                if ( !xaTransaction.isRecovered() )
                {
                    xaTransaction.prepare();
                    log.commitOnePhase( xaTransaction.getIdentifier() );
                }
            }
            txStatus.markAsPrepared();
        }
        if ( !txStatus.prepared() || txStatus.rollback() )
        {
            throw new XAException( "Transaction not prepared or "
                + "(marked as) rolledbacked" );
        }
        if ( !xaTransaction.isReadOnly() )
        {
            if ( !xaTransaction.isRecovered() )
            {
                if ( !onePhase )
                {
                    log.commitTwoPhase( xaTransaction.getIdentifier() );
                }
            }
            txStatus.markCommitStarted();
            xaTransaction.commit();
        }
        if ( !xaTransaction.isRecovered() )
        {
            log.done( xaTransaction.getIdentifier() );
        }
        xidMap.remove( xid );
        if ( xaTransaction.isRecovered() )
        {
            recoveredTxCount--;
            checkIfRecoveryComplete();
        }
        return xaTransaction;
    }

    synchronized XaTransaction rollback( Xid xid ) throws XAException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        TransactionStatus txStatus = status.getTransactionStatus();
        XaTransaction xaTransaction = txStatus.getTransaction();
        if ( txStatus.commitStarted() )
        {
            throw new XAException( "Transaction already started commit" );
        }
        txStatus.markAsRollback();
        xaTransaction.rollback();
        log.done( xaTransaction.getIdentifier() );
        xidMap.remove( xid );
        if ( xaTransaction.isRecovered() )
        {
            recoveredTxCount--;
            checkIfRecoveryComplete();
        }
        return txStatus.getTransaction();
    }

    synchronized XaTransaction forget( Xid xid ) throws XAException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            // Consistent with every other operation on an unknown xid
            // (previously this fell through to a NullPointerException).
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        TransactionStatus txStatus = status.getTransactionStatus();
        XaTransaction xaTransaction = txStatus.getTransaction();
        log.done( xaTransaction.getIdentifier() );
        xidMap.remove( xid );
        if ( xaTransaction.isRecovered() )
        {
            recoveredTxCount--;
            checkIfRecoveryComplete();
        }
        return xaTransaction;
    }

    synchronized void markAsRollbackOnly( Xid xid ) throws XAException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            throw new XAException( "Unknown xid[" + xid + "]" );
        }
        TransactionStatus txStatus = status.getTransactionStatus();
        txStatus.markAsRollback();
    }

    synchronized Xid[] recover( int flag ) throws XAException
    {
        List<Xid> xids = new ArrayList<Xid>();
        Iterator<Xid> keyIterator = xidMap.keySet().iterator();
        while ( keyIterator.hasNext() )
        {
            xids.add( keyIterator.next() );
        }
        return xids.toArray( new Xid[xids.size()] );
    }

    // called from neostore internal recovery
    synchronized void pruneXid( Xid xid ) throws IOException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            throw new IOException( "Unknown xid[" + xid + "]" );
        }
        TransactionStatus txStatus = status.getTransactionStatus();
        XaTransaction xaTransaction = txStatus.getTransaction();
        xidMap.remove( xid );
        if ( xaTransaction.isRecovered() )
        {
            recoveredTxCount--;
            checkIfRecoveryComplete();
        }
    }

    synchronized void pruneXidIfExist( Xid xid ) throws IOException
    {
        XidStatus status = xidMap.get( xid );
        if ( status == null )
        {
            return;
        }
        TransactionStatus txStatus = status.getTransactionStatus();
        XaTransaction xaTransaction = txStatus.getTransaction();
        xidMap.remove( xid );
        if ( xaTransaction.isRecovered() )
        {
            recoveredTxCount--;
            checkIfRecoveryComplete();
        }
    }

    /**
     * Completes recovered transactions in logical-log order: 1PC
     * transactions that had started committing, and non-prepared
     * transactions, are marked as done; prepared 2PC transactions are left
     * for the transaction manager to decide.
     */
    synchronized void checkXids() throws IOException
    {
        Iterator<Xid> keyIterator = xidMap.keySet().iterator();
        LinkedList<Xid> xids = new LinkedList<Xid>();
        while ( keyIterator.hasNext() )
        {
            xids.add( keyIterator.next() );
        }
        // comparator only used here
        Collections.sort( xids, new Comparator<Xid>()
        {
            public int compare( Xid o1, Xid o2 )
            {
                // Xids without a recorded order sort last.
                Integer id1 = txOrderMap.get( o1 );
                Integer id2 = txOrderMap.get( o2 );
                if ( id1 == null && id2 == null )
                {
                    return 0;
                }
                if ( id1 == null )
                {
                    return Integer.MAX_VALUE;
                }
                if ( id2 == null )
                {
                    return Integer.MIN_VALUE;
                }
                return id1 - id2;
            }
        } );
        txOrderMap.clear(); // = null;
        Logger logger = Logger.getLogger( tf.getClass().getName() );
        while ( !xids.isEmpty() )
        {
            Xid xid = xids.removeFirst();
            XidStatus status = xidMap.get( xid );
            TransactionStatus txStatus = status.getTransactionStatus();
            XaTransaction xaTransaction = txStatus.getTransaction();
            int identifier = xaTransaction.getIdentifier();
            if ( xaTransaction.isRecovered() )
            {
                if ( txStatus.commitStarted() )
                {
                    logger.fine( "Marking 1PC [" + name + "] tx "
                        + identifier + " as done" );
                    log.doneInternal( identifier );
                    xidMap.remove( xid );
                    recoveredTxCount--;
                }
                else if ( !txStatus.prepared() )
                {
                    logger.fine( "Rolling back non prepared tx [" + name
                        + "]" + "txIdent[" + identifier + "]" );
                    log.doneInternal( xaTransaction.getIdentifier() );
                    xidMap.remove( xid );
                    recoveredTxCount--;
                }
                else
                {
                    logger.fine( "2PC tx [" + name + "] " + txStatus
                        + " txIdent[" + identifier + "]" );
                }
            }
        }
        checkIfRecoveryComplete();
    }

    private void checkIfRecoveryComplete()
    {
        if ( log.scanIsComplete() && recoveredTxCount == 0 )
        {
            // log.makeNewLog();
            tf.recoveryComplete();
        }
    }

    // for testing, do not use!
    synchronized void reset()
    {
        xaResourceMap.clear();
        xidMap.clear();
        log.reset();
    }

    /**
     * Returns <CODE>true</CODE> if recovered transactions exist. This method
     * is useful to invoke after the logical log has been opened to detirmine if
     * there are any recovered transactions waiting for the TM to tell them what
     * to do.
     *
     * @return True if recovered transactions exist
     */
    public boolean hasRecoveredTransactions()
    {
        return recoveredTxCount > 0;
    }
}
package org.spongepowered.common.mixin.core.block.tiles;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import net.minecraft.block.Block;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.util.BlockPos;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.block.tileentity.TileEntity;
import org.spongepowered.api.block.tileentity.TileEntityType;
import org.spongepowered.api.data.DataContainer;
import org.spongepowered.api.data.DataView;
import org.spongepowered.api.data.MemoryDataContainer;
import org.spongepowered.api.data.Queries;
import org.spongepowered.api.data.manipulator.DataManipulator;
import org.spongepowered.api.util.annotation.NonnullByDefault;
import org.spongepowered.api.util.persistence.InvalidDataException;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.data.type.SpongeTileEntityType;
import org.spongepowered.common.data.util.DataQueries;
import org.spongepowered.common.data.util.DataUtil;
import org.spongepowered.common.data.util.NbtDataUtil;
import org.spongepowered.common.entity.PlayerTracker;
import org.spongepowered.common.interfaces.block.tile.IMixinTileEntity;
import org.spongepowered.common.interfaces.data.IMixinCustomDataHolder;
import org.spongepowered.common.interfaces.world.IMixinWorld;
import org.spongepowered.common.registry.type.block.TileEntityTypeRegistryModule;
import org.spongepowered.common.util.SpongeHooks;
import org.spongepowered.common.util.VecHelper;
import org.spongepowered.common.util.persistence.NbtTranslator;

import java.util.Collection;
import java.util.List;

/**
 * Mixin applied to every vanilla {@link net.minecraft.tileentity.TileEntity},
 * implementing the Sponge {@link TileEntity} API: serialization to
 * DataContainers, custom-data (SpongeData) persistence, and block-change
 * tracking hooks.
 */
@NonnullByDefault
@Mixin(net.minecraft.tileentity.TileEntity.class)
public abstract class MixinTileEntity implements TileEntity, IMixinTileEntity {

    // Resolved once per instance from the registry mapping of the concrete class.
    private final TileEntityType tileType = SpongeImpl.getRegistry().getTranslated(this.getClass(), TileEntityType.class);

    @Shadow protected boolean tileEntityInvalid;
    @Shadow protected net.minecraft.world.World worldObj;
    @Shadow private int blockMetadata;
    @Shadow protected BlockPos pos;

    @Shadow public abstract BlockPos getPos();
    @Shadow public abstract Block getBlockType();
    @Shadow public abstract void writeToNBT(NBTTagCompound compound);
    @Shadow public abstract void markDirty();

    /**
     * Server-side hook on markDirty: when another tile entity (e.g. a hopper)
     * is the one currently ticking and dirties this one, record it as the
     * notifier for block-tracking purposes.
     */
    @Inject(method = "markDirty", at = @At(value = "HEAD"))
    public void onMarkDirty(CallbackInfo ci) {
        if (this.worldObj != null && !this.worldObj.isRemote) {
            IMixinWorld world = (IMixinWorld) this.worldObj;
            // This handles transfers to this TE from a source such as a Hopper
            if (world.getCurrentTickTileEntity().isPresent() && this != world.getCurrentTickTileEntity().get()) {
                net.minecraft.tileentity.TileEntity te = (net.minecraft.tileentity.TileEntity) world.getCurrentTickTileEntity().get();
                SpongeHooks.tryToTrackBlock(te.getWorld(), te, te.getPos(), this.getBlockType(), this.pos, PlayerTracker.Type.NOTIFIER);
            }
        }
    }

    /**
     * Registers a Sponge TileEntityType for every tile entity class vanilla
     * registers via addMapping.
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    @Inject(method = "addMapping(Ljava/lang/Class;Ljava/lang/String;)V", at = @At(value = "RETURN"))
    private static void onRegister(Class clazz, String name, CallbackInfo callbackInfo) {
        if (clazz == null) {
            // do nothing This is for backwards compatibility since Minecraft would otherwise throw exceptions for
            // tile entities that aren't upgraded or updated.
            // FIX: previously execution fell through and constructed a
            // SpongeTileEntityType with a null class; skip registration instead.
            return;
        }
        final String id = TileEntityTypeRegistryModule.getInstance().getIdForName(name);
        final TileEntityType tileEntityType = new SpongeTileEntityType((Class<? extends TileEntity>) clazz, name, id);
        TileEntityTypeRegistryModule.getInstance().registerAdditionalCatalog(tileEntityType);
    }

    @Override
    public Location<World> getLocation() {
        return new Location<>((World) this.worldObj, VecHelper.toVector(this.getPos()));
    }

    @Override
    public int getContentVersion() {
        return 1;
    }

    /**
     * Serializes this tile entity: world id, position, type id, the raw
     * vanilla NBT (with Sponge custom data filtered out to avoid duplication)
     * and any data manipulators.
     */
    @Override
    public DataContainer toContainer() {
        final DataContainer container = new MemoryDataContainer()
            .set(Queries.CONTENT_VERSION, getContentVersion())
            .set(Queries.WORLD_ID, ((World) this.worldObj).getUniqueId().toString())
            .set(Queries.POSITION_X, this.getPos().getX())
            .set(Queries.POSITION_Y, this.getPos().getY())
            .set(Queries.POSITION_Z, this.getPos().getZ())
            .set(DataQueries.BLOCK_ENTITY_TILE_TYPE, this.tileType.getId());
        final NBTTagCompound compound = new NBTTagCompound();
        this.writeToNBT(compound);
        NbtDataUtil.filterSpongeCustomData(compound); // We must filter the custom data so it isn't stored twice
        container.set(DataQueries.UNSAFE_NBT, NbtTranslator.getInstance().translateFrom(compound));
        final Collection<DataManipulator<?, ?>> manipulators = getContainers();
        if (!manipulators.isEmpty()) {
            container.set(DataQueries.DATA_MANIPULATORS, DataUtil.getSerializedManipulatorList(manipulators));
        }
        return container;
    }

    @Override
    public boolean validateRawData(DataContainer container) {
        return container.contains(Queries.WORLD_ID)
            && container.contains(Queries.POSITION_X)
            && container.contains(Queries.POSITION_Y)
            && container.contains(Queries.POSITION_Z)
            && container.contains(DataQueries.BLOCK_ENTITY_TILE_TYPE)
            && container.contains(DataQueries.UNSAFE_NBT);
    }

    @Override
    public void setRawData(DataContainer container) throws InvalidDataException {
        // Intentionally unimplemented: raw data restoration is not supported here.
    }

    @Override
    public boolean isValid() {
        return !this.tileEntityInvalid;
    }

    @Override
    public void setValid(boolean valid) {
        // FIX: isValid() returns !tileEntityInvalid, so the flag must be
        // inverted here; assigning 'valid' directly made setValid(true)
        // mark the tile entity as invalid.
        this.tileEntityInvalid = !valid;
    }

    @Override
    public final TileEntityType getType() {
        return this.tileType;
    }

    @Override
    public BlockState getBlock() {
        return (BlockState) this.worldObj.getBlockState(this.getPos());
    }

    /**
     * Hooks into vanilla's writeToNBT to call {@link #writeToNbt}.
     *
     * <p>This makes it easier for other entity mixins to override writeToNBT
     * without having to specify the <code>@Inject</code> annotation.</p>
     *
     * @param compound The compound vanilla writes to (unused because we write to SpongeData)
     * @param ci (Unused) callback info
     */
    @Inject(method = "Lnet/minecraft/tileentity/TileEntity;writeToNBT(Lnet/minecraft/nbt/NBTTagCompound;)V", at = @At("HEAD"))
    public void onWriteToNBT(NBTTagCompound compound, CallbackInfo ci) {
        this.writeToNbt(this.getSpongeData());
    }

    /**
     * Hooks into vanilla's readFromNBT to call {@link #readFromNbt}.
     *
     * <p>This makes it easier for other entity mixins to override readFromNbt
     * without having to specify the <code>@Inject</code> annotation.</p>
     *
     * @param compound The compound vanilla reads from (unused because we read from SpongeData)
     * @param ci (Unused) callback info
     */
    @Inject(method = "Lnet/minecraft/tileentity/TileEntity;readFromNBT(Lnet/minecraft/nbt/NBTTagCompound;)V", at = @At("RETURN"))
    public void onReadFromNBT(NBTTagCompound compound, CallbackInfo ci) {
        this.readFromNbt(this.getSpongeData());
    }

    /**
     * Read extra data (SpongeData) from the tile entity's NBT tag.
     *
     * @param compound The SpongeData compound to read from
     */
    @Override
    public void readFromNbt(NBTTagCompound compound) {
        if (this instanceof IMixinCustomDataHolder) {
            if (compound.hasKey(NbtDataUtil.CUSTOM_MANIPULATOR_TAG_LIST, NbtDataUtil.TAG_LIST)) {
                final NBTTagList list = compound.getTagList(NbtDataUtil.CUSTOM_MANIPULATOR_TAG_LIST, NbtDataUtil.TAG_COMPOUND);
                final ImmutableList.Builder<DataView> builder = ImmutableList.builder();
                if (list != null && list.tagCount() != 0) {
                    for (int i = 0; i < list.tagCount(); i++) {
                        final NBTTagCompound internal = list.getCompoundTagAt(i);
                        builder.add(NbtTranslator.getInstance().translateFrom(internal));
                    }
                }
                try {
                    final List<DataManipulator<?, ?>> manipulators = DataUtil.deserializeManipulatorList(builder.build());
                    for (DataManipulator<?, ?> manipulator : manipulators) {
                        offer(manipulator);
                    }
                } catch (InvalidDataException e) {
                    // Corrupt plugin data must not abort loading the tile entity.
                    SpongeImpl.getLogger().error("Could not deserialize custom plugin data! ", e);
                }
            }
        }
    }

    /**
     * Write extra data (SpongeData) to the tile entity's NBT tag.
     *
     * @param compound The SpongeData compound to write to
     */
    @Override
    public void writeToNbt(NBTTagCompound compound) {
        if (this instanceof IMixinCustomDataHolder) {
            final List<DataView> manipulatorViews = DataUtil.getSerializedManipulatorList(((IMixinCustomDataHolder) this).getCustomManipulators());
            final NBTTagList manipulatorTagList = new NBTTagList();
            for (DataView dataView : manipulatorViews) {
                manipulatorTagList.appendTag(NbtTranslator.getInstance().translateData(dataView));
            }
            compound.setTag(NbtDataUtil.CUSTOM_MANIPULATOR_TAG_LIST, manipulatorTagList);
        }
    }

    // Overridden by concrete tile-entity mixins to contribute their vanilla manipulators.
    public void supplyVanillaManipulators(List<DataManipulator<?, ?>> manipulators) {
    }

    @Override
    public Collection<DataManipulator<?, ?>> getContainers() {
        final List<DataManipulator<?, ?>> list = Lists.newArrayList();
        this.supplyVanillaManipulators(list);
        if (this instanceof IMixinCustomDataHolder) {
            list.addAll(((IMixinCustomDataHolder) this).getCustomManipulators());
        }
        return list;
    }
}
package org.wallride.web.controller.admin.article; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.support.MessageSourceAccessor; import org.springframework.http.HttpStatus; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.validation.BindException; import org.springframework.validation.BindingResult; import org.springframework.validation.ObjectError; import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.*; import org.springframework.web.servlet.mvc.support.RedirectAttributes; import org.wallride.core.domain.Article; import org.wallride.core.domain.CategoryTree; import org.wallride.core.domain.Page; import org.wallride.core.service.ArticleService; import org.wallride.core.service.CategoryService; import org.wallride.core.service.DuplicateCodeException; import org.wallride.core.service.EmptyCodeException; import org.wallride.core.support.AuthorizedUser; import org.wallride.web.support.DomainObjectSavedModel; import org.wallride.web.support.RestValidationErrorModel; import javax.inject.Inject; import javax.validation.Valid; import javax.validation.groups.Default; @Controller @RequestMapping("/{language}/articles/edit") public class ArticleEditController { private static Logger logger = LoggerFactory.getLogger(ArticleEditController.class); @Inject private ArticleService articleService; @Inject private CategoryService categoryService; @Inject private MessageSourceAccessor messageSourceAccessor; @ModelAttribute("article") public Article setupArticle( @PathVariable String language, @RequestParam long id) { return articleService.readArticleById(id, language); } @ModelAttribute("categoryTree") public CategoryTree setupCategoryTree(@PathVariable String language) { return categoryService.readCategoryTree(language); } @ExceptionHandler(BindException.class) @ResponseStatus(HttpStatus.BAD_REQUEST) public @ResponseBody 
RestValidationErrorModel bindException(BindException e) { logger.debug("BindException", e); return RestValidationErrorModel.fromBindingResult(e.getBindingResult(), messageSourceAccessor); } @RequestMapping(method=RequestMethod.GET) public String edit( @PathVariable String language, @RequestParam long id, Model model, RedirectAttributes redirectAttributes) { Article article = (Article) model.asMap().get("article"); if (!language.equals(article.getLanguage())) { redirectAttributes.addAttribute("language", language); return "redirect:/_admin/{language}/articles/index"; } ArticleEditForm form = ArticleEditForm.fromDomainObject(article); model.addAttribute("form", form); Article draft = articleService.readDraftById(id); model.addAttribute("draft", draft); return "/article/edit"; } @RequestMapping(method=RequestMethod.GET, params="draft") public String editDraft( @PathVariable String language, @RequestParam long id, Model model, RedirectAttributes redirectAttributes) { Article article = (Article) model.asMap().get("article"); if (!language.equals(article.getLanguage())) { redirectAttributes.addAttribute("language", language); return "redirect:/_admin/{language}/articles/index"; } Article draft = articleService.readDraftById(id); if (draft == null) { redirectAttributes.addAttribute("language", language); redirectAttributes.addAttribute("id", id); return "redirect:/_admin/{language}/articles/edit?id={id}"; } ArticleEditForm form = ArticleEditForm.fromDomainObject(draft); model.addAttribute("form", form); return "/article/edit"; } @RequestMapping(method=RequestMethod.POST, params="draft") public @ResponseBody DomainObjectSavedModel saveAsDraft( @PathVariable String language, @Validated @ModelAttribute("form") ArticleEditForm form, BindingResult errors, Model model, AuthorizedUser authorizedUser) throws BindException { if (errors.hasErrors()) { for (ObjectError error : errors.getAllErrors()) { if (!"validation.NotNull".equals(error.getCode())) { throw new 
BindException(errors); } } } Article article = (Article) model.asMap().get("article"); try { articleService.saveArticleAsDraft(form.buildArticleUpdateRequest(), authorizedUser); } catch (EmptyCodeException e) { errors.rejectValue("code", "NotNull"); } catch (DuplicateCodeException e) { errors.rejectValue("code", "NotDuplicate"); } if (errors.hasErrors()) { logger.debug("Errors: {}", errors); throw new BindException(errors); } return new DomainObjectSavedModel<>(article); } @RequestMapping(method=RequestMethod.POST, params="publish") public String saveAsPublished( @PathVariable String language, @Validated({Default.class, ArticleEditForm.GroupPublish.class}) @ModelAttribute("form") ArticleEditForm form, BindingResult errors, AuthorizedUser authorizedUser, RedirectAttributes redirectAttributes) { if (errors.hasErrors()) { return "/article/edit"; } Article article = null; try { article = articleService.saveArticleAsPublished(form.buildArticleUpdateRequest(), authorizedUser); } catch (EmptyCodeException e) { errors.rejectValue("code", "NotNull"); } catch (DuplicateCodeException e) { errors.rejectValue("code", "NotDuplicate"); } if (errors.hasErrors()) { logger.debug("Errors: {}", errors); return "/article/edit"; } redirectAttributes.addFlashAttribute("savedArticle", article); redirectAttributes.addAttribute("language", language); redirectAttributes.addAttribute("id", article.getId()); return "redirect:/_admin/{language}/articles/describe?id={id}"; } @RequestMapping(method=RequestMethod.POST, params="unpublish") public String saveAsUnpublished( @PathVariable String language, @Validated({Default.class, ArticleEditForm.GroupPublish.class}) @ModelAttribute("form") ArticleEditForm form, BindingResult errors, AuthorizedUser authorizedUser, RedirectAttributes redirectAttributes) { if (errors.hasErrors()) { return "/article/edit"; } Article article = null; try { article = articleService.saveArticleAsUnpublished(form.buildArticleUpdateRequest(), authorizedUser); } catch 
(EmptyCodeException e) { errors.rejectValue("code", "NotNull"); } catch (DuplicateCodeException e) { errors.rejectValue("code", "NotDuplicate"); } if (errors.hasErrors()) { logger.debug("Errors: {}", errors); return "/article/edit"; } redirectAttributes.addFlashAttribute("savedArticle", article); redirectAttributes.addAttribute("language", language); redirectAttributes.addAttribute("id", article.getId()); return "redirect:/_admin/{language}/articles/describe?id={id}"; } @RequestMapping(method=RequestMethod.POST, params="update") public String update( @PathVariable String language, @Validated({Default.class, ArticleEditForm.GroupPublish.class}) @ModelAttribute("form") ArticleEditForm form, BindingResult errors, AuthorizedUser authorizedUser, RedirectAttributes redirectAttributes) { if (errors.hasErrors()) { return "/article/edit"; } Article article = null; try { article = articleService.saveArticle(form.buildArticleUpdateRequest(), authorizedUser); } catch (EmptyCodeException e) { errors.rejectValue("code", "NotNull"); } catch (DuplicateCodeException e) { errors.rejectValue("code", "NotDuplicate"); } if (errors.hasErrors()) { logger.debug("Errors: {}", errors); return "/article/edit"; } redirectAttributes.addFlashAttribute("savedArticle", article); redirectAttributes.addAttribute("language", language); redirectAttributes.addAttribute("id", article.getId()); return "redirect:/_admin/{language}/articles/describe?id={id}"; } @RequestMapping(method=RequestMethod.POST, params="cancel") public String cancel( @Valid @ModelAttribute("form") ArticleEditForm form, RedirectAttributes redirectAttributes) { redirectAttributes.addAttribute("id", form.getId()); return "redirect:/_admin/articles/describe/{id}"; } }
package techreborn.blockentity.machine.tier1;

import net.minecraft.block.BlockState;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.recipe.RecipeType;
import net.minecraft.util.math.Direction;
import reborncore.api.IToolDrop;
import reborncore.api.blockentity.InventoryProvider;
import reborncore.client.containerBuilder.IContainerProvider;
import reborncore.client.containerBuilder.builder.BuiltContainer;
import reborncore.client.containerBuilder.builder.ContainerBuilder;
import reborncore.common.blocks.BlockMachineBase;
import reborncore.common.powerSystem.PowerAcceptorBlockEntity;
import reborncore.common.registration.RebornRegister;
import reborncore.common.registration.config.ConfigRegistry;
import reborncore.common.util.RebornInventory;
import techreborn.TechReborn;
import techreborn.init.TRContent;
import techreborn.utils.RecipeUtils;
import techreborn.init.TRBlockEntities;

/**
 * Electric furnace machine: consumes EU to run vanilla smelting recipes.
 * Slot layout: 0 = input, 1 = output, 2 = energy/charge slot.
 */
@RebornRegister(TechReborn.MOD_ID)
public class ElectricFurnaceBlockEntity extends PowerAcceptorBlockEntity
    implements IToolDrop, InventoryProvider, IContainerProvider {

    @ConfigRegistry(config = "machines", category = "electric_furnace", key = "ElectricFurnaceInput", comment = "Electric Furnace Max Input (Value in EU)")
    public static int maxInput = 32;
    @ConfigRegistry(config = "machines", category = "electric_furnace", key = "ElectricFurnaceMaxEnergy", comment = "Electric Furnace Max Energy (Value in EU)")
    public static int maxEnergy = 1000;

    public RebornInventory<ElectricFurnaceBlockEntity> inventory = new RebornInventory<>(3, "ElectricFurnaceBlockEntity", 64, this);
    // Ticks of smelting completed on the current item.
    public int progress;
    // Base number of ticks for one smelt; effective time is scaled by the
    // inherited speed multiplier (upgrades) in tick()/gaugeProgressScaled().
    public int fuelScale = 100;
    // EU cost factor per tick while smelting (scaled by getEuPerTick()).
    public int cost = 6;
    int input1 = 0;
    int output = 1;
    // Last burning state pushed to the block state; see updateState().
    boolean wasBurning = false;

    public ElectricFurnaceBlockEntity() {
        super(TRBlockEntities.ELECTRIC_FURNACE );
    }

    /**
     * Scales current progress to the given range for the GUI progress arrow.
     * NOTE(review): if getSpeedMultiplier() ever reaches 1.0 the divisor
     * becomes zero — presumably upgrades keep it below 1.0; confirm upstream.
     */
    public int gaugeProgressScaled(int scale) {
        return progress * scale / (int) (fuelScale * (1.0 - getSpeedMultiplier()));
    }

    /**
     * Completes one smelt: moves/merges the recipe result into the output
     * slot and shrinks the input by one. Assumes canSmelt() was checked.
     */
    public void cookItems() {
        if (canSmelt()) {
            final ItemStack itemstack = RecipeUtils.getMatchingRecipes(world, RecipeType.SMELTING, inventory.getInvStack(input1));

            if (inventory.getInvStack(output).isEmpty()) {
                inventory.setInvStack(output, itemstack.copy());
            } else if (inventory.getInvStack(output).isItemEqualIgnoreDamage(itemstack)) {
                inventory.getInvStack(output).increment(itemstack.getCount());
            }
            if (inventory.getInvStack(input1).getCount() > 1) {
                inventory.shrinkSlot(input1, 1);
            } else {
                inventory.setInvStack(input1, ItemStack.EMPTY);
            }
        }
    }

    /**
     * True when the input has a smelting recipe whose result fits into the
     * output slot (empty, or same item with room up to both stack limits).
     */
    public boolean canSmelt() {
        if (inventory.getInvStack(input1).isEmpty()) {
            return false;
        }
        final ItemStack itemstack = RecipeUtils.getMatchingRecipes(world, RecipeType.SMELTING, inventory.getInvStack(input1));
        if (itemstack.isEmpty()) {
            return false;
        }
        if (inventory.getInvStack(output).isEmpty()) {
            return true;
        }
        if (!inventory.getInvStack(output).isItemEqualIgnoreDamage(itemstack)) {
            return false;
        }
        final int result = inventory.getInvStack(output).getCount() + itemstack.getCount();
        return result <= this.inventory.getStackLimit() && result <= itemstack.getMaxCount();
    }

    // "Burning" here means: enough stored energy for at least one tick of work.
    public boolean isBurning() {
        return getEnergy() > getEuPerTick(cost);
    }

    /** Returns a copy of the smelting result for the given stack, or EMPTY. */
    public ItemStack getResultFor(ItemStack stack) {
        final ItemStack result = RecipeUtils.getMatchingRecipes(world, RecipeType.SMELTING, stack);
        if (!result.isEmpty()) {
            return result.copy();
        }
        return ItemStack.EMPTY;
    }

    /**
     * Pushes the burning state into the block's ACTIVE property, deliberately
     * skipping one tick when a smelt just finished but another can start, so
     * the texture does not flicker on/off between items.
     */
    public void updateState() {
        if (wasBurning != (progress > 0)) {
            // skips updating the block state for 1 tick, to prevent the machine from
            // turning on/off rapidly causing fps drops
            if (wasBurning && progress == 0 && canSmelt()) {
                wasBurning = true;
                return;
            }
            final BlockState BlockStateContainer = world.getBlockState(pos);
            if (BlockStateContainer.getBlock() instanceof BlockMachineBase) {
                final BlockMachineBase blockMachineBase = (BlockMachineBase) BlockStateContainer.getBlock();
                if (BlockStateContainer.get(BlockMachineBase.ACTIVE) != progress > 0)
                    blockMachineBase.setActive(progress > 0, world, pos);
            }
            wasBurning = (progress > 0);
        }
    }

    public int getBurnTime() {
        return progress;
    }

    public void setBurnTime(final int burnTime) {
        this.progress = burnTime;
    }

    // TilePowerAcceptor
    @Override
    public void tick() {
        super.tick();
        charge(2); // recharge internal buffer from the battery slot
        if (world.isClient) {
            return;
        }
        final boolean burning = isBurning();
        boolean updateInventory = false;
        if (isBurning() && canSmelt()) {
            updateState();
            if (canUseEnergy(getEuPerTick(cost))) {
                useEnergy(getEuPerTick(cost));
                progress++;
                // Smelt completes after the speed-adjusted time, but never
                // faster than 5 ticks.
                if (progress >= Math.max((int) (fuelScale * (1.0 - getSpeedMultiplier())), 5)) {
                    progress = 0;
                    cookItems();
                    updateInventory = true;
                }
            }
        } else {
            updateState();
        }
        if (burning != isBurning()) {
            updateInventory = true;
        }
        if (updateInventory) {
            markDirty();
        }
    }

    @Override
    public double getBaseMaxPower() {
        return maxEnergy;
    }

    @Override
    public boolean canAcceptEnergy(final Direction direction) {
        return true;
    }

    @Override
    public boolean canProvideEnergy(final Direction direction) {
        return false;
    }

    @Override
    public double getBaseMaxOutput() {
        return 0;
    }

    @Override
    public double getBaseMaxInput() {
        return maxInput;
    }

    // IToolDrop
    @Override
    public ItemStack getToolDrop(final PlayerEntity entityPlayer) {
        return TRContent.Machine.ELECTRIC_FURNACE.getStack();
    }

    // ItemHandlerProvider
    @Override
    public RebornInventory<ElectricFurnaceBlockEntity> getInventory() {
        return inventory;
    }

    // IContainerProvider
    @Override
    public BuiltContainer createContainer(int syncID, final PlayerEntity player) {
        return new ContainerBuilder("electricfurnace").player(player.inventory).inventory().hotbar().addInventory()
            .blockEntity(this).slot(0, 55, 45).outputSlot(1, 101, 45).energySlot(2, 8, 72).syncEnergyValue()
            .syncIntegerValue(this::getBurnTime, this::setBurnTime).addInventory().create(this, syncID);
    }
}
package org.eclipse.ice.caebat.launcher; import java.io.InputStream; import java.util.ArrayList; import java.util.Scanner; import javax.xml.bind.annotation.XmlRootElement; import org.eclipse.ice.datastructures.ICEObject.IUpdateable; import org.eclipse.ice.datastructures.ICEObject.IUpdateableListener; import org.eclipse.ice.datastructures.form.AllowedValueType; import org.eclipse.ice.datastructures.form.DataComponent; import org.eclipse.ice.datastructures.form.Entry; import org.eclipse.ice.datastructures.form.FormStatus; import org.eclipse.ice.datastructures.form.TableComponent; import org.eclipse.ice.io.ips.IPSReader; import org.eclipse.ice.io.ips.IPSWriter; import org.eclipse.ice.item.jobLauncher.JobLauncher; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.Path; /** * <!-- begin-UML-doc --> * <p> * This class inherits from JobLauncher form. It will create the Caebat launcher * so that it can remote execute the code. * </p> * <!-- end-UML-doc --> * * @author s4h */ @XmlRootElement(name = "CaebatLauncher") public class CaebatLauncher extends JobLauncher implements IUpdateableListener { /** * The execution command */ private String fullExecCMD; /** * The default CAEBAT home directory. */ private String CAEBAT_ROOT; /** * The default IPS home directory. */ private String IPS_ROOT; /** * A nullary constructor that delegates to the project constructor. */ public CaebatLauncher() { this(null); } /** * <!-- begin-UML-doc --> * <p> * The constructor. Takes an IProject argument. Calls the super constructor * on JobLauncher. * </p> * <!-- end-UML-doc --> * * @param project * <p> * The project space. 
* </p> */ public CaebatLauncher(IProject project) { // begin-user-code // Call the JobLauncher constructor super(project); return; // end-user-code } /** * This operations sets up some CAEBAT-specific information for the * launcher, including the default project installation directory. */ protected void setupItemInfo() { // begin-user-code // Set the name and description of the Item setName("Caebat Launcher"); setDescription("Caebat is a coupled battery and " + "physics simulation from ORNL."); // Set the name of the home directory CAEBAT_ROOT = "/home/batsim/caebat"; IPS_ROOT = "$IPS_ROOT"; return; // end-user-code } /** * <!-- begin-UML-doc --> * <p> * This operation overrides setupForm() on JobLauncher. It will setup the * paths and add the locations for the remote server addresses. It will call * super.setupForm() prior to setting up the executable and hostnames. * </p> * <!-- end-UML-doc --> */ public void setupForm() { // begin-user-code // Setup the script to copy the data files for case 6 // TableComponent hostTable = (TableComponent) form.getComponent(4); // CAEBAT_ROOT = hostTable.getRow(0).get(2).getValue(); // String exportRoot = "export CAEBAT_ROOT=" + CAEBAT_ROOT + ";"; // String copyCase = /** * <!-- begin-UML-doc --> * <p> * Overrides process by setting the executable correctly and then forwarding * later. Still calls super.process(actionName) once the executable is set * correctly for the workstation.conf file. 
* </p> * <!-- end-UML-doc --> * * @param the * action name * * @return The status of the action */ public FormStatus process(String actionName) { // begin-user-code /* * This section will be used in future iterations String separator = * System.getProperty("file.separator"); IPSReader reader = new * IPSReader(); IPSWriter writer = new IPSWriter(); * * DataComponent fileComponent = (DataComponent) form.getComponent(1); * Entry inputFileEntry = fileComponent.retrieveEntry("Input File"); * * IPath fileIPath = new Path(project.getLocation().toOSString() + * separator + inputFileEntry.getValue()); IFile inputFile = * ResourcesPlugin.getWorkspace().getRoot().getFile(fileIPath); * ArrayList<Entry> simRootMatches = reader.findAll(inputFile, * "SIM_ROOT=.*"); dataDir = * simRootMatches.get(0).getName().split("=")[1]; * * writer.replace(inputFile, "SIM_ROOT=.*", "SIM_ROOT=" + * getLaunchDirectory()); */ // Local Declarations String separator = System.getProperty("file.separator"); IPSReader reader = new IPSReader(); IPSWriter writer = new IPSWriter(); DataComponent fileComponent = (DataComponent) form.getComponent(1); Entry inputFileEntry = fileComponent.retrieveEntry("Input File"); Entry kvPairFileEntry = fileComponent .retrieveEntry("Key-value pair file"); IPath fileIPath = new Path(project.getLocation().toOSString() + separator + inputFileEntry.getValue()); IPath kvFileIPath = new Path(project.getLocation().toOSString() + separator + kvPairFileEntry.getValue()); IFile inputFile = project.getFile(inputFileEntry.getValue()); IFile kvPairFile = project.getFile(kvPairFileEntry.getValue()); // Get the Run ID that may be used to locate the simulation files String runID = ""; ArrayList<Entry> runIDMatches = reader.findAll(inputFile, "RUN_ID=.*"); if (runIDMatches != null && !runIDMatches.isEmpty()) { runID = runIDMatches.get(0).getName().split("=")[1]; } // Get the Case Name which may also be used to locate the simulation // files String caseName = ""; ArrayList<Entry> 
caseNameMatches = reader.findAll(inputFile, "SIM_NAME=.*"); if (caseNameMatches != null && !caseNameMatches.isEmpty()) { caseName = caseNameMatches.get(0).getName().split("=")[1]; } // Determine if we need to use the Run ID or the Case Name to find the // files if (caseName.contains("${RUN_ID}")) { caseName = runID; } // Get the base path for the simulation files String dataDir = ""; ArrayList<Entry> simRootMatches = reader.findAll(inputFile, "SIM_ROOT=.*"); if (simRootMatches != null && !simRootMatches.isEmpty()) { dataDir = simRootMatches.get(0).getName().split("=")[1]; } if (dataDir.endsWith("/$SIM_NAME")) { dataDir = dataDir.substring(0, dataDir.length() - 10); } else if (dataDir.endsWith("${SIM_NAME}")) { dataDir = dataDir.substring(0, dataDir.length() - 12); } // Get the input file directory for the simulation String inputDir = ""; ArrayList<Entry> inputDirMatches = reader.findAll(inputFile, ".*INPUT_DIR.*"); if (inputDirMatches != null && !inputDirMatches.isEmpty()) { inputDir = inputDirMatches.get(0).getName().split("=")[1]; } // If we are supplying a new KV Pair file replace it in the input file if (kvPairFileEntry.getValue() != "Use Default KV Entries" || kvPairFileEntry.getValue() != "") { writer.replace(inputFile, "input_keyvalue", kvPairFileEntry.getValue()); } // Pull some information from the form TableComponent hostTable = (TableComponent) form.getComponent(4); CAEBAT_ROOT = hostTable.getRow(0).get(2).getValue(); // Set up the execution command String exportRoot = "export CAEBAT_ROOT=" + CAEBAT_ROOT + "/vibe/components && "; /** * Override of update so that the CaebatLauncher can check if the user wants to select a * custom KV Pair file. */ public void update(IUpdateable component) { refreshProjectSpace(); super.update(component); // Determine whether the file selector needs to be added to or removed from the form if (component.getName() == "Use custom key-value pair file?" 
&& ((Entry) component).getValue() == "true") { addInputType("Key-value pair file", "keyValueFile", "Key-value pair with case parameters", ".dat"); } else if (component.getName() == "Use custom key-value pair file?" && ((Entry) component).getValue() == "false") { removeInputType("Key-value pair file"); } } /** * Recursively copies a directory to a destination. This method is used to * pull the simulation input files into the ICE Launch directory. * * @param src * The directory to copy over * @param dest * Where to put the directory */ public void copyInputDirectory(String src, String dest) { copyDirectory(src, dest); } }
package org.ensembl.healthcheck.testcase.generic;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.DatabaseType;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.Team;
import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase;
import org.ensembl.healthcheck.util.DBUtils;

/**
 * Compare the transcript stable IDs and exon coordinates between 2 releases. Note this is not comparing counts so doesn't extend
 * ComparePreviousVersionBase. Note this reads 2 complete exon sets into memory and so needs quite a bit of memory allocated.
 * Suggest -Xmx1700m
 */
public class ComparePreviousVersionExonCoords extends SingleDatabaseTestCase {

	/**
	 * Create a new testcase.
	 */
	public ComparePreviousVersionExonCoords() {

		addToGroup("release");
		addToGroup("pre-compara-handover");
		addToGroup("post-compara-handover");

		setDescription("Compare the transcript stable IDs and exon coordinates for each exon across releases to ensure that protein sequences are the same.");
		setEffect("Causes problems for Compara if proteins are not identical");
		setTeamResponsible(Team.CORE);
		setSecondTeamResponsible(Team.GENEBUILD);
	}

	/**
	 * This test does not apply to sangervega dbs (nor vega/rnaseq/cdna/otherfeatures).
	 */
	public void types() {

		removeAppliesToType(DatabaseType.SANGER_VEGA);
		removeAppliesToType(DatabaseType.VEGA);
		removeAppliesToType(DatabaseType.RNASEQ);
		removeAppliesToType(DatabaseType.CDNA);
		removeAppliesToType(DatabaseType.OTHERFEATURES);
	}

	/**
	 * Run the comparison of transcript:exon-coordinate keys between the current
	 * database and its equivalent on the secondary (previous-release) server.
	 *
	 * @param current
	 *          The database to check.
	 * @return true when the exon sets are identical or the comparison is
	 *         legitimately skipped (new species, genebuild changed, etc.).
	 */
	public boolean run(DatabaseRegistryEntry current) {

		boolean result = true;

		if (System.getProperty("ignore.previous.checks") != null) {
			logger.finest("ignore.previous.checks is set in database.properties, skipping this test");
			return true;
		}

		Connection currentCon = current.getConnection();

		// skip databases where there's no previous one (e.g. new species)
		DatabaseRegistryEntry previous = getEquivalentFromSecondaryServer(current);
		if (previous == null) {
			ReportManager.correct(this, currentCon, "Can't identify previous database - new species?");
			return true;
		}
		Connection previousCon = previous.getConnection();

		// and those where the genebuild version has changed - expect exon coords to change then
		// if we can't get the genebuild version (due to a non-standard database name for example, check anyway)
		int currentVersion = current.getNumericGeneBuildVersion();
		int previousVersion = previous.getNumericGeneBuildVersion();
		if (currentVersion > 0 && previousVersion > 0 && currentVersion != previousVersion) {
			ReportManager.correct(this, currentCon, "Genebuild version has changed since " + previous.getName() + ", skipping");
			return true;
		}

		// and those where the meta key genebuild.last_geneset_update has changed
		if (!DBUtils.getMetaValue(currentCon, "genebuild.last_geneset_update").equals(DBUtils.getMetaValue(previousCon, "genebuild.last_geneset_update"))) {
			ReportManager.correct(this, currentCon, "Meta entry genebuild.last_geneset_update has changed since " + previous.getName() + ", skipping");
			return true;
		}

		// build hashes of transcript stable id:exon start:exon end for both databases
		logger.finest("Building hash of current exon coords");
		Map<String, String> currentHash = buildHash(currentCon);
		logger.finest("Building hash of previous exon coords");
		Map<String, String> previousHash = buildHash(previousCon);

		// compare and store any differences
		logger.finest("Comparing ...");
		List<String> inNewNotOld = new ArrayList<String>();

		for (String currentKey : currentHash.keySet()) {
			if (!previousHash.containsKey(currentKey)) {
				// if it's not in the old one, make a note
				inNewNotOld.add(currentKey);
			} else {
				// present in both - no longer interesting, so drop it from the
				// previous set (currentHash itself is not modified while iterating)
				previousHash.remove(currentKey);
			}
		}

		// now previousHash will only contain keys that were in the old but not in the new
		List<String> inOldNotNew = new ArrayList<String>(previousHash.keySet());

		if (inNewNotOld.size() > 0 && inOldNotNew.size() == 0) {
			ReportManager
					.problem(this, currentCon, inNewNotOld.size() + " exons in " + current.getName() + " are not in " + previous.getName());
			result = false;
		}

		if (inNewNotOld.size() == 0 && inOldNotNew.size() > 0) {
			ReportManager
					.problem(this, currentCon, inOldNotNew.size() + " exons in " + previous.getName() + " are not in " + current.getName());
			result = false;
		}

		if (inNewNotOld.size() > 0 && inOldNotNew.size() > 0) {
			ReportManager.problem(this, currentCon, inOldNotNew.size() + " exons in " + previous.getName()
					+ " have coordinates that are different from those in the same transcript in " + current.getName());
			result = false;
		}

		if (inOldNotNew.size() == 0 && inNewNotOld.size() == 0) {
			ReportManager.correct(this, currentCon, "All exons identical between databases");
		}

		return result;
	}

	/**
	 * Build a set (as map keys) of "stable_id:seq_region_start:seq_region_end"
	 * strings for every exon of every protein_coding transcript.
	 *
	 * @param con
	 *          Connection to read from; not closed by this method.
	 * @return Map whose keys are the coordinate strings (values are a dummy "1");
	 *         empty on SQL error (the error is printed to stderr).
	 */
	private Map<String, String> buildHash(Connection con) {

		Map<String, String> hash = new HashMap<String, String>();

		String sql = "SELECT CONCAT(t.stable_id, ':', e.seq_region_start, ':', e.seq_region_end) FROM transcript t, exon_transcript et, exon e WHERE t.transcript_id=et.transcript_id AND et.exon_id=e.exon_id AND t.biotype='protein_coding'";

		Statement stmt = null;
		ResultSet rs = null;
		try {
			stmt = con.createStatement();
			rs = stmt.executeQuery(sql);
			while (rs.next()) {
				hash.put(rs.getString(1), "1");
			}
		} catch (SQLException e) {
			System.err.println("Error executing " + sql);
			e.printStackTrace();
		} finally {
			// FIX: previously rs/stmt were only closed on the success path and
			// leaked whenever executeQuery()/next() threw; always close them here.
			if (rs != null) {
				try {
					rs.close();
				} catch (SQLException e) {
					// ignore - nothing useful to do on close failure
				}
			}
			if (stmt != null) {
				try {
					stmt.close();
				} catch (SQLException e) {
					// ignore - nothing useful to do on close failure
				}
			}
		}

		return hash;
	}

} // ComparePreviousVersionExonCoords
package org.dazzle.utils;

import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.dazzle.common.exception.BaseException;

/**
 * Reflection-based utilities for converting between JavaBeans and Maps and for
 * copying property values between beans.
 * @author hcqt@qq.com */
public class BeanUtils {

	/** Package-visible constructor; the class is used statically. @author hcqt@qq.com */
	BeanUtils() {
		super();
	}

	/** Convert a bean to a Map of all its fields (unordered).
	 * @see #bean2Map(Object, String[], Boolean)
	 * @author hcqt@qq.com */
	public static final <T> Map<String, Object> bean2Map(T bean) {
		return bean2Map(bean, (Object) null, false);
	}

	/** Convert selected fields of a bean to an unordered Map.
	 * @see #bean2Map(Object, String[], Boolean)
	 * @author hcqt@qq.com */
	public static final <T> Map<String, Object> bean2Map(
			T bean,
			String[] fields) {
		return bean2Map(bean, fields, false);
	}

	/** Convert selected fields (comma-separated names) of a bean to an unordered Map.
	 * @see #bean2Map(Object, String[], Boolean)
	 * @author hcqt@qq.com */
	public static final <T> Map<String, Object> bean2Map(
			T bean,
			String fields) {
		return bean2Map(bean, (Object) fields, false);
	}

	/** Convert all fields of a bean to a Map, optionally order-preserving.
	 * @see #bean2Map(Object, String[], Boolean)
	 * @author hcqt@qq.com */
	public static final <T> Map<String, Object> bean2Map(
			T bean,
			Boolean needSequence) {
		return bean2Map(bean, (String[]) null, needSequence);
	}

	/** Convert selected fields (comma-separated names) of a bean to a Map.
	 * @see #bean2Map(Object, String[], Boolean)
	 * @author hcqt@qq.com */
	public static final <T> Map<String, Object> bean2Map(
			T bean,
			String fields,
			Boolean needSequence) {
		return bean2Map(bean, (Object) fields, needSequence);
	}

	/** Convert a bean to a Map, accepting the field selection as a String
	 * (comma-separated), a String[], or any value {@code DTU.cvt} can turn into
	 * a String[].
	 * @param fields field selection; null means all fields
	 * @see #bean2Map(Object, String[], Boolean)
	 * @author hcqt@qq.com */
	public static final <T> Map<String, Object> bean2Map(
			T bean,
			Object fields,
			Boolean needSequence) {
		if (fields == null) {
			return bean2Map(bean, (String[]) null, needSequence);
		}
		String[] _fields = null;
		if (fields instanceof String) {
			_fields = DTU.cvt(String[].class, ((String) fields).split(","));
		} else {
			try {
				_fields = DTU.cvt(String[].class, fields);
			} catch (BaseException e) {
				throw new BaseException("beanUtils_9k3YP", "beanmap//Collection:{0}:{1}", e, fields.getClass(), fields);
			}
		}
		return bean2Map(bean, _fields, needSequence);
	}

	/** Convert a bean to a Map of field name to field value.
	 * @param bean the source bean; null yields null
	 * @param fields the field names to copy; null means every declared field
	 * @param needSequence when true the returned Map preserves field order
	 *        (LinkedHashMap), otherwise a plain HashMap is used
	 * @return the resulting Map, or null when bean or the field set is null
	 * @author hcqt@qq.com */
	@SuppressWarnings("unchecked")
	public static final <T> Map<String, Object> bean2Map(
			T bean,
			String[] fields,
			Boolean needSequence) {
		if (null == bean) {
			return null;
		}
		Class<?> clazz = bean.getClass();
		Set<String> fieldSet = null;
		if (null == fields) {
			fieldSet = getAllFieldName(clazz);
		} else {
			fieldSet = DTU.cvt(Set.class, fields);
		}
		if (null == fieldSet) {
			return null;
		}
		Map<String, Object> ret = null;
		if (needSequence == null || !needSequence) {
			ret = new HashMap<String, Object>();
		} else {
			ret = new LinkedHashMap<String, Object>();
		}
		for (String field : fieldSet) {
			if (null == field) {
				continue;
			}
			ret.put(field, getFieldValue(bean, field));
		}
		return ret;
	}

	/** Copy same-named fields of a source bean into a new instance of clazz.
	 * @see #bean2Bean(Map, Object, Object)
	 * @author hcqt@qq.com */
	public static final <S, T> T bean2Bean(S obj, Class<T> clazz) {
		Set<String> fields = getAllFieldName(clazz);
		if (fields == null) {
			return null;
		}
		Map<String, String> fieldMapping = new HashMap<String, String>();
		for (String field : fields) {
			// identity mapping: copy each field to the field of the same name
			fieldMapping.put(field, field);
		}
		return bean2Bean(fieldMapping, obj, clazz);
	}

	/** Copy same-named fields of a source bean into an existing target bean.
	 * @see #bean2Bean(Map, Object, Object)
	 * @author hcqt@qq.com */
	public static final <S, T> T bean2Bean(S obj, T target) {
		if (target == null) {
			return null;
		}
		Set<String> fields = getAllFieldName(target.getClass());
		if (fields == null) {
			return target;
		}
		Map<String, String> fieldMapping = new HashMap<String, String>();
		for (String field : fields) {
			fieldMapping.put(field, field);
		}
		return bean2Bean(fieldMapping, obj, target);
	}

	/** Copy mapped fields of a source bean into a new instance of clazz.
	 * @param <S> source bean type
	 * @param <T> target bean type
	 * @param fieldMapping source-field-name to target-field-name mapping
	 * @param obj the source bean
	 * @param clazz the target class, instantiated via its no-arg constructor
	 * @see #bean2Bean(Map, Object, Object)
	 * @author hcqt@qq.com */
	public static final <S, T> T bean2Bean(Map<String, String> fieldMapping, S obj, Class<T> clazz) {
		if (null == fieldMapping) {
			return null;
		}
		T ret = null;
		try {
			ret = clazz.newInstance();
		} catch (Exception e) {
			throw new BaseException("beanUtils_8n3lk", "[{0}]——{1}", e, EU.out(e));
		}
		return bean2Bean(fieldMapping, obj, ret);
	}

	/** Copy mapped fields of a source bean into an existing target bean.
	 * @param <S> source bean type
	 * @param <T> target bean type
	 * @param fieldMapping source-field-name to target-field-name mapping
	 * @param obj the source bean
	 * @param target the bean receiving the values; returned as-is when mapping is null
	 * @author hcqt@qq.com */
	public static final <S, T> T bean2Bean(Map<String, String> fieldMapping, S obj, T target) {
		if (null == fieldMapping) {
			return target;
		}
		if (target == null) {
			return null;
		}
		for (Entry<String, String> entry : fieldMapping.entrySet()) {
			Object fieldNewValue = getFieldValue(obj, entry.getKey());
			setFieldValue(target, entry.getValue(), fieldNewValue);
		}
		return target;
	}

	/** Populate a new instance of beanClazz from map entries keyed by field name.
	 * @author hcqt@qq.com */
	public static final <T> T map2Bean(Class<T> beanClazz, Map<String, ?> map) {
		if (null == beanClazz || null == map) {
			return null;
		}
		T ret = null;
		try {
			ret = beanClazz.newInstance();
		} catch (Exception e) {
			throw new BaseException("beanUtils_9k3hQ", "mapjavaBeanJavaBean——{0}", e, EU.out(e));
		}
		return map2Bean(ret, map);
	}

	/** Populate an existing bean from map entries keyed by field name.
	 * @author hcqt@qq.com */
	public static final <T> T map2Bean(T beanObj, Map<String, ?> map) {
		if (null == beanObj || null == map) {
			return null;
		}
		Set<String> fields = getAllFieldName(beanObj.getClass());
		if (fields == null) {
			return null;
		}
		for (String field : fields) {
			setFieldValue(beanObj, field, map.get(field));
		}
		return beanObj;
	}

	/** Convert a collection of Maps into a collection (same concrete type) of
	 * beans. NOTE: returns null (not an empty collection) when nothing converted,
	 * preserving the original contract.
	 * @author hcqt@qq.com */
	@SuppressWarnings("unchecked")
	public static final <T extends Collection<Map<String, ?>>, S> Collection<S> collectionMap2CollectionBean(Class<S> beanClazz, T collection) {
		if (null == beanClazz || null == collection) {
			return null;
		}
		Collection<S> ret = null;
		try {
			ret = collection.getClass().newInstance();
		} catch (Exception e) {
			throw new BaseException("beanUtils_72jJk", "——{0}", e, e.getMessage());
		}
		for (Map<String, ?> item : collection) {
			S obj = map2Bean(beanClazz, item);
			if (null == obj) {
				continue;
			}
			ret.add(obj);
		}
		if (!ret.isEmpty()) {
			return ret;
		}
		return null;
	}

	/** Collect the names of every field declared by clazz and its superclasses
	 * (excluding Object).
	 * @return the field-name set, or null when clazz is null
	 * @author hcqt@qq.com */
	public static final Set<String> getAllFieldName(Class<?> clazz) {
		if (null == clazz) {
			return null;
		}
		Set<String> ret = new HashSet<String>();
		// FIX: also stop when getSuperclass() returns null (interfaces, Object
		// itself, primitive types); previously this loop threw NullPointerException.
		for (Class<?> currentClass = clazz; currentClass != null && currentClass != Object.class; currentClass = currentClass.getSuperclass()) {
			Field[] fields = currentClass.getDeclaredFields();
			for (Field field : fields) {
				ret.add(field.getName());
			}
		}
		return ret;
	}

	/** Read a bean property, preferring the getter and falling back to direct
	 * field access, walking up the class hierarchy.
	 * @return the value, or null when obj/field is null or nothing matches
	 * @author hcqt@qq.com */
	public static final Object getFieldValue(
			Object obj,
			String field) {
		// FIX: guard obj against null (previously obj.getClass() threw NPE)
		if (null == obj || null == field) {
			return null;
		}
		Method method = null;
		// FIX: null-guard the superclass walk, as in getAllFieldName
		for (Class<?> currentClass = obj.getClass(); currentClass != null && currentClass != Object.class; currentClass = currentClass.getSuperclass()) {
			try {
				method = currentClass.getDeclaredMethod(methodName("get", field));
				return method.invoke(obj);
			} catch (Throwable e) {
				// no usable getter on this class - try the declared field directly
				try {
					Field _field = currentClass.getDeclaredField(field);
					_field.setAccessible(true);
					return _field.get(obj);
				} catch (Throwable e1) {
					// not declared here either; continue up the hierarchy
				}
			}
		}
		return null;
	}

	/** Write a bean property, preferring the setter and falling back to direct
	 * field access, walking up the class hierarchy. Failures are silent by
	 * design (best-effort copy semantics).
	 * @author hcqt@qq.com */
	public static final void setFieldValue(
			Object obj,
			String field,
			Object val) {
		// FIX: guard obj against null (previously obj.getClass() threw NPE)
		if (null == obj || null == field) {
			return;
		}
		// FIX: null-guard the superclass walk, as in getAllFieldName
		for (Class<?> currentClass = obj.getClass(); currentClass != null && currentClass != Object.class; currentClass = currentClass.getSuperclass()) {
			setField(currentClass, obj, field, val);
		}
	}

	/** Invoke the setter declared on currentClass, converting the value to the
	 * field's type; on any failure fall back to direct field reflection. */
	private static final void setField(Class<?> currentClass, Object obj, String fieldName, Object val) {
		Class<?> fieldType = getFieldType(currentClass, fieldName);
		Method method = null;
		try {
			method = currentClass.getDeclaredMethod(methodName("set", fieldName), fieldType);
		} catch (NoSuchMethodException e) {
			reflectSetField(currentClass, obj, fieldName, val);
			return;
		} catch (SecurityException e) {
			reflectSetField(currentClass, obj, fieldName, val);
			return;
		}
		try {
			method.invoke(obj, DTU.cvt(fieldType, val));
		} catch (IllegalAccessException e) {
			reflectSetField(currentClass, obj, fieldName, val);
			return;
		} catch (IllegalArgumentException e) {
			reflectSetField(currentClass, obj, fieldName, val);
			return;
		} catch (InvocationTargetException e) {
			reflectSetField(currentClass, obj, fieldName, val);
			return;
		}
	}

	/** Set the declared field directly via reflection; silently gives up when
	 * the field is absent or inaccessible (best-effort semantics). */
	private static final void reflectSetField(Class<?> currentClass, Object obj, String fieldName, Object fieldVal) {
		Field _field = getField(currentClass, fieldName);
		if (_field == null) {
			return;
		}
		try {
			_field.setAccessible(true);
		} catch (SecurityException e) {
			// may still succeed if the field is already accessible
		}
		try {
			_field.set(obj, DTU.cvt(_field.getType(), fieldVal));
		} catch (Throwable e) {
			// best-effort: ignore conversion/access failures
		}
	}

	/** @return the declared field's type, or Object.class when not found. */
	private static final Class<?> getFieldType(Class<?> clazz, String fieldName) {
		Field _field = getField(clazz, fieldName);
		if (_field == null) {
			return Object.class;
		}
		return getFieldType(_field);
	}

	/** @return the field declared directly on clazz, or null when absent/forbidden. */
	private static final Field getField(Class<?> clazz, String fieldName) {
		try {
			return clazz.getDeclaredField(fieldName);
		} catch (NoSuchFieldException e) {
			return null;
		} catch (SecurityException e) {
			return null;
		}
	}

	/** @return the field's type, or Object.class for a null field. */
	private static final Class<?> getFieldType(Field field) {
		if (field == null) {
			return Object.class;
		}
		return field.getType();
	}

	/** Build a JavaBean accessor name, e.g. ("get", "name") -> "getName".
	 * NOTE(review): assumes fieldName is non-empty — an empty name would throw
	 * StringIndexOutOfBoundsException; confirm callers never pass "". */
	private static final String methodName(String prefix, String fieldName) {
		return new StringBuilder().append(prefix).append(fieldName.substring(0, 1).toUpperCase()).append(fieldName.substring(1)).toString();
	}

}
package org.javacc.parser; import java.util.*; public class NfaState { public static boolean unicodeWarningGiven = false; public static int generatedStates = 0; static int idCnt = 0; static int lohiByteCnt; static int dummyStateIndex = -1; static boolean done; static boolean mark[]; static boolean stateDone[]; static boolean nonAsciiIntersections[][] = new boolean[20][20]; static Vector allStates = new Vector(); static Vector indexedAllStates = new Vector(); static Vector nonAsciiTableForMethod = new Vector(); static Hashtable equivStatesTable = new Hashtable(); static Hashtable allNextStates = new Hashtable(); static Hashtable lohiByteTab = new Hashtable(); static Hashtable stateNameForComposite = new Hashtable(); static Hashtable compositeStateTable = new Hashtable(); static Hashtable stateBlockTable = new Hashtable(); static Hashtable stateSetsToFix = new Hashtable(); public static void ReInit() { generatedStates = 0; idCnt = 0; dummyStateIndex = -1; done = false; mark = null; stateDone = null; allStates.removeAllElements(); indexedAllStates.removeAllElements(); equivStatesTable.clear(); allNextStates.clear(); compositeStateTable.clear(); stateBlockTable.clear(); stateNameForComposite.clear(); stateSetsToFix.clear(); } long[] asciiMoves = new long[2]; char[] charMoves = null; char[] rangeMoves = null; NfaState next = null; NfaState stateForCase; Vector epsilonMoves = new Vector(); String epsilonMovesString; NfaState[] epsilonMoveArray; int id; int stateName = -1; int kind = Integer.MAX_VALUE; int lookingFor; int usefulEpsilonMoves = 0; int inNextOf; private int lexState; int nonAsciiMethod = -1; int kindToPrint = Integer.MAX_VALUE; boolean dummy = false; boolean isComposite = false; int[] compositeStates = null; boolean isFinal = false; public Vector loByteVec; public int[] nonAsciiMoveIndices; int round = 0; int onlyChar = 0; char matchSingleChar; NfaState() { id = idCnt++; allStates.addElement(this); lexState = LexGen.lexStateIndex; lookingFor = 
LexGen.curKind; } NfaState CreateClone() { NfaState retVal = new NfaState(); retVal.isFinal = isFinal; retVal.kind = kind; retVal.lookingFor = lookingFor; retVal.lexState = lexState; retVal.inNextOf = inNextOf; retVal.MergeMoves(this); return retVal; } static void InsertInOrder(Vector v, NfaState s) { int j; for (j = 0; j < v.size(); j++) if (((NfaState)v.elementAt(j)).id > s.id) break; else if (((NfaState)v.elementAt(j)).id == s.id) return; v.insertElementAt(s, j); } private static char[] ExpandCharArr(char[] oldArr, int incr) { char[] ret = new char[oldArr.length + incr]; System.arraycopy(oldArr, 0, ret, 0, oldArr.length); return ret; } void AddMove(NfaState newState) { if (!epsilonMoves.contains(newState)) InsertInOrder(epsilonMoves, newState); } private final void AddASCIIMove(char c) { asciiMoves[c / 64] |= (1L << (c % 64)); } void AddChar(char c) { onlyChar++; matchSingleChar = c; int i; char temp; char temp1; if ((int)c < 128) // ASCII char { AddASCIIMove(c); return; } if (charMoves == null) charMoves = new char[10]; int len = charMoves.length; if (charMoves[len - 1] != 0) { charMoves = ExpandCharArr(charMoves, 10); len += 10; } for (i = 0; i < len; i++) if (charMoves[i] == 0 || charMoves[i] > c) break; if (!unicodeWarningGiven && c > 0xff && !Options.B("JAVA_UNICODE_ESCAPE") && !Options.B("USER_CHAR_STREAM")) { unicodeWarningGiven = true; JavaCCErrors.warning(LexGen.curRE, "Non-ASCII characters used in regular expression.\n" + "Please make sure you use the correct Reader when you create the parser that can handle your character set."); } temp = charMoves[i]; charMoves[i] = c; for (i++; i < len; i++) { if (temp == 0) break; temp1 = charMoves[i]; charMoves[i] = temp; temp = temp1; } } void AddRange(char left, char right) { onlyChar = 2; int i; char tempLeft1, tempLeft2, tempRight1, tempRight2; if (left < 128) { if (right < 128) { for (; left <= right; left++) AddASCIIMove(left); return; } for (; left < 128; left++) AddASCIIMove(left); } if 
(!unicodeWarningGiven && (left > 0xff || right > 0xff) && !Options.B("JAVA_UNICODE_ESCAPE") && !Options.B("USER_CHAR_STREAM")) { unicodeWarningGiven = true; JavaCCErrors.warning(LexGen.curRE, "Non-ASCII characters used in regular expression.\n" + "Please make sure you use the correct Reader when you create the parser that can handle your character set."); } if (rangeMoves == null) rangeMoves = new char[20]; int len = rangeMoves.length; if (rangeMoves[len - 1] != 0) { rangeMoves = ExpandCharArr(rangeMoves, 20); len += 20; } for (i = 0; i < len; i += 2) if (rangeMoves[i] == 0 || (rangeMoves[i] > left) || ((rangeMoves[i] == left) && (rangeMoves[i + 1] > right))) break; tempLeft1 = rangeMoves[i]; tempRight1 = rangeMoves[i + 1]; rangeMoves[i] = left; rangeMoves[i + 1] = right; for (i += 2; i < len; i += 2) { if (tempLeft1 == 0) break; tempLeft2 = rangeMoves[i]; tempRight2 = rangeMoves[i + 1]; rangeMoves[i] = tempLeft1; rangeMoves[i + 1] = tempRight1; tempLeft1 = tempLeft2; tempRight1 = tempRight2; } } // From hereon down all the functions are used for code generation private static boolean EqualCharArr(char[] arr1, char[] arr2) { if (arr1 == arr2) return true; if (arr1 != null && arr2 != null && arr1.length == arr2.length) { for (int i = arr1.length; i if (arr1[i] != arr2[i]) return false; return true; } return false; } private boolean closureDone = false; /** This function computes the closure and also updates the kind so that * any time there is a move to this state, it can go on epsilon to a * new state in the epsilon moves that might have a lower kind of token * number for the same length. 
*/ private void EpsilonClosure() { int i = 0; if (closureDone || mark[id]) return; mark[id] = true; // Recursively do closure for (i = 0; i < epsilonMoves.size(); i++) ((NfaState)epsilonMoves.elementAt(i)).EpsilonClosure(); Enumeration e = epsilonMoves.elements(); while (e.hasMoreElements()) { NfaState tmp = (NfaState)e.nextElement(); for (i = 0; i < tmp.epsilonMoves.size(); i++) { NfaState tmp1 = (NfaState)tmp.epsilonMoves.elementAt(i); if (tmp1.UsefulState() && !epsilonMoves.contains(tmp1)) { InsertInOrder(epsilonMoves, tmp1); done = false; } } if (kind > tmp.kind) kind = tmp.kind; } if (HasTransitions() && !epsilonMoves.contains(this)) InsertInOrder(epsilonMoves, this); } private boolean UsefulState() { return isFinal || HasTransitions(); } public boolean HasTransitions() { return (asciiMoves[0] != 0L || asciiMoves[1] != 0L || (charMoves != null && charMoves[0] != 0) || (rangeMoves != null && rangeMoves[0] != 0)); } void MergeMoves(NfaState other) { // Warning : This function does not merge epsilon moves if (asciiMoves == other.asciiMoves) { JavaCCErrors.semantic_error("Bug in JavaCC : Please send " + "a report along with the input that caused this. 
Thank you."); throw new Error(); } asciiMoves[0] = asciiMoves[0] | other.asciiMoves[0]; asciiMoves[1] = asciiMoves[1] | other.asciiMoves[1]; if (other.charMoves != null) { if (charMoves == null) charMoves = other.charMoves; else { char[] tmpCharMoves = new char[charMoves.length + other.charMoves.length]; System.arraycopy(charMoves, 0, tmpCharMoves, 0, charMoves.length); charMoves = tmpCharMoves; for (int i = 0; i < other.charMoves.length; i++) AddChar(other.charMoves[i]); } } if (other.rangeMoves != null) { if (rangeMoves == null) rangeMoves = other.rangeMoves; else { char[] tmpRangeMoves = new char[rangeMoves.length + other.rangeMoves.length]; System.arraycopy(rangeMoves, 0, tmpRangeMoves, 0, rangeMoves.length); rangeMoves = tmpRangeMoves; for (int i = 0; i < other.rangeMoves.length; i += 2) AddRange(other.rangeMoves[i], other.rangeMoves[i + 1]); } } if (other.kind < kind) kind = other.kind; if (other.kindToPrint < kindToPrint) kindToPrint = other.kindToPrint; isFinal |= other.isFinal; } NfaState CreateEquivState(Vector states) { NfaState newState = ((NfaState)states.elementAt(0)).CreateClone(); newState.next = new NfaState(); InsertInOrder(newState.next.epsilonMoves, ((NfaState)states.elementAt(0)).next); for (int i = 1; i < states.size(); i++) { NfaState tmp2 = ((NfaState)states.elementAt(i)); if (tmp2.kind < newState.kind) newState.kind = tmp2.kind; newState.isFinal |= tmp2.isFinal; InsertInOrder(newState.next.epsilonMoves, tmp2.next); } return newState; } private NfaState GetEquivalentRunTimeState() { Outer : for (int i = allStates.size(); i { NfaState other = (NfaState)allStates.elementAt(i); if (this != other && other.stateName != -1 && kindToPrint == other.kindToPrint && asciiMoves[0] == other.asciiMoves[0] && asciiMoves[1] == other.asciiMoves[1] && EqualCharArr(charMoves, other.charMoves) && EqualCharArr(rangeMoves, other.rangeMoves)) { if (next == other.next) return other; else if (next != null && other.next != null) { if (next.epsilonMoves.size() == 
other.next.epsilonMoves.size()) { for (int j = 0; j < next.epsilonMoves.size(); j++) if (next.epsilonMoves.elementAt(j) != other.next.epsilonMoves.elementAt(j)) continue Outer; return other; } } } } return null; } // generates code (without outputting it) and returns the name used. void GenerateCode() { if (stateName != -1) return; if (next != null) { next.GenerateCode(); if (next.kind != Integer.MAX_VALUE) kindToPrint = next.kind; } if (stateName == -1 && HasTransitions()) { NfaState tmp = GetEquivalentRunTimeState(); if (tmp != null) { stateName = tmp.stateName; //tmp.inNextOf += inNextOf; dummy = true; return; } stateName = generatedStates++; indexedAllStates.addElement(this); GenerateNextStatesCode(); } } public static void ComputeClosures() { for (int i = allStates.size(); i { NfaState tmp = (NfaState)allStates.elementAt(i); if (!tmp.closureDone) tmp.OptimizeEpsilonMoves(true); } for (int i = 0; i < allStates.size(); i++) { NfaState tmp = (NfaState)allStates.elementAt(i); if (!tmp.closureDone) tmp.OptimizeEpsilonMoves(false); } for (int i = 0; i < allStates.size(); i++) { NfaState tmp = (NfaState)allStates.elementAt(i); tmp.epsilonMoveArray = new NfaState[tmp.epsilonMoves.size()]; tmp.epsilonMoves.copyInto(tmp.epsilonMoveArray); } } void OptimizeEpsilonMoves(boolean optReqd) { int i; // First do epsilon closure done = false; while (!done) { if (mark == null || mark.length < allStates.size()) mark = new boolean[allStates.size()]; for (i = allStates.size(); i mark[i] = false; done = true; EpsilonClosure(); } for (i = allStates.size(); i ((NfaState)allStates.elementAt(i)).closureDone = mark[((NfaState)allStates.elementAt(i)).id]; // Warning : The following piece of code is just an optimization. // in case of trouble, just remove this piece. 
boolean sometingOptimized = true; NfaState newState = null; NfaState tmp1, tmp2; int j; Vector equivStates = null; while (sometingOptimized) { sometingOptimized = false; for (i = 0; optReqd && i < epsilonMoves.size(); i++) { if ((tmp1 = (NfaState)epsilonMoves.elementAt(i)).HasTransitions()) { for (j = i + 1; j < epsilonMoves.size(); j++) { if ((tmp2 = (NfaState)epsilonMoves.elementAt(j)). HasTransitions() && (tmp1.asciiMoves[0] == tmp2.asciiMoves[0] && tmp1.asciiMoves[1] == tmp2.asciiMoves[1] && EqualCharArr(tmp1.charMoves, tmp2.charMoves) && EqualCharArr(tmp1.rangeMoves, tmp2.rangeMoves))) { if (equivStates == null) { equivStates = new Vector(); equivStates.addElement(tmp1); } InsertInOrder(equivStates, tmp2); epsilonMoves.removeElementAt(j } } } if (equivStates != null) { sometingOptimized = true; String tmp = ""; for (int l = 0; l < equivStates.size(); l++) tmp += String.valueOf( ((NfaState)equivStates.elementAt(l)).id) + ", "; if ((newState = (NfaState)equivStatesTable.get(tmp)) == null) { newState = CreateEquivState(equivStates); equivStatesTable.put(tmp, newState); } epsilonMoves.removeElementAt(i epsilonMoves.addElement(newState); equivStates = null; newState = null; } } for (i = 0; i < epsilonMoves.size(); i++) { //if ((tmp1 = (NfaState)epsilonMoves.elementAt(i)).next == null) //continue; tmp1 = (NfaState)epsilonMoves.elementAt(i); for (j = i + 1; j < epsilonMoves.size(); j++) { tmp2 = (NfaState)epsilonMoves.elementAt(j); if (tmp1.next == tmp2.next) { if (newState == null) { newState = tmp1.CreateClone(); newState.next = tmp1.next; sometingOptimized = true; } newState.MergeMoves(tmp2); epsilonMoves.removeElementAt(j } } if (newState != null) { epsilonMoves.removeElementAt(i epsilonMoves.addElement(newState); newState = null; } } } // End Warning NfaState tempState; // Generate an array of states for epsilon moves (not vector) if (epsilonMoves.size() > 0) { for (i = 0; i < epsilonMoves.size(); i++) // Since we are doing a closure, just epsilon moves are 
unncessary if ((tempState = (NfaState)epsilonMoves.elementAt(i)). HasTransitions()) usefulEpsilonMoves++; else epsilonMoves.removeElementAt(i } } void GenerateNextStatesCode() { if (next.usefulEpsilonMoves > 0) next.GetEpsilonMovesString(); } String GetEpsilonMovesString() { int[] stateNames = new int[usefulEpsilonMoves]; int cnt = 0; if (epsilonMovesString != null) return epsilonMovesString; if (usefulEpsilonMoves > 0) { NfaState tempState; epsilonMovesString = "{ "; for (int i = 0; i < epsilonMoves.size(); i++) { if ((tempState = (NfaState)epsilonMoves.elementAt(i)). HasTransitions()) { if (tempState.stateName == -1) tempState.GenerateCode(); ((NfaState)indexedAllStates.elementAt(tempState.stateName)).inNextOf++; stateNames[cnt] = tempState.stateName; epsilonMovesString += tempState.stateName + ", "; if (cnt++ > 0 && cnt % 16 == 0) epsilonMovesString += "\n"; } } epsilonMovesString += "};"; } usefulEpsilonMoves = cnt; if (epsilonMovesString != null && allNextStates.get(epsilonMovesString) == null) { int[] statesToPut = new int[usefulEpsilonMoves]; System.arraycopy(stateNames, 0, statesToPut, 0, cnt); allNextStates.put(epsilonMovesString, statesToPut); } return epsilonMovesString; } public static boolean CanStartNfaUsingAscii(char c) { if (c >= 128) throw new Error("JavaCC Bug: Please send mail to sankar@cs.stanford.edu"); String s = LexGen.initialState.GetEpsilonMovesString(); if (s == null || s.equals("null;")) return false; int[] states = (int[])allNextStates.get(s); for (int i = 0; i < states.length; i++) { NfaState tmp = (NfaState)indexedAllStates.elementAt(states[i]); if ((tmp.asciiMoves[c / 64 ] & (1L << c % 64)) != 0L) return true; } return false; } final boolean CanMoveUsingChar(char c) { int i; if (onlyChar == 1) return c == matchSingleChar; if (c < 128) return ((asciiMoves[c / 64 ] & (1L << c % 64)) != 0L); // Just check directly if there is a move for this char if (charMoves != null && charMoves[0] != 0) { for (i = 0; i < charMoves.length; i++) { if (c 
== charMoves[i]) return true; else if (c < charMoves[i] || charMoves[i] == 0) break; } } // For ranges, iterate thru the table to see if the current char // is in some range if (rangeMoves != null && rangeMoves[0] != 0) for (i = 0; i < rangeMoves.length; i += 2) if (c >= rangeMoves[i] && c <= rangeMoves[i + 1]) return true; else if (c < rangeMoves[i] || rangeMoves[i] == 0) break; //return (nextForNegatedList != null); return false; } public int getFirstValidPos(String s, int i, int len) { if (onlyChar == 1) { char c = matchSingleChar; while (c != s.charAt(i) && ++i < len); return i; } do { if (CanMoveUsingChar(s.charAt(i))) return i; } while (++i < len); return i; } public int MoveFrom(char c, Vector newStates) { if (CanMoveUsingChar(c)) { for (int i = next.epsilonMoves.size(); i InsertInOrder(newStates, (NfaState)next.epsilonMoves.elementAt(i)); return kindToPrint; } return Integer.MAX_VALUE; } public static int MoveFromSet(char c, Vector states, Vector newStates) { int tmp; int retVal = Integer.MAX_VALUE; for (int i = states.size(); i if (retVal > (tmp = ((NfaState)states.elementAt(i)).MoveFrom(c, newStates))) retVal = tmp; return retVal; } public static int moveFromSetForRegEx(char c, NfaState[] states, NfaState[] newStates, int round) { int start = 0; int sz = states.length; for (int i = 0; i < sz; i++) { NfaState tmp1, tmp2; if ((tmp1 = states[i]) == null) break; if (tmp1.CanMoveUsingChar(c)) { if (tmp1.kindToPrint != Integer.MAX_VALUE) { newStates[start] = null; return 1; } NfaState[] v = tmp1.next.epsilonMoveArray; for (int j = v.length; j { if ((tmp2 = v[j]).round != round) { tmp2.round = round; newStates[start++] = tmp2; } } } } newStates[start] = null; return Integer.MAX_VALUE; } static Vector allBitVectors = new Vector(); /* This function generates the bit vectors of low and hi bytes for common bit vectors and retunrs those that are not common with anything (in loBytes) and returns an array of indices that can be used to generate the function names for 
char matching using the common bit vectors. It also generates code to match a char with the common bit vectors. (Need a better comment). */ static int[] tmpIndices = new int[512]; // 2 * 256 void GenerateNonAsciiMoves(java.io.PrintWriter ostr) { int i = 0, j = 0; char hiByte; int cnt = 0; long[][] loBytes = new long[256][4]; if ((charMoves == null || charMoves[0] == 0) && (rangeMoves == null || rangeMoves[0] == 0)) return; if (charMoves != null) { for (i = 0; i < charMoves.length; i++) { if (charMoves[i] == 0) break; hiByte = (char)(charMoves[i] >> 8); loBytes[hiByte][(charMoves[i] & 0xff) / 64] |= (1L << ((charMoves[i] & 0xff) % 64)); } } if (rangeMoves != null) { for (i = 0; i < rangeMoves.length; i += 2) { if (rangeMoves[i] == 0) break; char c, r; r = (char)(rangeMoves[i + 1] & 0xff); hiByte = (char)(rangeMoves[i] >> 8); if (hiByte == (char)(rangeMoves[i + 1] >> 8)) { for (c = (char)(rangeMoves[i] & 0xff); c <= r; c++) loBytes[hiByte][c / 64] |= (1L << (c % 64)); continue; } for (c = (char)(rangeMoves[i] & 0xff); c <= 0xff; c++) loBytes[hiByte][c / 64] |= (1L << (c % 64)); while (++hiByte < (char)(rangeMoves[i + 1] >> 8)) { loBytes[hiByte][0] |= 0xffffffffffffffffL; loBytes[hiByte][1] |= 0xffffffffffffffffL; loBytes[hiByte][2] |= 0xffffffffffffffffL; loBytes[hiByte][3] |= 0xffffffffffffffffL; } for (c = 0; c <= r; c++) loBytes[hiByte][c / 64] |= (1L << (c % 64)); } } long[] common = null; boolean[] done = new boolean[256]; for (i = 0; i <= 255; i++) { if (done[i] || (done[i] = loBytes[i][0] == 0 && loBytes[i][1] == 0 && loBytes[i][2] == 0 && loBytes[i][3] == 0)) continue; for (j = i + 1; j < 256; j++) { if (done[j]) continue; if (loBytes[i][0] == loBytes[j][0] && loBytes[i][1] == loBytes[j][1] && loBytes[i][2] == loBytes[j][2] && loBytes[i][3] == loBytes[j][3]) { done[j] = true; if (common == null) { done[i] = true; common = new long[4]; common[i / 64] |= (1L << (i % 64)); } common[j / 64] |= (1L << (j % 64)); } } if (common != null) { Integer ind; String tmp; 
tmp = "{\n 0x" + Long.toHexString(common[0]) + "L, " + "0x" + Long.toHexString(common[1]) + "L, " + "0x" + Long.toHexString(common[2]) + "L, " + "0x" + Long.toHexString(common[3]) + "L\n};"; if ((ind = (Integer)lohiByteTab.get(tmp)) == null) { allBitVectors.addElement(tmp); if (!AllBitsSet(tmp)) ostr.println("static final long[] jjbitVec" + lohiByteCnt + " = " + tmp); lohiByteTab.put(tmp, ind = new Integer(lohiByteCnt++)); } tmpIndices[cnt++] = ind.intValue(); tmp = "{\n 0x" + Long.toHexString(loBytes[i][0]) + "L, " + "0x" + Long.toHexString(loBytes[i][1]) + "L, " + "0x" + Long.toHexString(loBytes[i][2]) + "L, " + "0x" + Long.toHexString(loBytes[i][3]) + "L\n};"; if ((ind = (Integer)lohiByteTab.get(tmp)) == null) { allBitVectors.addElement(tmp); if (!AllBitsSet(tmp)) ostr.println("static final long[] jjbitVec" + lohiByteCnt + " = " + tmp); lohiByteTab.put(tmp, ind = new Integer(lohiByteCnt++)); } tmpIndices[cnt++] = ind.intValue(); common = null; } } nonAsciiMoveIndices = new int[cnt]; System.arraycopy(tmpIndices, 0, nonAsciiMoveIndices, 0, cnt); /* System.out.println("state : " + stateName + " cnt : " + cnt); while (cnt > 0) { System.out.print(nonAsciiMoveIndices[cnt - 1] + ", " + nonAsciiMoveIndices[cnt - 2] + ", "); cnt -= 2; } System.out.println(""); */ for (i = 0; i < 256; i++) { if (done[i]) loBytes[i] = null; else { //System.out.print(i + ", "); String tmp; Integer ind; tmp = "{\n 0x" + Long.toHexString(loBytes[i][0]) + "L, " + "0x" + Long.toHexString(loBytes[i][1]) + "L, " + "0x" + Long.toHexString(loBytes[i][2]) + "L, " + "0x" + Long.toHexString(loBytes[i][3]) + "L\n};"; if ((ind = (Integer)lohiByteTab.get(tmp)) == null) { allBitVectors.addElement(tmp); if (!AllBitsSet(tmp)) ostr.println("static final long[] jjbitVec" + lohiByteCnt + " = " + tmp); lohiByteTab.put(tmp, ind = new Integer(lohiByteCnt++)); } if (loByteVec == null) loByteVec = new Vector(); loByteVec.addElement(new Integer(i)); loByteVec.addElement(ind); } } //System.out.println(""); 
UpdateDuplicateNonAsciiMoves(); } private void UpdateDuplicateNonAsciiMoves() { for (int i = 0; i < nonAsciiTableForMethod.size(); i++) { NfaState tmp = (NfaState)nonAsciiTableForMethod.elementAt(i); if (EqualLoByteVectors(loByteVec, tmp.loByteVec) && EqualNonAsciiMoveIndices(nonAsciiMoveIndices, tmp.nonAsciiMoveIndices)) { nonAsciiMethod = i; return; } } nonAsciiMethod = nonAsciiTableForMethod.size(); nonAsciiTableForMethod.addElement(this); } private static boolean EqualLoByteVectors(Vector vec1, Vector vec2) { if (vec1 == null || vec2 == null) return false; if (vec1 == vec2) return true; if (vec1.size() != vec2.size()) return false; for (int i = 0; i < vec1.size(); i++) { if (((Integer)vec1.elementAt(i)).intValue() != ((Integer)vec2.elementAt(i)).intValue()) return false; } return true; } private static boolean EqualNonAsciiMoveIndices(int[] moves1, int[] moves2) { if (moves1 == moves2) return true; if (moves1 == null || moves2 == null) return false; if (moves1.length != moves2.length) return false; for (int i = 0; i < moves1.length;i++) { if (moves1[i] != moves2[i]) return false; } return true; } static String allBits = "{\n 0xffffffffffffffffL, " + "0xffffffffffffffffL, " + "0xffffffffffffffffL, " + "0xffffffffffffffffL\n};"; static boolean AllBitsSet(String bitVec) { return bitVec.equals(allBits); } static int AddStartStateSet(String stateSetString) { return AddCompositeStateSet(stateSetString, true); } private static int AddCompositeStateSet(String stateSetString, boolean starts) { Integer stateNameToReturn; if ((stateNameToReturn = (Integer)stateNameForComposite.get(stateSetString)) != null) return stateNameToReturn.intValue(); int toRet = 0; int[] nameSet = (int[])allNextStates.get(stateSetString); if (!starts) stateBlockTable.put(stateSetString, stateSetString); if (nameSet == null) throw new Error("JavaCC Bug: Please send mail to sankar@cs.stanford.edu; nameSet null for : " + stateSetString); if (nameSet.length == 1) { stateNameToReturn = new 
Integer(nameSet[0]); stateNameForComposite.put(stateSetString, stateNameToReturn); return nameSet[0]; } for (int i = 0; i < nameSet.length; i++) { if (nameSet[i] == -1) continue; NfaState st = (NfaState)indexedAllStates.elementAt(nameSet[i]); st.isComposite = true; st.compositeStates = nameSet; } while (toRet < nameSet.length && (starts && ((NfaState)indexedAllStates.elementAt(nameSet[toRet])).inNextOf > 1)) toRet++; Enumeration e = compositeStateTable.keys(); String s; while (e.hasMoreElements()) { s = (String)e.nextElement(); if (!s.equals(stateSetString) && Intersect(stateSetString, s)) { int[] other = (int[])compositeStateTable.get(s); while (toRet < nameSet.length && ((starts && ((NfaState)indexedAllStates.elementAt(nameSet[toRet])).inNextOf > 1) || ElemOccurs(nameSet[toRet], other) >= 0)) toRet++; } } int tmp; if (toRet >= nameSet.length) { if (dummyStateIndex == -1) tmp = dummyStateIndex = generatedStates; else tmp = ++dummyStateIndex; } else tmp = nameSet[toRet]; stateNameToReturn = new Integer(tmp); stateNameForComposite.put(stateSetString, stateNameToReturn); compositeStateTable.put(stateSetString, nameSet); return tmp; } private static int StateNameForComposite(String stateSetString) { return ((Integer)stateNameForComposite.get(stateSetString)).intValue(); } static int InitStateName() { String s = LexGen.initialState.GetEpsilonMovesString(); if (LexGen.initialState.usefulEpsilonMoves != 0) return StateNameForComposite(s); return -1; } public void GenerateInitMoves(java.io.PrintWriter ostr) { GetEpsilonMovesString(); if (epsilonMovesString == null) epsilonMovesString = "null;"; AddStartStateSet(epsilonMovesString); } static Hashtable tableToDump = new Hashtable(); static Vector orderedStateSet = new Vector(); static int lastIndex = 0; private static int[] GetStateSetIndicesForUse(String arrayString) { int[] ret; int[] set = (int[])allNextStates.get(arrayString); if ((ret = (int[])tableToDump.get(arrayString)) == null) { ret = new int[2]; ret[0] = 
lastIndex; ret[1] = lastIndex + set.length - 1; lastIndex += set.length; tableToDump.put(arrayString, ret); orderedStateSet.addElement(set); } return ret; } public static void DumpStateSets(java.io.PrintWriter ostr) { int cnt = 0; ostr.print("static final int[] jjnextStates = {"); for (int i = 0; i < orderedStateSet.size(); i++) { int[] set = (int[])orderedStateSet.elementAt(i); for (int j = 0; j < set.length; j++) { if (cnt++ % 16 == 0) ostr.print("\n "); ostr.print(set[j] + ", "); } } ostr.println("\n};"); } static String GetStateSetString(int[] states) { String retVal = "{ "; for (int i = 0; i < states.length; ) { retVal += states[i] + ", "; if (i++ > 0 && i % 16 == 0) retVal += "\n"; } retVal += "};"; allNextStates.put(retVal, states); return retVal; } static String GetStateSetString(Vector states) { if (states == null || states.size() == 0) return "null;"; int[] set = new int[states.size()]; String retVal = "{ "; for (int i = 0; i < states.size(); ) { int k; retVal += (k = ((NfaState)states.elementAt(i)).stateName) + ", "; set[i] = k; if (i++ > 0 && i % 16 == 0) retVal += "\n"; } retVal += "};"; allNextStates.put(retVal, set); return retVal; } static int NumberOfBitsSet(long l) { int ret = 0; for (int i = 0; i < 63; i++) if (((l >> i) & 1L) != 0L) ret++; return ret; } static int OnlyOneBitSet(long l) { int oneSeen = -1; for (int i = 0; i < 64; i++) if (((l >> i) & 1L) != 0L) { if (oneSeen >= 0) return -1; oneSeen = i; } return oneSeen; } private static int ElemOccurs(int elem, int[] arr) { for (int i = arr.length; i if (arr[i] == elem) return i; return -1; } private boolean FindCommonBlocks() { if (next == null || next.usefulEpsilonMoves <= 1) return false; if (stateDone == null) stateDone = new boolean[generatedStates]; String set = next.epsilonMovesString; int[] nameSet = (int[])allNextStates.get(set); if (nameSet.length <= 2 || compositeStateTable.get(set) != null) return false; int i; int freq[] = new int[nameSet.length]; boolean live[] = new 
boolean[nameSet.length]; int[] count = new int[allNextStates.size()]; for (i = 0; i < nameSet.length; i++) { if (nameSet[i] != -1) { if (live[i] = !stateDone[nameSet[i]]) count[0]++; } } int j, blockLen = 0, commonFreq = 0; Enumeration e = allNextStates.keys(); boolean needUpdate; while (e.hasMoreElements()) { int[] tmpSet = (int[])allNextStates.get((String)e.nextElement()); if (tmpSet == nameSet) continue; needUpdate = false; for (j = 0; j < nameSet.length; j++) { if (nameSet[j] == -1) continue; if (live[j] && ElemOccurs(nameSet[j], tmpSet) >= 0) { if (!needUpdate) { needUpdate = true; commonFreq++; } count[freq[j]] count[commonFreq]++; freq[j] = commonFreq; } } if (needUpdate) { int foundFreq = -1; blockLen = 0; for (j = 0; j <= commonFreq; j++) if (count[j] > blockLen) { foundFreq = j; blockLen = count[j]; } if (blockLen <= 1) return false; for (j = 0; j < nameSet.length; j++) if (nameSet[j] != -1 && freq[j] != foundFreq) { live[j] = false; count[freq[j]] } } } if (blockLen <= 1) return false; int[] commonBlock = new int[blockLen]; int cnt = 0; //System.out.println("Common Block for " + set + " : "); for (i = 0; i < nameSet.length; i++) { if (live[i]) { if (((NfaState)indexedAllStates.elementAt(nameSet[i])).isComposite) return false; stateDone[nameSet[i]] = true; commonBlock[cnt++] = nameSet[i]; //System.out.print(nameSet[i] + ", "); } } //System.out.println(""); String s = GetStateSetString(commonBlock); e = allNextStates.keys(); Outer : while (e.hasMoreElements()) { int at; boolean firstOne = true; String stringToFix; int[] setToFix = (int[])allNextStates.get(stringToFix = (String)e.nextElement()); if (setToFix == commonBlock) continue; for (int k = 0; k < cnt; k++) { if ((at = ElemOccurs(commonBlock[k], setToFix)) >= 0) { if (!firstOne) setToFix[at] = -1; firstOne = false; } else continue Outer; } if (stateSetsToFix.get(stringToFix) == null) stateSetsToFix.put(stringToFix, setToFix); } next.usefulEpsilonMoves -= blockLen - 1; AddCompositeStateSet(s, false); 
return true; } private boolean CheckNextOccursTogether() { if (next == null || next.usefulEpsilonMoves <= 1) return true; String set = next.epsilonMovesString; int[] nameSet = (int[])allNextStates.get(set); if (nameSet.length == 1 || compositeStateTable.get(set) != null || stateSetsToFix.get(set) != null) return false; int i; Hashtable occursIn = new Hashtable(); NfaState tmp = (NfaState)allStates.elementAt(nameSet[0]); for (i = 1; i < nameSet.length; i++) { NfaState tmp1 = (NfaState)allStates.elementAt(nameSet[i]); if (tmp.inNextOf != tmp1.inNextOf) return false; } int isPresent, j; Enumeration e = allNextStates.keys(); while (e.hasMoreElements()) { String s; int[] tmpSet = (int[])allNextStates.get(s = (String)e.nextElement()); if (tmpSet == nameSet) continue; isPresent = 0; Outer: for (j = 0; j < nameSet.length; j++) { if (ElemOccurs(nameSet[j], tmpSet) >= 0) isPresent++; else if (isPresent > 0) return false; } if (isPresent == j) { if (tmpSet.length > nameSet.length) occursIn.put(s, tmpSet); /* May not need. But safe. 
*/ if (compositeStateTable.get(s) != null || stateSetsToFix.get(s) != null) return false; } else if (isPresent != 0) return false; } e = occursIn.keys(); while (e.hasMoreElements()) { String s; int[] setToFix = (int[])occursIn.get(s = (String)e.nextElement()); if (stateSetsToFix.get(s) == null) stateSetsToFix.put(s, setToFix); for (int k = 0; k < setToFix.length; k++) if (ElemOccurs(setToFix[k], nameSet) > 0) // Not >= since need the first one (0) setToFix[k] = -1; } next.usefulEpsilonMoves = 1; AddCompositeStateSet(next.epsilonMovesString, false); return true; } private static void FixStateSets() { Hashtable fixedSets = new Hashtable(); Enumeration e = stateSetsToFix.keys(); int[] tmp = new int[generatedStates]; int i; while (e.hasMoreElements()) { String s; int[] toFix = (int[])stateSetsToFix.get(s = (String)e.nextElement()); int cnt = 0; //System.out.print("Fixing : "); for (i = 0; i < toFix.length; i++) { //System.out.print(toFix[i] + ", "); if (toFix[i] != -1) tmp[cnt++] = toFix[i]; } int[] fixed = new int[cnt]; System.arraycopy(tmp, 0, fixed, 0, cnt); fixedSets.put(s, fixed); allNextStates.put(s, fixed); //System.out.println(" as " + GetStateSetString(fixed)); } for (i = 0; i < allStates.size(); i++) { NfaState tmpState = (NfaState)allStates.elementAt(i); int[] newSet; if (tmpState.next == null || tmpState.next.usefulEpsilonMoves == 0) continue; /*if (compositeStateTable.get(tmpState.next.epsilonMovesString) != null) tmpState.next.usefulEpsilonMoves = 1; else*/ if ((newSet = (int[])fixedSets.get(tmpState.next.epsilonMovesString)) != null) tmpState.FixNextStates(newSet); } } private final void FixNextStates(int[] newSet) { next.usefulEpsilonMoves = newSet.length; //next.epsilonMovesString = GetStateSetString(newSet); } private static boolean Intersect(String set1, String set2) { if (set1 == null || set2 == null) return false; int[] nameSet1 = (int[])allNextStates.get(set1); int[] nameSet2 = (int[])allNextStates.get(set2); if (nameSet1 == null || nameSet2 == 
null) return false; if (nameSet1 == nameSet2) return true; for (int i = nameSet1.length; i for (int j = nameSet2.length; j if (nameSet1[i] == nameSet2[j]) return true; return false; } private static void DumpHeadForCase(java.io.PrintWriter ostr, int byteNum) { if (byteNum == 0) ostr.println(" long l = 1L << curChar;"); else if (byteNum == 1) ostr.println(" long l = 1L << (curChar & 077);"); else { if (Options.B("JAVA_UNICODE_ESCAPE") || unicodeWarningGiven) { ostr.println(" int hiByte = (int)(curChar >> 8);"); ostr.println(" int i1 = hiByte >> 6;"); ostr.println(" long l1 = 1L << (hiByte & 077);"); } ostr.println(" int i2 = (curChar & 0xff) >> 6;"); ostr.println(" long l2 = 1L << (curChar & 077);"); } ostr.println(" MatchLoop: do"); ostr.println(" {"); ostr.println(" switch(jjstateSet[--i])"); ostr.println(" {"); } private static Vector PartitionStatesSetForAscii(int[] states, int byteNum) { int[] cardinalities = new int[states.length]; Vector original = new Vector(); Vector partition = new Vector(); NfaState tmp; original.setSize(states.length); int cnt = 0; for (int i = 0; i < states.length; i++) { tmp = (NfaState)allStates.elementAt(states[i]); if (tmp.asciiMoves[byteNum] != 0L) { int j; int p = NumberOfBitsSet(tmp.asciiMoves[byteNum]); for (j = 0; j < i; j++) if (cardinalities[j] <= p) break; for (int k = i; k > j; k cardinalities[k] = cardinalities[k - 1]; cardinalities[j] = p; original.insertElementAt(tmp, j); cnt++; } } original.setSize(cnt); while (original.size() > 0) { tmp = (NfaState)original.elementAt(0); original.removeElement(tmp); long bitVec = tmp.asciiMoves[byteNum]; Vector subSet = new Vector(); subSet.addElement(tmp); for (int j = 0; j < original.size(); j++) { NfaState tmp1 = (NfaState)original.elementAt(j); if ((tmp1.asciiMoves[byteNum] & bitVec) == 0L) { bitVec |= tmp1.asciiMoves[byteNum]; subSet.addElement(tmp1); original.removeElementAt(j } } partition.addElement(subSet); } return partition; } private String PrintNoBreak(java.io.PrintWriter 
ostr, int byteNum, boolean[] dumped) { if (inNextOf != 1) throw new Error("JavaCC Bug: Please send mail to sankar@cs.stanford.edu"); dumped[stateName] = true; if (byteNum >= 0) { if (asciiMoves[byteNum] != 0L) { ostr.println(" case " + stateName + ":"); DumpAsciiMoveForCompositeState(ostr, byteNum, false); return ""; } } else if (nonAsciiMethod != -1) { ostr.println(" case " + stateName + ":"); DumpNonAsciiMoveForCompositeState(ostr); return ""; } return (" case " + stateName + ":\n"); } private static void DumpCompositeStatesAsciiMoves(java.io.PrintWriter ostr, String key, int byteNum, boolean[] dumped) { int i; int[] nameSet = (int[])allNextStates.get(key); if (nameSet.length == 1 || dumped[StateNameForComposite(key)]) return; NfaState toBePrinted = null; int neededStates = 0; NfaState tmp; NfaState stateForCase = null; String toPrint = ""; boolean stateBlock = (stateBlockTable.get(key) != null); for (i = 0; i < nameSet.length; i++) { tmp = (NfaState)allStates.elementAt(nameSet[i]); if (tmp.asciiMoves[byteNum] != 0L) { if (neededStates++ == 1) break; else toBePrinted = tmp; } else dumped[tmp.stateName] = true; if (tmp.stateForCase != null) { if (stateForCase != null) throw new Error("JavaCC Bug: Please send mail to sankar@cs.stanford.edu : "); stateForCase = tmp.stateForCase; } } if (stateForCase != null) toPrint = stateForCase.PrintNoBreak(ostr, byteNum, dumped); if (neededStates == 0) { if (stateForCase != null && toPrint.equals("")) ostr.println(" break;"); return; } if (neededStates == 1) { //if (byteNum == 1) //System.out.println(toBePrinted.stateName + " is the only state for " //+ key + " ; and key is : " + StateNameForComposite(key)); if (!toPrint.equals("")) ostr.print(toPrint); ostr.println(" case " + StateNameForComposite(key) + ":"); if (!dumped[toBePrinted.stateName] && !stateBlock && toBePrinted.inNextOf > 1) ostr.println(" case " + toBePrinted.stateName + ":"); dumped[toBePrinted.stateName] = true; toBePrinted.DumpAsciiMove(ostr, byteNum, dumped); 
return; } Vector partition = PartitionStatesSetForAscii(nameSet, byteNum); if (!toPrint.equals("")) ostr.print(toPrint); int keyState = StateNameForComposite(key); ostr.println(" case " + keyState + ":"); if (keyState < generatedStates) dumped[keyState] = true; for (i = 0; i < partition.size(); i++) { Vector subSet = (Vector)partition.elementAt(i); for (int j = 0; j < subSet.size(); j++) { tmp = (NfaState)subSet.elementAt(j); if (stateBlock) dumped[tmp.stateName] = true; tmp.DumpAsciiMoveForCompositeState(ostr, byteNum, j != 0); } } if (stateBlock) ostr.println(" break;"); else ostr.println(" break;"); } private boolean selfLoop() { if (next == null || next.epsilonMovesString == null) return false; int[] set = (int[])allNextStates.get(next.epsilonMovesString); return ElemOccurs(stateName, set) >= 0; } private void DumpAsciiMoveForCompositeState(java.io.PrintWriter ostr, int byteNum, boolean elseNeeded) { boolean nextIntersects = selfLoop(); for (int j = 0; j < allStates.size(); j++) { NfaState temp1 = (NfaState)allStates.elementAt(j); if (this == temp1 || temp1.stateName == -1 || temp1.dummy || stateName == temp1.stateName || temp1.asciiMoves[byteNum] == 0L) continue; if (!nextIntersects && Intersect(temp1.next.epsilonMovesString, next.epsilonMovesString)) { nextIntersects = true; break; } } //System.out.println(stateName + " \'s nextIntersects : " + nextIntersects); String prefix = ""; if (asciiMoves[byteNum] != 0xffffffffffffffffL) { int oneBit = OnlyOneBitSet(asciiMoves[byteNum]); if (oneBit != -1) ostr.println(" " + (elseNeeded ? "else " : "") + "if (curChar == " + (64 * byteNum + oneBit) + ")"); else ostr.println(" " + (elseNeeded ? 
"else " : "") + "if ((0x" + Long.toHexString(asciiMoves[byteNum]) + "L & l) != 0L)"); prefix = " "; } if (kindToPrint != Integer.MAX_VALUE) { if (asciiMoves[byteNum] != 0xffffffffffffffffL) { ostr.println(" {"); } ostr.println(prefix + " if (kind > " + kindToPrint + ")"); ostr.println(prefix + " kind = " + kindToPrint + ";"); } if (next != null && next.usefulEpsilonMoves > 0) { int[] stateNames = (int[])allNextStates.get( next.epsilonMovesString); if (next.usefulEpsilonMoves == 1) { int name = stateNames[0]; if (nextIntersects) ostr.println(prefix + " jjCheckNAdd(" + name + ");"); else ostr.println(prefix + " jjstateSet[jjnewStateCnt++] = " + name + ";"); } else if (next.usefulEpsilonMoves == 2 && nextIntersects) { ostr.println(prefix + " jjCheckNAddTwoStates(" + stateNames[0] + ", " + stateNames[1] + ");"); } else { int[] indices = GetStateSetIndicesForUse(next.epsilonMovesString); boolean notTwo = (indices[0] + 1 != indices[1]); if (nextIntersects) ostr.println(prefix + " jjCheckNAddStates(" + indices[0] + (notTwo ? 
(", " + indices[1]) : "") + ");"); else ostr.println(prefix + " jjAddStates(" + indices[0] + ", " + indices[1] + ");"); } } if (asciiMoves[byteNum] != 0xffffffffffffffffL && kindToPrint != Integer.MAX_VALUE) ostr.println(" }"); } private void DumpAsciiMove(java.io.PrintWriter ostr, int byteNum, boolean dumped[]) { boolean nextIntersects = selfLoop() && isComposite; boolean onlyState = true; for (int j = 0; j < allStates.size(); j++) { NfaState temp1 = (NfaState)allStates.elementAt(j); if (this == temp1 || temp1.stateName == -1 || temp1.dummy || stateName == temp1.stateName || temp1.asciiMoves[byteNum] == 0L) continue; if (onlyState && (asciiMoves[byteNum] & temp1.asciiMoves[byteNum]) != 0L) onlyState = false; if (!nextIntersects && Intersect(temp1.next.epsilonMovesString, next.epsilonMovesString)) nextIntersects = true; if (!dumped[temp1.stateName] && !temp1.isComposite && asciiMoves[byteNum] == temp1.asciiMoves[byteNum] && kindToPrint == temp1.kindToPrint && (next.epsilonMovesString == temp1.next.epsilonMovesString || (next.epsilonMovesString != null && temp1.next.epsilonMovesString != null && next.epsilonMovesString.equals( temp1.next.epsilonMovesString)))) { dumped[temp1.stateName] = true; ostr.println(" case " + temp1.stateName + ":"); } } //if (onlyState) //nextIntersects = false; int oneBit = OnlyOneBitSet(asciiMoves[byteNum]); if (asciiMoves[byteNum] != 0xffffffffffffffffL) { if ((next == null || next.usefulEpsilonMoves == 0) && kindToPrint != Integer.MAX_VALUE) { String kindCheck = ""; if (!onlyState) kindCheck = " && kind > " + kindToPrint; if (oneBit != -1) ostr.println(" if (curChar == " + (64 * byteNum + oneBit) + kindCheck + ")"); else ostr.println(" if ((0x" + Long.toHexString(asciiMoves[byteNum]) + "L & l) != 0L" + kindCheck + ")"); ostr.println(" kind = " + kindToPrint + ";"); if (onlyState) ostr.println(" break;"); else ostr.println(" break;"); return; } } String prefix = ""; if (kindToPrint != Integer.MAX_VALUE) { if (oneBit != -1) { 
ostr.println(" if (curChar != " + (64 * byteNum + oneBit) + ")"); ostr.println(" break;"); } else if (asciiMoves[byteNum] != 0xffffffffffffffffL) { ostr.println(" if ((0x" + Long.toHexString(asciiMoves[byteNum]) + "L & l) == 0L)"); ostr.println(" break;"); } if (onlyState) { ostr.println(" kind = " + kindToPrint + ";"); } else { ostr.println(" if (kind > " + kindToPrint + ")"); ostr.println(" kind = " + kindToPrint + ";"); } } else { if (oneBit != -1) { ostr.println(" if (curChar == " + (64 * byteNum + oneBit) + ")"); prefix = " "; } else if (asciiMoves[byteNum] != 0xffffffffffffffffL) { ostr.println(" if ((0x" + Long.toHexString(asciiMoves[byteNum]) + "L & l) != 0L)"); prefix = " "; } } if (next != null && next.usefulEpsilonMoves > 0) { int[] stateNames = (int[])allNextStates.get( next.epsilonMovesString); if (next.usefulEpsilonMoves == 1) { int name = stateNames[0]; if (nextIntersects) ostr.println(prefix + " jjCheckNAdd(" + name + ");"); else ostr.println(prefix + " jjstateSet[jjnewStateCnt++] = " + name + ";"); } else if (next.usefulEpsilonMoves == 2 && nextIntersects) { ostr.println(prefix + " jjCheckNAddTwoStates(" + stateNames[0] + ", " + stateNames[1] + ");"); } else { int[] indices = GetStateSetIndicesForUse(next.epsilonMovesString); boolean notTwo = (indices[0] + 1 != indices[1]); if (nextIntersects) ostr.println(prefix + " jjCheckNAddStates(" + indices[0] + (notTwo ? 
(", " + indices[1]) : "") + ");"); else ostr.println(prefix + " jjAddStates(" + indices[0] + ", " + indices[1] + ");"); } } if (onlyState) ostr.println(" break;"); else ostr.println(" break;"); } private static void DumpAsciiMoves(java.io.PrintWriter ostr, int byteNum) { boolean[] dumped = new boolean[Math.max(generatedStates, dummyStateIndex + 1)]; Enumeration e = compositeStateTable.keys(); DumpHeadForCase(ostr, byteNum); while (e.hasMoreElements()) DumpCompositeStatesAsciiMoves(ostr, (String)e.nextElement(), byteNum, dumped); for (int i = 0; i < allStates.size(); i++) { NfaState temp = (NfaState)allStates.elementAt(i); if (dumped[temp.stateName] || temp.lexState != LexGen.lexStateIndex || !temp.HasTransitions() || temp.dummy || temp.stateName == -1) continue; String toPrint = ""; if (temp.stateForCase != null) { if (temp.inNextOf == 1) continue; if (dumped[temp.stateForCase.stateName]) continue; toPrint = (temp.stateForCase.PrintNoBreak(ostr, byteNum, dumped)); if (temp.asciiMoves[byteNum] == 0L) { if (toPrint.equals("")) ostr.println(" break;"); continue; } } if (temp.asciiMoves[byteNum] == 0L) continue; if (!toPrint.equals("")) ostr.print(toPrint); dumped[temp.stateName] = true; ostr.println(" case " + temp.stateName + ":"); temp.DumpAsciiMove(ostr, byteNum, dumped); } ostr.println(" default : break;"); ostr.println(" }"); ostr.println(" } while(i != startsAt);"); } private static void DumpCompositeStatesNonAsciiMoves(java.io.PrintWriter ostr, String key, boolean[] dumped) { int i; int[] nameSet = (int[])allNextStates.get(key); if (nameSet.length == 1 || dumped[StateNameForComposite(key)]) return; NfaState toBePrinted = null; int neededStates = 0; NfaState tmp; NfaState stateForCase = null; String toPrint = ""; boolean stateBlock = (stateBlockTable.get(key) != null); for (i = 0; i < nameSet.length; i++) { tmp = (NfaState)allStates.elementAt(nameSet[i]); if (tmp.nonAsciiMethod != -1) { if (neededStates++ == 1) break; else toBePrinted = tmp; } else 
dumped[tmp.stateName] = true; if (tmp.stateForCase != null) { if (stateForCase != null) throw new Error("JavaCC Bug: Please send mail to sankar@cs.stanford.edu : "); stateForCase = tmp.stateForCase; } } if (stateForCase != null) toPrint = stateForCase.PrintNoBreak(ostr, -1, dumped); if (neededStates == 0) { if (stateForCase != null && toPrint.equals("")) ostr.println(" break;"); return; } if (neededStates == 1) { if (!toPrint.equals("")) ostr.print(toPrint); ostr.println(" case " + StateNameForComposite(key) + ":"); if (!dumped[toBePrinted.stateName] && !stateBlock && toBePrinted.inNextOf > 1) ostr.println(" case " + toBePrinted.stateName + ":"); dumped[toBePrinted.stateName] = true; toBePrinted.DumpNonAsciiMove(ostr, dumped); return; } if (!toPrint.equals("")) ostr.print(toPrint); int keyState = StateNameForComposite(key); ostr.println(" case " + keyState + ":"); if (keyState < generatedStates) dumped[keyState] = true; for (i = 0; i < nameSet.length; i++) { tmp = (NfaState)allStates.elementAt(nameSet[i]); if (tmp.nonAsciiMethod != -1) { if (stateBlock) dumped[tmp.stateName] = true; tmp.DumpNonAsciiMoveForCompositeState(ostr); } } if (stateBlock) ostr.println(" break;"); else ostr.println(" break;"); } private final void DumpNonAsciiMoveForCompositeState(java.io.PrintWriter ostr) { boolean nextIntersects = selfLoop(); for (int j = 0; j < allStates.size(); j++) { NfaState temp1 = (NfaState)allStates.elementAt(j); if (this == temp1 || temp1.stateName == -1 || temp1.dummy || stateName == temp1.stateName || (temp1.nonAsciiMethod == -1)) continue; if (!nextIntersects && Intersect(temp1.next.epsilonMovesString, next.epsilonMovesString)) { nextIntersects = true; break; } } if (!Options.B("JAVA_UNICODE_ESCAPE") && !unicodeWarningGiven) { if (loByteVec != null && loByteVec.size() > 1) ostr.println(" if ((jjbitVec" + ((Integer)loByteVec.elementAt(1)).intValue() + "[i2" + "] & l2) != 0L)"); } else { ostr.println(" if (jjCanMove_" + nonAsciiMethod + "(hiByte, i1, i2, l1, 
l2))"); } if (kindToPrint != Integer.MAX_VALUE) { ostr.println(" {"); ostr.println(" if (kind > " + kindToPrint + ")"); ostr.println(" kind = " + kindToPrint + ";"); } if (next != null && next.usefulEpsilonMoves > 0) { int[] stateNames = (int[])allNextStates.get( next.epsilonMovesString); if (next.usefulEpsilonMoves == 1) { int name = stateNames[0]; if (nextIntersects) ostr.println(" jjCheckNAdd(" + name + ");"); else ostr.println(" jjstateSet[jjnewStateCnt++] = " + name + ";"); } else if (next.usefulEpsilonMoves == 2 && nextIntersects) { ostr.println(" jjCheckNAddTwoStates(" + stateNames[0] + ", " + stateNames[1] + ");"); } else { int[] indices = GetStateSetIndicesForUse(next.epsilonMovesString); boolean notTwo = (indices[0] + 1 != indices[1]); if (nextIntersects) ostr.println(" jjCheckNAddStates(" + indices[0] + (notTwo ? (", " + indices[1]) : "") + ");"); else ostr.println(" jjAddStates(" + indices[0] + ", " + indices[1] + ");"); } } if (kindToPrint != Integer.MAX_VALUE) ostr.println(" }"); } private final void DumpNonAsciiMove(java.io.PrintWriter ostr, boolean dumped[]) { boolean nextIntersects = selfLoop() && isComposite; for (int j = 0; j < allStates.size(); j++) { NfaState temp1 = (NfaState)allStates.elementAt(j); if (this == temp1 || temp1.stateName == -1 || temp1.dummy || stateName == temp1.stateName || (temp1.nonAsciiMethod == -1)) continue; if (!nextIntersects && Intersect(temp1.next.epsilonMovesString, next.epsilonMovesString)) nextIntersects = true; if (!dumped[temp1.stateName] && !temp1.isComposite && nonAsciiMethod == temp1.nonAsciiMethod && kindToPrint == temp1.kindToPrint && (next.epsilonMovesString == temp1.next.epsilonMovesString || (next.epsilonMovesString != null && temp1.next.epsilonMovesString != null && next.epsilonMovesString.equals(temp1.next.epsilonMovesString)))) { dumped[temp1.stateName] = true; ostr.println(" case " + temp1.stateName + ":"); } } if (next == null || next.usefulEpsilonMoves <= 0) { String kindCheck = " && kind > " + 
kindToPrint; if (!Options.B("JAVA_UNICODE_ESCAPE") && !unicodeWarningGiven) { if (loByteVec != null && loByteVec.size() > 1) ostr.println(" if ((jjbitVec" + ((Integer)loByteVec.elementAt(1)).intValue() + "[i2" + "] & l2) != 0L" + kindCheck + ")"); } else { ostr.println(" if (jjCanMove_" + nonAsciiMethod + "(hiByte, i1, i2, l1, l2)" + kindCheck + ")"); } ostr.println(" kind = " + kindToPrint + ";"); ostr.println(" break;"); return; } String prefix = " "; if (kindToPrint != Integer.MAX_VALUE) { if (!Options.B("JAVA_UNICODE_ESCAPE") && !unicodeWarningGiven) { if (loByteVec != null && loByteVec.size() > 1) { ostr.println(" if ((jjbitVec" + ((Integer)loByteVec.elementAt(1)).intValue() + "[i2" + "] & l2) == 0L)"); ostr.println(" break;"); } } else { ostr.println(" if (!jjCanMove_" + nonAsciiMethod + "(hiByte, i1, i2, l1, l2))"); ostr.println(" break;"); } ostr.println(" if (kind > " + kindToPrint + ")"); ostr.println(" kind = " + kindToPrint + ";"); prefix = ""; } else if (!Options.B("JAVA_UNICODE_ESCAPE") && !unicodeWarningGiven) { if (loByteVec != null && loByteVec.size() > 1) ostr.println(" if ((jjbitVec" + ((Integer)loByteVec.elementAt(1)).intValue() + "[i2" + "] & l2) != 0L)"); } else { ostr.println(" if (jjCanMove_" + nonAsciiMethod + "(hiByte, i1, i2, l1, l2))"); } if (next != null && next.usefulEpsilonMoves > 0) { int[] stateNames = (int[])allNextStates.get( next.epsilonMovesString); if (next.usefulEpsilonMoves == 1) { int name = stateNames[0]; if (nextIntersects) ostr.println(prefix + " jjCheckNAdd(" + name + ");"); else ostr.println(prefix + " jjstateSet[jjnewStateCnt++] = " + name + ";"); } else if (next.usefulEpsilonMoves == 2 && nextIntersects) { ostr.println(prefix + " jjCheckNAddTwoStates(" + stateNames[0] + ", " + stateNames[1] + ");"); } else { int[] indices = GetStateSetIndicesForUse(next.epsilonMovesString); boolean notTwo = (indices[0] + 1 != indices[1]); if (nextIntersects) ostr.println(prefix + " jjCheckNAddStates(" + indices[0] + (notTwo ? 
(", " + indices[1]) : "") + ");"); else ostr.println(prefix + " jjAddStates(" + indices[0] + ", " + indices[1] + ");"); } } ostr.println(" break;"); } public static void DumpCharAndRangeMoves(java.io.PrintWriter ostr) { boolean[] dumped = new boolean[Math.max(generatedStates, dummyStateIndex + 1)]; Enumeration e = compositeStateTable.keys(); int i; DumpHeadForCase(ostr, -1); while (e.hasMoreElements()) DumpCompositeStatesNonAsciiMoves(ostr, (String)e.nextElement(), dumped); for (i = 0; i < allStates.size(); i++) { NfaState temp = (NfaState)allStates.elementAt(i); if (dumped[temp.stateName] || temp.lexState != LexGen.lexStateIndex || !temp.HasTransitions() || temp.dummy || temp.stateName == -1) continue; String toPrint = ""; if (temp.stateForCase != null) { if (temp.inNextOf == 1) continue; if (dumped[temp.stateForCase.stateName]) continue; toPrint = (temp.stateForCase.PrintNoBreak(ostr, -1, dumped)); if (temp.nonAsciiMethod == -1) { if (toPrint.equals("")) ostr.println(" break;"); continue; } } if (temp.nonAsciiMethod == -1) continue; if (!toPrint.equals("")) ostr.print(toPrint); dumped[temp.stateName] = true; //System.out.println("case : " + temp.stateName); ostr.println(" case " + temp.stateName + ":"); temp.DumpNonAsciiMove(ostr, dumped); } ostr.println(" default : break;"); ostr.println(" }"); ostr.println(" } while(i != startsAt);"); } public static void DumpNonAsciiMoveMethods(java.io.PrintWriter ostr) { if (!Options.B("JAVA_UNICODE_ESCAPE") && !unicodeWarningGiven) return; if (nonAsciiTableForMethod.size() <= 0) return; for (int i = 0; i < nonAsciiTableForMethod.size(); i++) { NfaState tmp = (NfaState)nonAsciiTableForMethod.elementAt(i); tmp.DumpNonAsciiMoveMethod(ostr); } } void DumpNonAsciiMoveMethod(java.io.PrintWriter ostr) { int j; ostr.println("private static final boolean jjCanMove_" + nonAsciiMethod + "(int hiByte, int i1, int i2, long l1, long l2)"); ostr.println("{"); ostr.println(" switch(hiByte)"); ostr.println(" {"); if (loByteVec != null && 
loByteVec.size() > 0) { for (j = 0; j < loByteVec.size(); j += 2) { ostr.println(" case " + ((Integer)loByteVec.elementAt(j)).intValue() + ":"); if (!AllBitsSet((String)allBitVectors.elementAt( ((Integer)loByteVec.elementAt(j + 1)).intValue()))) { ostr.println(" return ((jjbitVec" + ((Integer)loByteVec.elementAt(j + 1)).intValue() + "[i2" + "] & l2) != 0L);"); } else ostr.println(" return true;"); } } ostr.println(" default : "); if (nonAsciiMoveIndices != null && (j = nonAsciiMoveIndices.length) > 0) { do { if (!AllBitsSet((String)allBitVectors.elementAt( nonAsciiMoveIndices[j - 2]))) ostr.println(" if ((jjbitVec" + nonAsciiMoveIndices[j - 2] + "[i1] & l1) != 0L)"); if (!AllBitsSet((String)allBitVectors.elementAt( nonAsciiMoveIndices[j - 1]))) { ostr.println(" if ((jjbitVec" + nonAsciiMoveIndices[j - 1] + "[i2] & l2) == 0L)"); ostr.println(" return false;"); ostr.println(" else"); } ostr.println(" return true;"); } while ((j -= 2) > 0); } ostr.println(" return false;"); ostr.println(" }"); ostr.println("}"); } private static void ReArrange() { Vector v = allStates; allStates = new Vector(); allStates.setSize(generatedStates); for (int j = 0; j < v.size(); j++) { NfaState tmp = (NfaState)v.elementAt(j); if (tmp.stateName != -1 && !tmp.dummy) allStates.setElementAt(tmp, tmp.stateName); } } private static boolean boilerPlateDumped = false; static void PrintBoilerPlate(java.io.PrintWriter ostr) { ostr.println((Options.B("STATIC") ? "static " : "") + "private final void " + "jjCheckNAdd(int state)"); ostr.println("{"); ostr.println(" if (jjrounds[state] != jjround)"); ostr.println(" {"); ostr.println(" jjstateSet[jjnewStateCnt++] = state;"); ostr.println(" jjrounds[state] = jjround;"); ostr.println(" }"); ostr.println("}"); ostr.println((Options.B("STATIC") ? 
"static " : "") + "private final void " + "jjAddStates(int start, int end)"); ostr.println("{"); ostr.println(" do {"); ostr.println(" jjstateSet[jjnewStateCnt++] = jjnextStates[start];"); ostr.println(" } while (start++ != end);"); ostr.println("}"); ostr.println((Options.B("STATIC") ? "static " : "") + "private final void " + "jjCheckNAddTwoStates(int state1, int state2)"); ostr.println("{"); ostr.println(" jjCheckNAdd(state1);"); ostr.println(" jjCheckNAdd(state2);"); ostr.println("}"); ostr.println((Options.B("STATIC") ? "static " : "") + "private final void " + "jjCheckNAddStates(int start, int end)"); ostr.println("{"); ostr.println(" do {"); ostr.println(" jjCheckNAdd(jjnextStates[start]);"); ostr.println(" } while (start++ != end);"); ostr.println("}"); ostr.println((Options.B("STATIC") ? "static " : "") + "private final void " + "jjCheckNAddStates(int start)"); ostr.println("{"); ostr.println(" jjCheckNAdd(jjnextStates[start]);"); ostr.println(" jjCheckNAdd(jjnextStates[start + 1]);"); ostr.println("}"); } private static void FindStatesWithNoBreak() { Hashtable printed = new Hashtable(); boolean[] put = new boolean[generatedStates]; int cnt = 0; int i, j, foundAt = 0; Outer : for (j = 0; j < allStates.size(); j++) { NfaState stateForCase = null; NfaState tmpState = (NfaState)allStates.elementAt(j); if (tmpState.stateName == -1 || tmpState.dummy || !tmpState.UsefulState() || tmpState.next == null || tmpState.next.usefulEpsilonMoves < 1) continue; String s = tmpState.next.epsilonMovesString; if (compositeStateTable.get(s) != null || printed.get(s) != null) continue; printed.put(s, s); int[] nexts = (int[])allNextStates.get(s); if (nexts.length == 1) continue; int state = cnt; //System.out.println("State " + tmpState.stateName + " : " + s); for (i = 0; i < nexts.length; i++) { if ((state = nexts[i]) == -1) continue; NfaState tmp = (NfaState)allStates.elementAt(state); if (!tmp.isComposite && tmp.inNextOf == 1) { if (put[state]) throw new Error("JavaCC Bug: 
Please send mail to sankar@cs.stanford.edu"); foundAt = i; cnt++; stateForCase = tmp; put[state] = true; //System.out.print(state + " : " + tmp.inNextOf + ", "); break; } } //System.out.println(""); if (stateForCase == null) continue; for (i = 0; i < nexts.length; i++) { if ((state = nexts[i]) == -1) continue; NfaState tmp = (NfaState)allStates.elementAt(state); if (!put[state] && tmp.inNextOf > 1 && !tmp.isComposite && tmp.stateForCase == null) { cnt++; nexts[i] = -1; put[state] = true; int toSwap = nexts[0]; nexts[0] = nexts[foundAt]; nexts[foundAt] = toSwap; tmp.stateForCase = stateForCase; stateForCase.stateForCase = tmp; stateSetsToFix.put(s, nexts); //System.out.println("For : " + s + "; " + stateForCase.stateName + //" and " + tmp.stateName); continue Outer; } } for (i = 0; i < nexts.length; i++) { if ((state = nexts[i]) == -1) continue; NfaState tmp = (NfaState)allStates.elementAt(state); if (tmp.inNextOf <= 1) put[state] = false; } } } static int[][] kinds; static int[][][] statesForState; public static void DumpMoveNfa(java.io.PrintWriter ostr) { if (!boilerPlateDumped) PrintBoilerPlate(ostr); boilerPlateDumped = true; int i; int[] kindsForStates = null; if (kinds == null) { kinds = new int[LexGen.maxLexStates][]; statesForState = new int[LexGen.maxLexStates][][]; } ReArrange(); for (i = 0; i < allStates.size(); i++) { NfaState temp = (NfaState)allStates.elementAt(i); if (temp.lexState != LexGen.lexStateIndex || !temp.HasTransitions() || temp.dummy || temp.stateName == -1) continue; /* if (Options.B("OPTIMIZE_TOKEN_MANAGER") && temp.next != null && temp.next.usefulEpsilonMoves > 1) temp.CheckNextOccursTogether(); */ if (kindsForStates == null) { kindsForStates = new int[generatedStates]; statesForState[LexGen.lexStateIndex] = new int[Math.max(generatedStates, dummyStateIndex + 1)][]; } kindsForStates[temp.stateName] = temp.lookingFor; statesForState[LexGen.lexStateIndex][temp.stateName] = temp.compositeStates; temp.GenerateNonAsciiMoves(ostr); } 
Enumeration e = stateNameForComposite.keys(); while (e.hasMoreElements()) { String s = (String)e.nextElement(); int state = ((Integer)stateNameForComposite.get(s)).intValue(); if (state >= generatedStates) statesForState[LexGen.lexStateIndex][state] = (int[])allNextStates.get(s); } /* if (Options.B("OPTIMIZE_TOKEN_MANAGER")) { for (i = 0; i < allStates.size(); i++) { NfaState temp = (NfaState)allStates.elementAt(i); while (temp.FindCommonBlocks()) ; } FindStatesWithNoBreak(); } */ if (stateSetsToFix.size() != 0) FixStateSets(); kinds[LexGen.lexStateIndex] = kindsForStates; ostr.println((Options.B("STATIC") ? "static " : "") + "private final int " + "jjMoveNfa" + LexGen.lexStateSuffix + "(int startState, int curPos)"); ostr.println("{"); if (generatedStates == 0) { ostr.println(" return curPos;"); ostr.println("}"); return; } if (LexGen.mixed[LexGen.lexStateIndex]) { ostr.println(" int strKind = jjmatchedKind;"); ostr.println(" int strPos = jjmatchedPos;"); ostr.println(" int seenUpto;"); ostr.println(" input_stream.backup(seenUpto = curPos + 1);"); ostr.println(" try { curChar = input_stream.readChar(); }"); ostr.println(" catch(java.io.IOException e) { throw new Error(\"Internal Error\"); }"); ostr.println(" curPos = 0;"); } ostr.println(" int[] nextStates;"); ostr.println(" int startsAt = 0;"); ostr.println(" jjnewStateCnt = " + generatedStates + ";"); ostr.println(" int i = 1;"); ostr.println(" jjstateSet[0] = startState;"); if (Options.B("DEBUG_TOKEN_MANAGER")) ostr.println(" debugStream.println(\" Starting NFA to match one of : \" + " + "jjKindsForStateVector(curLexState, jjstateSet, 0, 1));"); if (Options.B("DEBUG_TOKEN_MANAGER")) ostr.println(" debugStream.println(" + (LexGen.maxLexStates > 1 ? 
"\"<\" + lexStateNames[curLexState] + \">\" + " : "") + "\"Current character : \" + " + "TokenMgrError.addEscapes(String.valueOf(curChar)) + \" (\" + (int)curChar + \")\");"); ostr.println(" int j, kind = 0x" + Integer.toHexString(Integer.MAX_VALUE) + ";"); ostr.println(" for (;;)"); ostr.println(" {"); ostr.println(" if (++jjround == 0x" + Integer.toHexString(Integer.MAX_VALUE) + ")"); ostr.println(" ReInitRounds();"); ostr.println(" if (curChar < 64)"); ostr.println(" {"); DumpAsciiMoves(ostr, 0); ostr.println(" }"); ostr.println(" else if (curChar < 128)"); ostr.println(" {"); DumpAsciiMoves(ostr, 1); ostr.println(" }"); ostr.println(" else"); ostr.println(" {"); DumpCharAndRangeMoves(ostr); ostr.println(" }"); ostr.println(" if (kind != 0x" + Integer.toHexString(Integer.MAX_VALUE) + ")"); ostr.println(" {"); ostr.println(" jjmatchedKind = kind;"); ostr.println(" jjmatchedPos = curPos;"); ostr.println(" kind = 0x" + Integer.toHexString(Integer.MAX_VALUE) + ";"); ostr.println(" }"); ostr.println(" ++curPos;"); if (Options.B("DEBUG_TOKEN_MANAGER")) { ostr.println(" if (jjmatchedKind != 0 && jjmatchedKind != 0x" + Integer.toHexString(Integer.MAX_VALUE) + ")"); ostr.println(" debugStream.println(\" Currently matched the first \" + (jjmatchedPos + 1) + \" characters as a \" + tokenImage[jjmatchedKind] + \" token.\");"); } ostr.println(" if ((i = jjnewStateCnt) == (startsAt = " + generatedStates + " - (jjnewStateCnt = startsAt)))"); if (LexGen.mixed[LexGen.lexStateIndex]) ostr.println(" break;"); else ostr.println(" return curPos;"); if (Options.B("DEBUG_TOKEN_MANAGER")) ostr.println(" debugStream.println(\" Possible kinds of longer matches : \" + " + "jjKindsForStateVector(curLexState, jjstateSet, startsAt, i));"); ostr.println(" try { curChar = input_stream.readChar(); }"); if (LexGen.mixed[LexGen.lexStateIndex]) ostr.println(" catch(java.io.IOException e) { break; }"); else ostr.println(" catch(java.io.IOException e) { return curPos; }"); if 
(Options.B("DEBUG_TOKEN_MANAGER")) ostr.println(" debugStream.println(" + (LexGen.maxLexStates > 1 ? "\"<\" + lexStateNames[curLexState] + \">\" + " : "") + "\"Current character : \" + " + "TokenMgrError.addEscapes(String.valueOf(curChar)) + \" (\" + (int)curChar + \")\");"); ostr.println(" }"); if (LexGen.mixed[LexGen.lexStateIndex]) { ostr.println(" if (jjmatchedPos > strPos)"); ostr.println(" return curPos;"); ostr.println(""); ostr.println(" int toRet = Math.max(curPos, seenUpto);"); ostr.println(""); ostr.println(" if (curPos < toRet)"); ostr.println(" for (i = toRet - Math.min(curPos, seenUpto); i ostr.println(" try { curChar = input_stream.readChar(); }"); ostr.println(" catch(java.io.IOException e) { throw new Error(\"Internal Error : Please send a bug report.\"); }"); ostr.println(""); ostr.println(" if (jjmatchedPos < strPos)"); ostr.println(" {"); ostr.println(" jjmatchedKind = strKind;"); ostr.println(" jjmatchedPos = strPos;"); ostr.println(" }"); ostr.println(" else if (jjmatchedPos == strPos && jjmatchedKind > strKind)"); ostr.println(" jjmatchedKind = strKind;"); ostr.println(""); ostr.println(" return toRet;"); } ostr.println("}"); allStates.removeAllElements(); } public static void DumpStatesForState(java.io.PrintWriter ostr) { ostr.print("protected static final int[][][] statesForState = "); if (statesForState == null) { ostr.println("null;"); return; } else ostr.println("{"); for (int i = 0; i < statesForState.length; i++) { if (statesForState[i] == null) { ostr.println(" null, "); continue; } ostr.println(" {"); for (int j = 0; j < statesForState[i].length; j++) { int[] stateSet = statesForState[i][j]; if (stateSet == null) { ostr.println(" { " + j + " }, "); continue; } ostr.print(" { "); for (int k = 0; k < stateSet.length; k++) ostr.print(stateSet[k] + ", "); ostr.println("},"); } ostr.println(" },"); } ostr.println("\n};"); } public static void DumpStatesForKind(java.io.PrintWriter ostr) { DumpStatesForState(ostr); boolean moreThanOne = 
false; int cnt = 0; ostr.print("protected static final int[][] kindForState = "); if (kinds == null) { ostr.println("null;"); return; } else ostr.println("{"); for (int i = 0; i < kinds.length; i++) { if (moreThanOne) ostr.println(", "); moreThanOne = true; if (kinds[i] == null) ostr.println("null"); else { cnt = 0; ostr.print("{ "); for (int j = 0; j < kinds[i].length; j++) { if (cnt++ > 0) ostr.print(", "); if (cnt % 15 == 0) ostr.print("\n "); ostr.print(kinds[i][j]); } ostr.print("}"); } } ostr.println("\n};"); } public static void reInit() { unicodeWarningGiven = false; generatedStates = 0; idCnt = 0; lohiByteCnt = (int)0; dummyStateIndex = -1; done = false; mark = null; stateDone = null; nonAsciiIntersections = new boolean[20][20]; allStates = new Vector(); indexedAllStates = new Vector(); nonAsciiTableForMethod = new Vector(); equivStatesTable = new Hashtable(); allNextStates = new Hashtable(); lohiByteTab = new Hashtable(); stateNameForComposite = new Hashtable(); compositeStateTable = new Hashtable(); stateBlockTable = new Hashtable(); stateSetsToFix = new Hashtable(); allBitVectors = new Vector(); tmpIndices = new int[512]; allBits = "{\n 0xffffffffffffffffL, " + "0xffffffffffffffffL, " + "0xffffffffffffffffL, " + "0xffffffffffffffffL\n};"; tableToDump = new Hashtable(); orderedStateSet = new Vector(); lastIndex = 0; boilerPlateDumped = false; kinds = null; statesForState = null; } }
package org.jgroups.stack;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jgroups.Event;
import org.jgroups.util.Queue;

import java.util.Map;
import java.util.Properties;
import java.util.Vector;

/**
 * The Protocol class provides a set of common services for protocol layers. Each layer has to
 * be a subclass of Protocol and override a number of methods (typically just <code>up()</code>,
 * <code>down()</code> and <code>getName()</code>). Layers are stacked in a certain order to form
 * a protocol stack. <a href=org.jgroups.Event.html>Events</a> are passed from lower
 * layers to upper ones and vice versa. E.g. a Message received by the UDP layer at the bottom
 * will be passed to its higher layer as an Event. That layer will in turn pass the Event to
 * its layer and so on, until a layer handles the Message and sends a response or discards it,
 * the former resulting in another Event being passed down the stack.<p>
 * Each layer has 2 FIFO queues, one for up Events and one for down Events. When an Event is
 * received by a layer (calling the internal upcall <code>ReceiveUpEvent</code>), it is placed
 * in the up-queue where it will be retrieved by the up-handler thread which will invoke method
 * <code>Up</code> of the layer. The same applies for Events traveling down the stack. Handling
 * of the up-handler and down-handler threads and the 2 FIFO queues is done by the Protocol
 * class, subclasses will almost never have to override this behavior.<p>
 * The important thing to bear in mind is that Events have to be passed on between layers in FIFO
 * order which is guaranteed by the Protocol implementation and must be guaranteed by subclasses
 * implementing their own Event queuing.<p>
 * <b>Note that each class implementing interface Protocol MUST provide an empty, public
 * constructor !</b>
 *
 * @author Bela Ban
 * @version $Id: Protocol.java,v 1.43 2006/12/19 09:00:34 belaban Exp $
 */
public abstract class Protocol {
    protected final Properties props=new Properties();
    // Neighbors in the stack; wired up by the ProtocolStack via setUpProtocol()/setDownProtocol().
    protected Protocol         up_prot=null, down_prot=null;
    protected ProtocolStack    stack=null;
    protected ProtocolObserver observer=null;   // hook for debugger
    protected boolean          stats=true;  // determines whether to collect statistics (and expose them via JMX)
    protected final Log        log=LogFactory.getLog(this.getClass());
    // Cached log-level flags; mutable via setTrace()/setWarn().
    protected boolean          trace=log.isTraceEnabled();
    protected boolean          warn=log.isWarnEnabled();


    /**
     * Configures the protocol initially. A configuration string consists of name=value
     * items, separated by a ';' (semicolon), e.g.:<pre>
     * "loopback=false;unicast_inport=4444"
     * </pre>
     *
     * @return true (subclasses return false on a bad property to abort stack setup)
     */
    public boolean setProperties(Properties props) {
        if(props != null)
            this.props.putAll(props);
        return true;
    }


    /** Called by Configurator. Removes 2 properties which are used by the Protocol directly and then
     * calls setProperties(), which might invoke the setProperties() method of the actual protocol instance.
     */
    public boolean setPropertiesInternal(Properties props) {
        this.props.putAll(props);

        // The thread-related properties are obsolete: warn and strip each one
        // before handing the remainder to the subclass.
        String str=props.getProperty("down_thread");
        if(str != null) {
            if(warn)
                log.warn("down_thread was deprecated and is ignored");
            props.remove("down_thread");
        }

        str=props.getProperty("down_thread_prio");
        if(str != null) {
            if(warn)
                log.warn("down_thread_prio was deprecated and is ignored");
            props.remove("down_thread_prio");
        }

        str=props.getProperty("up_thread");
        if(str != null) {
            if(warn)
                log.warn("up_thread was deprecated and is ignored");
            props.remove("up_thread");
        }

        str=props.getProperty("up_thread_prio");
        if(str != null) {
            if(warn)
                log.warn("up_thread_prio was deprecated and is ignored");
            props.remove("up_thread_prio");
        }

        str=props.getProperty("stats");
        if(str != null) {
            stats=Boolean.valueOf(str).booleanValue();
            props.remove("stats");
        }

        return setProperties(props);
    }


    public Properties getProperties() {
        return props;
    }

    public boolean isTrace() {
        return trace;
    }

    public void setTrace(boolean trace) {
        this.trace=trace;
    }

    public boolean isWarn() {
        return warn;
    }

    public void setWarn(boolean warn) {
        this.warn=warn;
    }

    /** @deprecated up_thread was removed
     *  @return false by default */
    public boolean upThreadEnabled() {
        return false;
    }

    /**
     * @deprecated down thread was removed
     * @return boolean False by default
     */
    public boolean downThreadEnabled() {
        return false;
    }

    public boolean statsEnabled() {
        return stats;
    }

    public void enableStats(boolean flag) {
        stats=flag;
    }

    // No-op by default; subclasses collecting statistics override these.
    public void resetStats() {
        ;
    }

    public String printStats() {
        return null;
    }

    public Map dumpStats() {
        return null;
    }

    /** Installs a debugger hook and registers this protocol with it. */
    public void setObserver(ProtocolObserver observer) {
        this.observer=observer;
        observer.setProtocol(this);
    }

    /**
     * Called after instance has been created (null constructor) and before protocol is started.
     * Properties are already set. Other protocols are not yet connected and events cannot yet be sent.
     * @exception Exception Thrown if protocol cannot be initialized successfully. This will cause the
     *                      ProtocolStack to fail, so the channel constructor will throw an exception
     */
    public void init() throws Exception {
    }

    /**
     * This method is called on a {@link org.jgroups.Channel#connect(String)}. Starts work.
     * Protocols are connected and queues are ready to receive events.
     * Will be called <em>from bottom to top</em>. This call will replace
     * the <b>START</b> and <b>START_OK</b> events.
     * @exception Exception Thrown if protocol cannot be started successfully. This will cause the ProtocolStack
     *                      to fail, so {@link org.jgroups.Channel#connect(String)} will throw an exception
     */
    public void start() throws Exception {
    }

    /**
     * This method is called on a {@link org.jgroups.Channel#disconnect()}. Stops work (e.g. by closing multicast socket).
     * Will be called <em>from top to bottom</em>. This means that at the time of the method invocation the
     * neighbor protocol below is still working. This method will replace the
     * <b>STOP</b>, <b>STOP_OK</b>, <b>CLEANUP</b> and <b>CLEANUP_OK</b> events. The ProtocolStack guarantees that
     * when this method is called all messages in the down queue will have been flushed
     */
    public void stop() {
    }

    /**
     * This method is called on a {@link org.jgroups.Channel#close()}.
     * Does some cleanup; after the call the VM will terminate
     */
    public void destroy() {
    }

    public Queue getUpQueue()   {throw new UnsupportedOperationException("queues were removed in 2.5");}  // used by Debugger (ProtocolView)
    public Queue getDownQueue() {throw new UnsupportedOperationException("queues were removed in 2.5");}  // used by Debugger (ProtocolView)

    /** List of events that are required to be answered by some layer above.
        @return Vector (of Integers) */
    public Vector requiredUpServices() {
        return null;
    }

    /** List of events that are required to be answered by some layer below.
        @return Vector (of Integers) */
    public Vector requiredDownServices() {
        return null;
    }

    /** List of events that are provided to layers above (they will be handled when sent down from above).
        @return Vector (of Integers) */
    public Vector providedUpServices() {
        return null;
    }

    /** List of events that are provided to layers below (they will be handled when sent down from below).
        @return Vector (of Integers) */
    public Vector providedDownServices() {
        return null;
    }

    public abstract String getName();   // all protocol names have to be unique !

    public Protocol getUpProtocol() {
        return up_prot;
    }

    public Protocol getDownProtocol() {
        return down_prot;
    }

    public void setUpProtocol(Protocol up_prot) {
        this.up_prot=up_prot;
    }

    public void setDownProtocol(Protocol down_prot) {
        this.down_prot=down_prot;
    }

    public void setProtocolStack(ProtocolStack stack) {
        this.stack=stack;
    }


    /**
     * Internal method, should not be called by clients. Used by ProtocolStack. I would have
     * used the 'friends' modifier, but this is available only in C++ ... If the up_handler thread
     * is not available (down_thread == false), then directly call the up() method: we will run on the
     * caller's thread (e.g. the protocol layer below us).
     */
    protected void receiveUpEvent(Event evt) {
        if(observer != null) {                    // call debugger hook (if installed)
            if(observer.up(evt) == false) {  // false means discard event
                return;
            }
        }
        up(evt);
    }

    /**
     * Internal method, should not be called by clients. Used by ProtocolStack. I would have
     * used the 'friends' modifier, but this is available only in C++ ... If the down_handler thread
     * is not available (down_thread == false), then directly call the down() method: we will run on the
     * caller's thread (e.g. the protocol layer above us).
     */
    protected void receiveDownEvent(Event evt) {
        if(observer != null) {                    // call debugger hook (if installed)
            if(observer.down(evt) == false) {  // false means discard event
                return;
            }
        }
        // START/STOP get special handling before being forwarded further down.
        int type=evt.getType();
        if(type == Event.START || type == Event.STOP) {
            if(handleSpecialDownEvent(evt) == false)
                return;
        }
        down(evt);
    }

    /**
     * Causes the event to be forwarded to the next layer up in the hierarchy. Typically called
     * by the implementation of <code>up()</code> (when done).
     */
    public void passUp(Event evt) {
        up_prot.receiveUpEvent(evt);
    }

    /**
     * Causes the event to be forwarded to the next layer down in the hierarchy. Typically called
     * by the implementation of <code>down()</code> (when done).
     */
    public void passDown(Event evt) {
        down_prot.receiveDownEvent(evt);
    }


    /**
     * An event was received from the layer below. Usually the current layer will want to examine
     * the event type and - depending on its type - perform some computation
     * (e.g. removing headers from a MSG event type, or updating the internal membership list
     * when receiving a VIEW_CHANGE event).
     * Finally the event is either a) discarded, or b) an event is sent down
     * the stack using <code>passDown()</code> or c) the event (or another event) is sent up
     * the stack using <code>passUp()</code>.
     */
    public void up(Event evt) {
        passUp(evt);
    }

    /**
     * An event is to be sent down the stack. The layer may want to examine its type and perform
     * some action on it, depending on the event's type. If the event is a message MSG, then
     * the layer may need to add a header to it (or do nothing at all) before sending it down
     * the stack using <code>passDown()</code>. In case of a GET_ADDRESS event (which tries to
     * retrieve the stack's address from one of the bottom layers), the layer may need to send
     * a new response event back up the stack using <code>passUp()</code>.
*/ public void down(Event evt) { passDown(evt); } /** These are special internal events that should not be handled by protocols * @return boolean True: the event should be passed further down the stack. False: the event should * be discarded (not passed down the stack) */ protected boolean handleSpecialDownEvent(Event evt) { switch(evt.getType()) { case Event.START: try { start(); // if we're the transport protocol, reply with a START_OK up the stack if(down_prot == null) { passUp(new Event(Event.START_OK, Boolean.TRUE)); return false; // don't pass down the stack } else return true; // pass down the stack } catch(Exception e) { passUp(new Event(Event.START_OK, new Exception("exception caused by " + getName() + ".start()", e))); return false; } case Event.STOP: stop(); if(down_prot == null) { passUp(new Event(Event.STOP_OK, Boolean.TRUE)); return false; // don't pass down the stack } else return true; // pass down the stack default: return true; // pass down by default } } }
/* * $Id: StringPool.java,v 1.5 2014-05-30 07:18:25 tlipkis Exp $ */ package org.lockss.util; import java.util.*; import org.lockss.config.*; /** * Named intern() pools for Strings. Similer to String.intern(), but use * of context-dependent pools should allow for smaller maps with less * waste. */ public class StringPool { static final String PREFIX = Configuration.PREFIX + "stringPool."; /** List of mep keys whose value should be interned in the named pool. * Defaults are pool-specific; See {@link org.lockss.StringPool} static * fields. */ static final String PARAM_MAP_KEYS = PREFIX + "<poolname>.mapKeys"; static final String SUFFIX_MAP_KEYS = "mapKeys"; /** Pool for AU config property names. */ public static StringPool AU_CONFIG_PROPS = new StringPool("AU config props").setMapKeys(ListUtil.list("year")); /** Pool for TdbAu props. */ public static StringPool TDBAU_PROPS = new StringPool("TdbAu props").setMapKeys(ListUtil.list("type", "issn", "issn1", "eissn")); /** Pool for TdbAu attrs. */ public static StringPool TDBAU_ATTRS = new StringPool("TdbAu attrs").setMapKeys(ListUtil.list("publisher", "au_feature_key", "year", "rights")); /** Pool for HTTP header names. */ public static StringPool HTTP_HEADERS = new StringPool("HTTP headers"); /** Pool for plugin IDs. */ public static StringPool PLUGIN_IDS = new StringPool("Plugin IDs"); /** Pool for feature version strings. */ public static StringPool FEATURE_VERSIONS = new StringPool("Feature versions"); /** Pool for PropertyTree keys and subkeys. 
*/ public static StringPool PROPERTY_TREE = new StringPool("Property trees"); private static Map<String,StringPool> pools; private String name; private Map<String,String> map; private boolean sealed = false; private Set mapKeys = Collections.EMPTY_SET; private int hits = 0; public StringPool(String name) { this(name, 20); } /** Create a StringPool with a name and initial size */ public StringPool(String name, int initialSize) { this.name = name; map = new HashMap<String,String>(initialSize); registerPool(name, this); } private static void registerPool(String name, StringPool pool) { if (pools == null) { pools = new HashMap<String,StringPool>(); } pools.put(name, pool); Configuration poolConf = ConfigManager.getCurrentConfig().getConfigTree(PREFIX + "." + name); if (poolConf != null) { pool.setPoolConfig(poolConf); } } /** Called by org.lockss.config.MiscConfig */ public static void setConfig(Configuration config, Configuration oldConfig, Configuration.Differences diffs) { if (diffs.contains(PREFIX)) { Configuration allPools = config.getConfigTree(PREFIX); for (Iterator<String> iter = allPools.nodeIterator(); iter.hasNext(); ) { String poolName = iter.next(); StringPool pool = getPool(poolName); if (pool != null) { Configuration poolConf = allPools.getConfigTree(poolName); if (poolConf != null) { pool.setPoolConfig(poolConf); } } } } } public void setPoolConfig(Configuration poolConfig) { setMapKeys(poolConfig.getList(SUFFIX_MAP_KEYS, Collections.EMPTY_LIST)); } public static StringPool getPool(String name) { return pools.get(name); } private StringPool setMapKeys(Collection<String> keys) { mapKeys = SetUtil.theSet(keys); return this; } /** Return the instance of the string already in the pool, if any, else * add this instance and return it. * @param str the String to be interned. If null, null is returned. 
*/ public synchronized String intern(String str) { if (str == null) { return str; } String res = map.get(str); if (res != null) { hits++; return res; } if (sealed) { return str; } map.put(str, str); return str; } /** Intern the value iff the key is a member of this StringPool's set of * map keys whose values should be interned. * @param key the map key * @param val the String to be stored in the map. * @return the interned value if the key is contained in the set of map * keys whose values should be interned, else the original value. */ public synchronized String internMapValue(String key, String val) { if (mapKeys.contains(key)) { return intern(val); } else { return val; } } /** Seal the pool, so that no new additions will be made. If {@link * #intern(String)} is called with a string that matches an existing * entry the interned entry will be returned, else the argument. * Intended for contexts in which a predictable standard set of strings * appear as well as one-off strings that would needlessly fill the * pool. */ public void seal() { sealed = true; } private int sumStringChars() { int res = 0; for (String val : map.values()) { res += val.length(); } return res; } public String toString() { return "[StringPool " + name + ", " + map.size() + " entries]"; } public String toStats() { return "[StringPool " + name + ", " + map.size() + " entries, " + hits + " hits, " + sumStringChars() + " total chars]"; } public static String allStats() { StringBuilder sb = new StringBuilder(); for (StringPool pool : pools.values()) { sb.append(pool.toStats()); sb.append("\n"); } return sb.toString(); } }
package org.nutz.mvc.impl;

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.nutz.ioc.annotation.InjectName;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.json.Json;
import org.nutz.lang.Lang;
import org.nutz.lang.Mirror;
import org.nutz.lang.Strings;
import org.nutz.lang.segment.Segments;
import org.nutz.lang.util.Context;
import org.nutz.log.Log;
import org.nutz.log.Logs;
import org.nutz.mvc.ActionFilter;
import org.nutz.mvc.ActionInfo;
import org.nutz.mvc.HttpAdaptor;
import org.nutz.mvc.NutConfig;
import org.nutz.mvc.ObjectInfo;
import org.nutz.mvc.annotation.AdaptBy;
import org.nutz.mvc.annotation.At;
import org.nutz.mvc.annotation.By;
import org.nutz.mvc.annotation.Chain;
import org.nutz.mvc.annotation.DELETE;
import org.nutz.mvc.annotation.Encoding;
import org.nutz.mvc.annotation.Fail;
import org.nutz.mvc.annotation.Filters;
import org.nutz.mvc.annotation.GET;
import org.nutz.mvc.annotation.Modules;
import org.nutz.mvc.annotation.Ok;
import org.nutz.mvc.annotation.POST;
import org.nutz.mvc.annotation.PUT;
import org.nutz.mvc.annotation.PathMap;
import org.nutz.resource.Scans;

/**
 * Helper routines for the MVC loaders: they translate the annotations on
 * module classes and entry methods into {@link ActionInfo} objects, scan
 * packages for module classes, and instantiate configured objects.
 */
public abstract class Loadings {

    private static final Log log = Logs.get();

    /**
     * Builds the class-level ActionInfo from a module class's annotations
     * (encoding, adaptor, filters, path map, ok/fail views, base path,
     * chain, Ioc bean name).
     */
    static ActionInfo createInfo(Class<?> type) {
        ActionInfo ai = new ActionInfo();
        evalEncoding(ai, type.getAnnotation(Encoding.class));
        evalHttpAdaptor(ai, type.getAnnotation(AdaptBy.class));
        evalActionFilters(ai, type.getAnnotation(Filters.class));
        evalPathMap(ai, type.getAnnotation(PathMap.class));
        evalOk(ai, type.getAnnotation(Ok.class));
        evalFail(ai, type.getAnnotation(Fail.class));
        evalAt(ai, type.getAnnotation(At.class), type.getSimpleName());
        evalActionChainMaker(ai, type.getAnnotation(Chain.class));
        evalModule(ai, type);
        return ai;
    }

    /**
     * Builds the method-level ActionInfo from an entry method's
     * annotations.  Note it does NOT call evalPathMap(), so parsePath()
     * here sees the static 'path' map left by the most recent class-level
     * createInfo() call (see the note on the 'path' field below).
     */
    static ActionInfo createInfo(Method method) {
        ActionInfo ai = new ActionInfo();
        evalEncoding(ai, method.getAnnotation(Encoding.class));
        evalHttpAdaptor(ai, method.getAnnotation(AdaptBy.class));
        evalActionFilters(ai, method.getAnnotation(Filters.class));
        evalOk(ai, method.getAnnotation(Ok.class));
        evalFail(ai, method.getAnnotation(Fail.class));
        evalAt(ai, method.getAnnotation(At.class), method.getName());
        evalActionChainMaker(ai, method.getAnnotation(Chain.class));
        evalHttpMethod(ai, method);
        ai.setMethod(method);
        return ai;
    }

    /**
     * Collects the set of module classes reachable from the main module:
     * the main module itself, every class listed in @Modules, and - when
     * scanPackage is true - every module class found by scanning the
     * packages of those classes.
     */
    static Set<Class<?>> scanModules(Class<?> mainModule) {
        Modules ann = mainModule.getAnnotation(Modules.class);
        boolean scan = null == ann ? false : ann.scanPackage();
        List<Class<?>> list = new LinkedList<Class<?>>();
        list.add(mainModule);
        if (null != ann) {
            for (Class<?> module : ann.value()) {
                list.add(module);
            }
        }
        Set<Class<?>> modules = new HashSet<Class<?>>();
        for (Class<?> type : list) {
            if (scan) {
                if (log.isDebugEnabled())
                    log.debugf(" > scan '%s'", type.getPackage().getName());
                List<Class<?>> subs = Scans.me().scanPackage(type);
                for (Class<?> sub : subs) {
                    if (isModule(sub)) {
                        if (log.isDebugEnabled())
                            log.debugf(" >> add '%s'", sub.getName());
                        modules.add(sub);
                    } else if (log.isTraceEnabled()) {
                        log.tracef(" >> ignore '%s'", sub.getName());
                    }
                }
            } else {
                if (isModule(type)) {
                    if (log.isDebugEnabled())
                        log.debugf(" > add '%s'", type.getName());
                    modules.add(type);
                } else if (log.isTraceEnabled()) {
                    log.tracef(" > ignore '%s'", type.getName());
                }
            }
        }
        return modules;
    }

    // Records which HTTP verbs the entry method accepts, based on its
    // GET/POST/PUT/DELETE annotations.
    private static void evalHttpMethod(ActionInfo ai, Method method) {
        if (method.getAnnotation(GET.class) != null)
            ai.getHttpMethods().add("GET");
        if (method.getAnnotation(POST.class) != null)
            ai.getHttpMethods().add("POST");
        if (method.getAnnotation(PUT.class) != null)
            ai.getHttpMethods().add("PUT");
        if (method.getAnnotation(DELETE.class) != null)
            ai.getHttpMethods().add("DELETE");
    }

    // Copies the @Chain name, if present.
    private static void evalActionChainMaker(ActionInfo ai, Chain cb) {
        if (null != cb) {
            ai.setChainName(cb.value());
        }
    }

    // Derives the URL path(s) from @At; when @At gives no explicit value,
    // "/" + the lower-cased class or method name is used as the default.
    private static void evalAt(ActionInfo ai, At at, String def) {
        if (null != at) {
            if (null == at.value() || at.value().length == 0) {
                ai.setPaths(Lang.array("/" + def.toLowerCase()));
            } else {
                ai.setPaths(at.value());
            }
            if (!Strings.isBlank(at.key()))
                ai.setPathKey(at.key());
        }
    }

    // Shared mutable class-level state: the @PathMap (parsed as JSON) of
    // the most recently processed module class; read by parsePath() for
    // both class- and method-level Ok/Fail views.
    // NOTE(review): because this is static and never reset, it is not
    // thread-safe, and a class WITHOUT @PathMap silently inherits the map
    // of the previously loaded class - confirm whether that carry-over is
    // intended before relying on it.
    private static Map<String, String> path = null;

    // Parses the @PathMap JSON into the shared 'path' map.
    @SuppressWarnings("unchecked")
    private static void evalPathMap(ActionInfo ai, PathMap pathMap){
        if(pathMap != null){
            path = Json.fromJson(Map.class, pathMap.value());
        }
    }

    // Sets the fail view from @Fail; the view string may be suffixed via
    // the path map (see parsePath).
    private static void evalFail(ActionInfo ai, Fail fail) {
        if (null == fail) {
            return;
        }
        ai.setFailView(parsePath(fail.key(), fail.value()));
    }

    // Sets the ok view from @Ok; same key lookup as evalFail.
    private static void evalOk(ActionInfo ai, Ok ok) {
        if (null == ok) {
            return;
        }
        ai.setOkView(parsePath(ok.key(), ok.value()));
    }

    // Appends path.get(key) to the view value when a key is given and a
    // path map is active.  NOTE(review): a key that is absent from the map
    // appends the literal string "null" - presumably keys are always
    // present; verify against callers.
    private static String parsePath(String key, String value){
        if(path == null || key == null || key.equals("")){
            return value;
        }
        return value + path.get(key);
    }

    // Determines the Ioc bean name of the module: @IocBean.name wins, then
    // @InjectName.value, else the lower-cased simple class name.
    // (Since 5.10.3, @IocBean's name takes precedence over @InjectName -
    // modified by QinerG@gmai.com)
    private static void evalModule(ActionInfo ai, Class<?> type) {
        ai.setModuleType(type);
        String beanName = null;
        InjectName innm = type.getAnnotation(InjectName.class);
        IocBean iocBean = type.getAnnotation(IocBean.class);
        if(innm == null && iocBean == null) // not an Ioc-managed module
            return;
        if (iocBean != null) {
            beanName = iocBean.name();
        }
        if (Strings.isBlank(beanName)) {
            if (innm != null && !Strings.isBlank(innm.value())) {
                beanName = innm.value();
            } else {
                beanName = Strings.lowerFirst(type.getSimpleName());
            }
        }
        ai.setInjectName(beanName);
    }

    // Builds an ObjectInfo descriptor for each @By filter listed in
    // @Filters.
    @SuppressWarnings({"unchecked", "rawtypes"})
    private static void evalActionFilters(ActionInfo ai, Filters filters) {
        if (null != filters) {
            List<ObjectInfo<? extends ActionFilter>> list = new ArrayList<ObjectInfo<? extends ActionFilter>>(filters.value().length);
            for (By by : filters.value()) {
                list.add(new ObjectInfo(by.type(), by.args()));
            }
            ai.setFilterInfos(list.toArray(new ObjectInfo[list.size()]));
        }
    }

    // Wraps the @AdaptBy type/args into an adaptor descriptor.
    @SuppressWarnings({"unchecked", "rawtypes"})
    private static void evalHttpAdaptor(ActionInfo ai, AdaptBy ab) {
        if (null != ab) {
            ai.setAdaptorInfo((ObjectInfo<? extends HttpAdaptor>) new ObjectInfo(ab.type(), ab.args()));
        }
    }

    // Applies @Encoding, defaulting both input and output to UTF-8.
    private static void evalEncoding(ActionInfo ai, Encoding encoding) {
        if (null == encoding) {
            ai.setInputEncoding(org.nutz.lang.Encoding.UTF8);
            ai.setOutputEncoding(org.nutz.lang.Encoding.UTF8);
        } else {
            ai.setInputEncoding(Strings.sNull(encoding.input(), org.nutz.lang.Encoding.UTF8));
            ai.setOutputEncoding(Strings.sNull(encoding.output(), org.nutz.lang.Encoding.UTF8));
        }
    }

    /**
     * Instantiates an object described by a type plus string arguments.
     * Each argument is first expanded against the loading context; a
     * single "ioc:&lt;name&gt;" argument fetches the bean from the Ioc
     * container, otherwise the args are passed to a matching constructor.
     */
    public static <T> T evalObj(NutConfig config, Class<T> type, String[] args) {
        Context context = config.getLoadingContext();
        for (int i = 0; i < args.length; i++) {
            args[i] = Segments.replace(args[i], context);
        }
        // fetch from the Ioc container instead of constructing directly
        if (args.length == 1 && args[0].startsWith("ioc:")) {
            String name = Strings.trim(args[0].substring(4));
            return config.getIoc().get(type, name);
        }
        return Mirror.me(type).born((Object[]) args);
    }

    // A module class is public, concrete, non-interface, and has at least
    // one method annotated with @At.
    private static boolean isModule(Class<?> classZ) {
        int classModify = classZ.getModifiers();
        if (!Modifier.isPublic(classModify)
                || Modifier.isAbstract(classModify)
                || Modifier.isInterface(classModify))
            return false;
        for (Method method : classZ.getMethods())
            if (method.isAnnotationPresent(At.class))
                return true;
        return false;
    }
}
package org.pentaho.di.job; import java.io.IOException; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map; import org.apache.commons.vfs.FileName; import org.apache.commons.vfs.FileObject; import org.apache.commons.vfs.FileSystemException; import org.eclipse.core.runtime.IProgressMonitor; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.DBCache; import org.pentaho.di.core.EngineMetaInterface; import org.pentaho.di.core.LastUsedFile; import org.pentaho.di.core.NotePadMeta; import org.pentaho.di.core.Props; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.SQLStatement; import org.pentaho.di.core.changed.ChangedFlag; import org.pentaho.di.core.database.Database; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.gui.GUIPositionInterface; import org.pentaho.di.core.gui.OverwritePrompter; import org.pentaho.di.core.gui.Point; import org.pentaho.di.core.gui.UndoInterface; import org.pentaho.di.core.logging.LogWriter; import org.pentaho.di.core.reflection.StringSearchResult; import org.pentaho.di.core.reflection.StringSearcher; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.undo.TransAction; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.core.xml.XMLInterface; import org.pentaho.di.job.entries.special.JobEntrySpecial; import org.pentaho.di.job.entry.JobEntryCopy; import org.pentaho.di.job.entry.JobEntryInterface; import 
org.pentaho.di.repository.Repository; import org.pentaho.di.repository.RepositoryDirectory; import org.pentaho.di.resource.ResourceDefinition; import org.pentaho.di.resource.ResourceExportInterface; import org.pentaho.di.resource.ResourceNamingInterface; import org.pentaho.di.resource.ResourceReference; import org.pentaho.di.shared.SharedObjectInterface; import org.pentaho.di.shared.SharedObjects; import org.pentaho.di.trans.HasDatabasesInterface; import org.pentaho.di.trans.HasSlaveServersInterface; import org.w3c.dom.Document; import org.w3c.dom.Node; /** * Defines a Job and provides methods to load, save, verify, etc. * * @author Matt * @since 11-08-2003 * */ public class JobMeta extends ChangedFlag implements Cloneable, Comparable<JobMeta>, XMLInterface, UndoInterface, HasDatabasesInterface, VariableSpace, EngineMetaInterface, ResourceExportInterface, HasSlaveServersInterface { public static final String XML_TAG = "job"; //$NON-NLS-1$ private static final String XML_TAG_SLAVESERVERS = "slaveservers"; //$NON-NLS-1$ public LogWriter log; protected long id; protected String name; protected String description; protected String extended_description; protected String job_version; protected int job_status; protected String filename; public List<JobEntryInterface> jobentries; public List<JobEntryCopy> jobcopies; public List<JobHopMeta> jobhops; public List<NotePadMeta> notes; public List<DatabaseMeta> databases; private List<SlaveServer> slaveServers; protected RepositoryDirectory directory; protected String arguments[]; protected boolean changed_entries, changed_hops, changed_notes, changed_databases; protected DatabaseMeta logconnection; protected String logTable; public DBCache dbcache; protected List<TransAction> undo; private VariableSpace variables = new Variables(); protected int max_undo; protected int undo_position; public static final int TYPE_UNDO_CHANGE = 1; public static final int TYPE_UNDO_NEW = 2; public static final int TYPE_UNDO_DELETE = 3; public 
static final int TYPE_UNDO_POSITION = 4; public static final String STRING_SPECIAL = "SPECIAL"; //$NON-NLS-1$ public static final String STRING_SPECIAL_START = "START"; //$NON-NLS-1$ public static final String STRING_SPECIAL_DUMMY = "DUMMY"; //$NON-NLS-1$ public static final String STRING_SPECIAL_OK = "OK"; //$NON-NLS-1$ public static final String STRING_SPECIAL_ERROR = "ERROR"; //$NON-NLS-1$ // Remember the size and position of the different windows... public boolean max[] = new boolean[1]; public String created_user, modifiedUser; public Date created_date, modifiedDate; protected boolean useBatchId; protected boolean batchIdPassed; protected boolean logfieldUsed; /** If this is null, we load from the default shared objects file : $KETTLE_HOME/.kettle/shared.xml */ protected String sharedObjectsFile; public JobMeta(LogWriter l) { log = l; clear(); initializeVariablesFrom(null); } public long getID() { return id; } public void setID(long id) { this.id = id; } public void clear() { name = null; jobcopies = new ArrayList<JobEntryCopy>(); jobentries = new ArrayList<JobEntryInterface>(); jobhops = new ArrayList<JobHopMeta>(); notes = new ArrayList<NotePadMeta>(); databases = new ArrayList<DatabaseMeta>(); slaveServers = new ArrayList<SlaveServer>(); logconnection = null; logTable = null; arguments = null; max_undo = Const.MAX_UNDO; dbcache = DBCache.getInstance(); undo = new ArrayList<TransAction>(); undo_position = -1; addDefaults(); setChanged(false); created_user = "-"; //$NON-NLS-1$ created_date = new Date(); modifiedUser = "-"; //$NON-NLS-1$ modifiedDate = new Date(); directory = new RepositoryDirectory(); description=null; job_status=-1; job_version=null; extended_description=null; useBatchId=true; logfieldUsed=true; // setInternalKettleVariables(); Don't clear the internal variables for ad-hoc jobs, it's ruins the previews // etc. } public void addDefaults() { /* addStart(); // Add starting point! addDummy(); // Add dummy! 
addOK(); // errors == 0 evaluation addError(); // errors != 0 evaluation */ clearChanged(); } public static final JobEntryCopy createStartEntry() { JobEntrySpecial jobEntrySpecial = new JobEntrySpecial(STRING_SPECIAL_START, true, false); JobEntryCopy jobEntry = new JobEntryCopy(); jobEntry.setID(-1L); jobEntry.setEntry(jobEntrySpecial); jobEntry.setLocation(50, 50); jobEntry.setDrawn(false); jobEntry.setDescription(Messages.getString("JobMeta.StartJobEntry.Description")); //$NON-NLS-1$ return jobEntry; } public static final JobEntryCopy createDummyEntry() { JobEntrySpecial jobEntrySpecial = new JobEntrySpecial(STRING_SPECIAL_DUMMY, false, true); JobEntryCopy jobEntry = new JobEntryCopy(); jobEntry.setID(-1L); jobEntry.setEntry(jobEntrySpecial); jobEntry.setLocation(50, 50); jobEntry.setDrawn(false); jobEntry.setDescription(Messages.getString("JobMeta.DummyJobEntry.Description")); //$NON-NLS-1$ return jobEntry; } public JobEntryCopy getStart() { for (int i = 0; i < nrJobEntries(); i++) { JobEntryCopy cge = getJobEntry(i); if (cge.isStart()) return cge; } return null; } public JobEntryCopy getDummy() { for (int i = 0; i < nrJobEntries(); i++) { JobEntryCopy cge = getJobEntry(i); if (cge.isDummy()) return cge; } return null; } /** * Compares two transformation on name, filename */ public int compare(JobMeta t1, JobMeta t2) { if (Const.isEmpty(t1.getName()) && !Const.isEmpty(t2.getName())) return -1; if (!Const.isEmpty(t1.getName()) && Const.isEmpty(t2.getName())) return 1; if (Const.isEmpty(t1.getName()) && Const.isEmpty(t2.getName())) { if (Const.isEmpty(t1.getFilename()) && !Const.isEmpty(t2.getFilename())) return -1; if (!Const.isEmpty(t1.getFilename()) && Const.isEmpty(t2.getFilename())) return 1; if (Const.isEmpty(t1.getFilename()) && Const.isEmpty(t2.getFilename())) { return 0; } return t1.getFilename().compareTo(t2.getFilename()); } return t1.getName().compareTo(t2.getName()); } public int compareTo(JobMeta o) { return compare(this, o); } public boolean 
equals(Object obj) { if (!(obj instanceof JobMeta)) return false; return compare(this,(JobMeta) obj) == 0; } public Object clone() { return realClone(true); } public Object realClone(boolean doClear) { try { JobMeta jobMeta = (JobMeta) super.clone(); if (doClear) { jobMeta.clear(); } else { jobMeta.jobcopies = new ArrayList<JobEntryCopy>(); jobMeta.jobentries = new ArrayList<JobEntryInterface>(); jobMeta.jobhops = new ArrayList<JobHopMeta>(); jobMeta.notes = new ArrayList<NotePadMeta>(); jobMeta.databases = new ArrayList<DatabaseMeta>(); jobMeta.slaveServers = new ArrayList<SlaveServer>(); } for (JobEntryInterface entry : jobentries) jobMeta.jobentries.add((JobEntryInterface)entry.clone()); for (JobEntryCopy entry : jobcopies) jobMeta.jobcopies.add((JobEntryCopy)entry.clone_deep()); for (JobHopMeta entry : jobhops) jobMeta.jobhops.add((JobHopMeta)entry.clone()); for (NotePadMeta entry : notes) jobMeta.notes.add((NotePadMeta)entry.clone()); for (DatabaseMeta entry : databases) jobMeta.databases.add((DatabaseMeta)entry.clone()); for (SlaveServer slave : slaveServers) jobMeta.getSlaveServers().add((SlaveServer)slave.clone()); return jobMeta; } catch (CloneNotSupportedException e) { return null; } } public String getName() { return name; } public void setName(String name) { this.name = name; setInternalKettleVariables(); } /** * Builds a name - if no name is set, yet - from the filename */ public void nameFromFilename() { if (!Const.isEmpty(filename)) { name = Const.createName(filename); } } /** * @return Returns the directory. */ public RepositoryDirectory getDirectory() { return directory; } /** * @param directory The directory to set. 
*/ public void setDirectory(RepositoryDirectory directory) { this.directory = directory; setInternalKettleVariables(); } public String getFilename() { return filename; } public void setFilename(String filename) { this.filename = filename; setInternalKettleVariables(); } public DatabaseMeta getLogConnection() { return logconnection; } public void setLogConnection(DatabaseMeta ci) { logconnection = ci; } /** * @return Returns the databases. */ public List<DatabaseMeta> getDatabases() { return databases; } /** * @param databases The databases to set. */ public void setDatabases(List<DatabaseMeta> databases) { this.databases = databases; } public void setChanged(boolean ch) { if (ch) setChanged(); else clearChanged(); } public void clearChanged() { changed_entries = false; changed_hops = false; changed_notes = false; changed_databases = false; for (int i = 0; i < nrJobEntries(); i++) { JobEntryCopy entry = getJobEntry(i); entry.setChanged(false); } for (JobHopMeta hi:jobhops) // Look at all the hops { hi.setChanged(false); } for (int i = 0; i < nrDatabases(); i++) { DatabaseMeta db = getDatabase(i); db.setChanged(false); } for (int i = 0; i < nrNotes(); i++) { NotePadMeta note = getNote(i); note.setChanged(false); } super.clearChanged(); } public boolean hasChanged() { if (super.hasChanged()) return true; if (haveJobEntriesChanged()) return true; if (haveJobHopsChanged()) return true; if (haveConnectionsChanged()) return true; if (haveNotesChanged()) return true; return false; } protected void saveRepJob(Repository rep) throws KettleException { try { // The ID has to be assigned, even when it's a new item... rep.insertJob(getID(), directory.getID(), getName(), logconnection == null ? 
-1 : logconnection.getID(), logTable, modifiedUser, modifiedDate, useBatchId, batchIdPassed, logfieldUsed, sharedObjectsFile,description,extended_description,job_version, job_status, created_user,created_date); } catch (KettleDatabaseException dbe) { throw new KettleException(Messages.getString("JobMeta.Exception.UnableToSaveJobToRepository"), dbe); //$NON-NLS-1$ } } public boolean showReplaceWarning(Repository rep) { if (getID() < 0) { try { if (rep.getJobID(getName(), directory.getID()) > 0) return true; } catch (KettleException dbe) { return true; } } return false; } /** * This method asks all steps in the transformation whether or not the specified database connection is used. * The connection is used in the transformation if any of the steps uses it or if it is being used to log to. * @param databaseMeta The connection to check * @return true if the connection is used in this transformation. */ public boolean isDatabaseConnectionUsed(DatabaseMeta databaseMeta) { for (int i=0;i<nrJobEntries();i++) { JobEntryCopy jobEntry = getJobEntry(i); DatabaseMeta dbs[] = jobEntry.getEntry().getUsedDatabaseConnections(); for (int d=0;d<dbs.length;d++) { if (dbs[d]!=null && dbs[d].equals(databaseMeta)) return true; } } if (logconnection!=null && logconnection.equals(databaseMeta)) return true; return false; } public String getFileType() { return LastUsedFile.FILE_TYPE_JOB; } public String[] getFilterNames() { return Const.getJobFilterNames(); } public String[] getFilterExtensions() { return Const.STRING_JOB_FILTER_EXT; } public String getDefaultExtension() { return Const.STRING_JOB_DEFAULT_EXT; } public String getXML() { Props props = null; if (Props.isInitialized()) props=Props.getInstance(); DatabaseMeta ci = getLogConnection(); StringBuffer retval = new StringBuffer(500); retval.append("<").append(XML_TAG).append(">").append(Const.CR); //$NON-NLS-1$ retval.append(" ").append(XMLHandler.addTagValue("name", getName())); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" 
").append(XMLHandler.addTagValue("description", description)); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("extended_description", extended_description)); retval.append(" ").append(XMLHandler.addTagValue("job_version", job_version)); if ( job_status >= 0 ) { retval.append(" ").append(XMLHandler.addTagValue("job_status", job_status)); } retval.append(" ").append(XMLHandler.addTagValue("directory", directory.getPath())); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("created_user", created_user)); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("created_date", XMLHandler.date2string(created_date))); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ retval.append(" ").append(XMLHandler.addTagValue("modified_user", modifiedUser)); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("modified_date", XMLHandler.date2string(created_date))); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ // Save the database connections... for (int i = 0; i < nrDatabases(); i++) { DatabaseMeta dbMeta = getDatabase(i); if (props!=null && props.areOnlyUsedConnectionsSavedToXML()) { if (isDatabaseConnectionUsed(dbMeta)) { retval.append(dbMeta.getXML()); } } else { retval.append(dbMeta.getXML()); } } // The slave servers... retval.append(" ").append(XMLHandler.openTag(XML_TAG_SLAVESERVERS)).append(Const.CR); //$NON-NLS-1$ for (int i = 0; i < slaveServers.size(); i++) { SlaveServer slaveServer = slaveServers.get(i); retval.append(" ").append(slaveServer.getXML()).append(Const.CR); } retval.append(" ").append(XMLHandler.closeTag(XML_TAG_SLAVESERVERS)).append(Const.CR); //$NON-NLS-1$ retval.append(" ").append(XMLHandler.addTagValue("logconnection", ci == null ? 
"" : ci.getName())); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ retval.append(" ").append(XMLHandler.addTagValue("logtable", logTable)); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("use_batchid", useBatchId)); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("pass_batchid", batchIdPassed)); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("use_logfield", logfieldUsed)); //$NON-NLS-1$ //$NON-NLS-2$ retval.append(" ").append(XMLHandler.addTagValue("shared_objects_file", sharedObjectsFile)); // $NON-NLS-1$ retval.append(" <entries>").append(Const.CR); //$NON-NLS-1$ for (int i = 0; i < nrJobEntries(); i++) { JobEntryCopy jge = getJobEntry(i); retval.append(jge.getXML()); } retval.append(" </entries>").append(Const.CR); //$NON-NLS-1$ retval.append(" <hops>").append(Const.CR); //$NON-NLS-1$ for (JobHopMeta hi:jobhops) // Look at all the hops { retval.append(hi.getXML()); } retval.append(" </hops>").append(Const.CR); //$NON-NLS-1$ retval.append(" <notepads>").append(Const.CR); //$NON-NLS-1$ for (int i = 0; i < nrNotes(); i++) { NotePadMeta ni = getNote(i); retval.append(ni.getXML()); } retval.append(" </notepads>").append(Const.CR); //$NON-NLS-1$ retval.append("</").append(XML_TAG).append(">").append(Const.CR); //$NON-NLS-1$ return retval.toString(); } public JobMeta(LogWriter log, String fname, Repository rep) throws KettleXMLException { this(log, null, fname, rep, null); } public JobMeta(LogWriter log, String fname, Repository rep, OverwritePrompter prompter) throws KettleXMLException { this(log, null, fname, rep, prompter); } /** * Load the job from the XML file specified. * * @param log the logging channel * @param fname The filename to load as a job * @param rep The repository to bind againt, null if there is no repository available. 
* @throws KettleXMLException */ public JobMeta(LogWriter log, VariableSpace parentSpace, String fname, Repository rep, OverwritePrompter prompter) throws KettleXMLException { this.log = log; this.initializeVariablesFrom(parentSpace); try { // OK, try to load using the VFS stuff... Document doc = XMLHandler.loadXMLFile(KettleVFS.getFileObject(fname)); if (doc != null) { // Clear the job clear(); // The jobnode Node jobnode = XMLHandler.getSubNode(doc, XML_TAG); loadXML(jobnode, rep, prompter); // Do this at the end setFilename(fname); } else { throw new KettleXMLException(Messages.getString("JobMeta.Exception.ErrorReadingFromXMLFile") + fname); //$NON-NLS-1$ } } catch (Exception e) { throw new KettleXMLException(Messages.getString("JobMeta.Exception.UnableToLoadJobFromXMLFile") + fname + "]", e); //$NON-NLS-1$ //$NON-NLS-2$ } } public JobMeta(LogWriter log, Node jobnode, Repository rep, OverwritePrompter prompter) throws KettleXMLException { this.log = log; loadXML(jobnode, rep, prompter); } public boolean isRepReference() { return isRepReference(getFilename(), this.getName()); } public boolean isFileReference() { return !isRepReference(getFilename(), this.getName()); } public static boolean isRepReference(String fileName, String transName) { return Const.isEmpty(fileName) && !Const.isEmpty(transName); } public static boolean isFileReference(String fileName, String transName) { return !isRepReference(fileName, transName); } public void loadXML(Node jobnode, Repository rep, OverwritePrompter prompter ) throws KettleXMLException { Props props = null; if (Props.isInitialized()) props = Props.getInstance(); try { // clear the jobs; clear(); // get job info: name = XMLHandler.getTagValue(jobnode, "name"); //$NON-NLS-1$ // description description = XMLHandler.getTagValue(jobnode, "description"); // extended description extended_description = XMLHandler.getTagValue(jobnode, "extended_description"); // job version job_version = XMLHandler.getTagValue(jobnode, 
"job_version"); // job status job_status = Const.toInt(XMLHandler.getTagValue(jobnode, "job_status"),-1); // Created user/date created_user = XMLHandler.getTagValue(jobnode, "created_user"); //$NON-NLS-1$ String createDate = XMLHandler.getTagValue(jobnode, "created_date"); //$NON-NLS-1$ if (createDate != null) { created_date = XMLHandler.stringToDate(createDate); } // Changed user/date modifiedUser = XMLHandler.getTagValue(jobnode, "modified_user"); //$NON-NLS-1$ String modDate = XMLHandler.getTagValue(jobnode, "modified_date"); //$NON-NLS-1$ if (modDate != null) { modifiedDate = XMLHandler.stringToDate(modDate); } // Load the default list of databases // Read objects from the shared XML file & the repository try { sharedObjectsFile = XMLHandler.getTagValue(jobnode, "shared_objects_file"); //$NON-NLS-1$ //$NON-NLS-2$ readSharedObjects(rep); } catch(Exception e) { LogWriter.getInstance().logError(toString(), Messages.getString("JobMeta.ErrorReadingSharedObjects.Message", e.toString())); // $NON-NLS-1$ //$NON-NLS-1$ LogWriter.getInstance().logError(toString(), Const.getStackTracker(e)); } // Read the database connections int nr = XMLHandler.countNodes(jobnode, "connection"); //$NON-NLS-1$ for (int i = 0; i < nr; i++) { Node dbnode = XMLHandler.getSubNodeByNr(jobnode, "connection", i); //$NON-NLS-1$ DatabaseMeta dbcon = new DatabaseMeta(dbnode); DatabaseMeta exist = findDatabase(dbcon.getName()); if (exist == null) { addDatabase(dbcon); } else { boolean askOverwrite = Props.isInitialized() ? props.askAboutReplacingDatabaseConnections() : false; boolean overwrite = Props.isInitialized() ? 
props.replaceExistingDatabaseConnections() : true; if (askOverwrite && prompter != null) { overwrite = prompter.overwritePrompt( Messages.getString("JobMeta.Dialog.ConnectionExistsOverWrite.Message", dbcon.getName() ), Messages.getString("JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"), Props.STRING_ASK_ABOUT_REPLACING_DATABASES); } if (overwrite) { int idx = indexOfDatabase(exist); removeDatabase(idx); addDatabase(idx, dbcon); } } } // Read the slave servers... Node slaveServersNode = XMLHandler.getSubNode(jobnode, XML_TAG_SLAVESERVERS); //$NON-NLS-1$ int nrSlaveServers = XMLHandler.countNodes(slaveServersNode, SlaveServer.XML_TAG); //$NON-NLS-1$ for (int i = 0 ; i < nrSlaveServers ; i++) { Node slaveServerNode = XMLHandler.getSubNodeByNr(slaveServersNode, SlaveServer.XML_TAG, i); SlaveServer slaveServer = new SlaveServer(slaveServerNode); // Check if the object exists and if it's a shared object. // If so, then we will keep the shared version, not this one. // The stored XML is only for backup purposes. SlaveServer check = findSlaveServer(slaveServer.getName()); if (check!=null) { if (!check.isShared()) // we don't overwrite shared objects. { addOrReplaceSlaveServer(slaveServer); } } else { slaveServers.add(slaveServer); } } /* * Get the log database connection & log table */ String logcon = XMLHandler.getTagValue(jobnode, "logconnection"); //$NON-NLS-1$ logconnection = findDatabase(logcon); logTable = XMLHandler.getTagValue(jobnode, "logtable"); //$NON-NLS-1$ useBatchId = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_batchid")); //$NON-NLS-1$ //$NON-NLS-2$ batchIdPassed = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "pass_batchid")); //$NON-NLS-1$ //$NON-NLS-2$ logfieldUsed = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_logfield")); //$NON-NLS-1$ //$NON-NLS-2$ /* * read the job entries... 
*/ Node entriesnode = XMLHandler.getSubNode(jobnode, "entries"); //$NON-NLS-1$ int tr = XMLHandler.countNodes(entriesnode, "entry"); //$NON-NLS-1$ for (int i = 0; i < tr; i++) { Node entrynode = XMLHandler.getSubNodeByNr(entriesnode, "entry", i); //$NON-NLS-1$ // System.out.println("Reading entry:\n"+entrynode); JobEntryCopy je = new JobEntryCopy(entrynode, databases, slaveServers, rep); JobEntryCopy prev = findJobEntry(je.getName(), 0, true); if (prev != null) { if (je.getNr() == 0) // See if the #0 already exists! { // Replace previous version with this one: remove it first int idx = indexOfJobEntry(prev); removeJobEntry(idx); } else if (je.getNr() > 0) // Use previously defined JobEntry info! { je.setEntry(prev.getEntry()); // See if entry already exists... prev = findJobEntry(je.getName(), je.getNr(), true); if (prev != null) // remove the old one! { int idx = indexOfJobEntry(prev); removeJobEntry(idx); } } } // Add the JobEntryCopy... addJobEntry(je); } Node hopsnode = XMLHandler.getSubNode(jobnode, "hops"); //$NON-NLS-1$ int ho = XMLHandler.countNodes(hopsnode, "hop"); //$NON-NLS-1$ for (int i = 0; i < ho; i++) { Node hopnode = XMLHandler.getSubNodeByNr(hopsnode, "hop", i); //$NON-NLS-1$ JobHopMeta hi = new JobHopMeta(hopnode, this); jobhops.add(hi); } // Read the notes... Node notepadsnode = XMLHandler.getSubNode(jobnode, "notepads"); //$NON-NLS-1$ int nrnotes = XMLHandler.countNodes(notepadsnode, "notepad"); //$NON-NLS-1$ for (int i = 0; i < nrnotes; i++) { Node notepadnode = XMLHandler.getSubNodeByNr(notepadsnode, "notepad", i); //$NON-NLS-1$ NotePadMeta ni = new NotePadMeta(notepadnode); notes.add(ni); } clearChanged(); } catch (Exception e) { throw new KettleXMLException(Messages.getString("JobMeta.Exception.UnableToLoadJobFromXMLNode"), e); //$NON-NLS-1$ } finally { setInternalKettleVariables(); } } /** * Read the database connections in the repository and add them to this job if they are not yet present. 
*
* @param rep The repository to load the database connections from.
* @throws KettleException
*/
public void readDatabases(Repository rep) throws KettleException
{
    // Convenience overload: read every connection, replacing shared ones.
    readDatabases(rep, true);
}

/*
 * (non-Javadoc)
 *
 * Reads every database connection stored in the repository into this job.
 * When overWriteShared is true, an existing connection with the same name is
 * replaced in place (via addOrReplaceDatabase); when false, only names not yet
 * present are added, and their changed-flag is cleared afterwards.
 *
 * @see org.pentaho.di.trans.HasDatabaseInterface#readDatabases(org.pentaho.di.repository.Repository, boolean)
 */
public void readDatabases(Repository rep, boolean overWriteShared) throws KettleException
{
    try
    {
        long dbids[] = rep.getDatabaseIDs();
        for (int i = 0; i < dbids.length; i++)
        {
            DatabaseMeta databaseMeta = new DatabaseMeta(rep, dbids[i]);
            DatabaseMeta check = findDatabase(databaseMeta.getName()); // is one already present in the job?
            // NOTE(review): despite the historical "we only add, never overwrite" comment,
            // this branch DOES overwrite an existing connection whenever overWriteShared is true.
            if (check == null || overWriteShared)
            {
                if (databaseMeta.getName() != null)
                {
                    addOrReplaceDatabase(databaseMeta);
                    if (!overWriteShared) databaseMeta.setChanged(false);
                }
            }
        }
        setChanged(false);
    }
    catch (KettleDatabaseException dbe)
    {
        // Preserve the cause so the caller can see the underlying DB failure.
        throw new KettleException(Messages.getString("JobMeta.Log.UnableToReadDatabaseIDSFromRepository"), dbe); //$NON-NLS-1$
    }
    catch (KettleException ke)
    {
        throw new KettleException(Messages.getString("JobMeta.Log.UnableToReadDatabasesFromRepository"), ke); //$NON-NLS-1$
    }
}

public void readSharedObjects(Repository rep) throws KettleException
{
    // Extract the shared steps, connections, etc. using the SharedObjects class
    String soFile = environmentSubstitute(sharedObjectsFile);
    SharedObjects sharedObjects = new SharedObjects(soFile);
    Map<?, SharedObjectInterface> objectsMap = sharedObjects.getObjectsMap();
    // First read the databases...
    // We read databases & slaves first because there might be dependencies that need to be resolved.
for (SharedObjectInterface object : objectsMap.values()) { if (object instanceof DatabaseMeta) { DatabaseMeta databaseMeta = (DatabaseMeta) object; addOrReplaceDatabase(databaseMeta); } else if (object instanceof SlaveServer) { SlaveServer slaveServer = (SlaveServer) object; addOrReplaceSlaveServer(slaveServer); } } if (rep!=null) { readDatabases(rep, true); } } public boolean saveSharedObjects() { try { // First load all the shared objects... String soFile = environmentSubstitute(sharedObjectsFile); SharedObjects sharedObjects = new SharedObjects(soFile); // Now overwrite the objects in there List<Object> shared = new ArrayList<Object>(); shared.addAll(databases); shared.addAll(slaveServers); // The databases connections... for (int i=0;i<shared.size();i++) { SharedObjectInterface sharedObject = (SharedObjectInterface) shared.get(i); if (sharedObject.isShared()) { sharedObjects.storeObject(sharedObject); } } // Save the objects sharedObjects.saveToFile(); return true; } catch(Exception e) { log.logError(toString(), "Unable to save shared ojects: "+e.toString()); return false; } } /** * Find a database connection by it's name * * @param name The database name to look for * @return The database connection or null if nothing was found. 
*/ public DatabaseMeta findDatabase(String name) { for (int i = 0; i < nrDatabases(); i++) { DatabaseMeta ci = getDatabase(i); if (ci.getName().equalsIgnoreCase(name)) { return ci; } } return null; } public void saveRep(Repository rep) throws KettleException { saveRep(rep, null); } public void saveRep(Repository rep, IProgressMonitor monitor) throws KettleException { try { int nrWorks = 2 + nrDatabases() + nrNotes() + nrJobEntries() + nrJobHops(); if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.SavingTransformation") + directory + Const.FILE_SEPARATOR + getName(), nrWorks); //$NON-NLS-1$ rep.lockRepository(); rep.insertLogEntry("save job '"+getName()+"'"); //$NON-NLS-1$ //$NON-NLS-2$ // Before we start, make sure we have a valid job ID! // Two possibilities: // 1) We have a ID: keep it // 2) We don't have an ID: look it up. // If we find a transformation with the same name: ask! if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.HandlingPreviousVersionOfJob")); //$NON-NLS-1$ setID(rep.getJobID(getName(), directory.getID())); // If no valid id is available in the database, assign one... if (getID() <= 0) { setID(rep.getNextJobID()); } else { // If we have a valid ID, we need to make sure everything is cleared out // of the database for this id_job, before we put it back in... rep.delAllFromJob(getID()); } if (monitor != null) monitor.worked(1); // Now, save the job entry in R_JOB // Note, we save this first so that we have an ID in the database. // Everything else depends on this ID, including recursive job entries to the save job. 
(retry) if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.SavingJobDetails")); //$NON-NLS-1$ log.logDetailed(toString(), "Saving job info to repository..."); //$NON-NLS-1$ saveRepJob(rep); if (monitor != null) monitor.worked(1); // Save the slaves for (int i=0;i<slaveServers.size();i++) { SlaveServer slaveServer = slaveServers.get(i); slaveServer.saveRep(rep, getID(), false); } // Save the notes log.logDetailed(toString(), "Saving notes to repository..."); //$NON-NLS-1$ for (int i = 0; i < nrNotes(); i++) { if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.SavingNoteNr") + (i + 1) + "/" + nrNotes()); //$NON-NLS-1$ //$NON-NLS-2$ NotePadMeta ni = getNote(i); ni.saveRep(rep, getID()); if (ni.getID() > 0) { rep.insertJobNote(getID(), ni.getID()); } if (monitor != null) monitor.worked(1); } // Save the job entries log.logDetailed(toString(), "Saving " + nrJobEntries() + " Job enty copies to repository..."); //$NON-NLS-1$ //$NON-NLS-2$ rep.updateJobEntryTypes(); for (int i = 0; i < nrJobEntries(); i++) { if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.SavingJobEntryNr") + (i + 1) + "/" + nrJobEntries()); //$NON-NLS-1$ //$NON-NLS-2$ JobEntryCopy cge = getJobEntry(i); cge.saveRep(rep, getID()); if (monitor != null) monitor.worked(1); } log.logDetailed(toString(), "Saving job hops to repository..."); //$NON-NLS-1$ for (int i = 0; i < nrJobHops(); i++) { if (monitor != null) monitor.subTask("Saving job hop #" + (i + 1) + "/" + nrJobHops()); //$NON-NLS-1$ //$NON-NLS-2$ JobHopMeta hi = getJobHop(i); hi.saveRep(rep, getID()); if (monitor != null) monitor.worked(1); } // Commit this transaction!! rep.commit(); clearChanged(); if (monitor != null) monitor.done(); } catch (KettleDatabaseException dbe) { rep.rollback(); throw new KettleException(Messages.getString("JobMeta.Exception.UnableToSaveJobInRepositoryRollbackPerformed"), dbe); //$NON-NLS-1$ } finally { // don't forget to unlock the repository. 
// Normally this is done by the commit / rollback statement, but hey there are some freaky database out // there... rep.unlockRepository(); } } /** * Load a job in a directory * * @param log the logging channel * @param rep The Repository * @param jobname The name of the job * @param repdir The directory in which the job resides. * @throws KettleException */ public JobMeta(LogWriter log, Repository rep, String jobname, RepositoryDirectory repdir) throws KettleException { this(log, rep, jobname, repdir, null); } /** * Load a job in a directory * * @param log the logging channel * @param rep The Repository * @param jobname The name of the job * @param repdir The directory in which the job resides. * @throws KettleException */ public JobMeta(LogWriter log, Repository rep, String jobname, RepositoryDirectory repdir, IProgressMonitor monitor) throws KettleException { this.log = log; try { // Clear everything... clear(); directory = repdir; // Get the transformation id setID(rep.getJobID(jobname, repdir.getID())); // If no valid id is available in the database, then give error... if (getID() > 0) { // Load the notes... 
long noteids[] = rep.getJobNoteIDs(getID()); long jecids[] = rep.getJobEntryCopyIDs(getID()); long hopid[] = rep.getJobHopIDs(getID()); int nrWork = 2 + noteids.length + jecids.length + hopid.length; if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.LoadingJob") + repdir + Const.FILE_SEPARATOR + jobname, nrWork); //$NON-NLS-1$ // get job info: if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingJobInformation")); //$NON-NLS-1$ RowMetaAndData jobRow = rep.getJob(getID()); name = jobRow.getString("NAME", null); //$NON-NLS-1$ description = jobRow.getString("DESCRIPTION", null); //$NON-NLS-1$ extended_description = jobRow.getString("EXTENDED_DESCRIPTION", null); //$NON-NLS-1$ job_version = jobRow.getString("JOB_VERSION", null); //$NON-NLS-1$ job_status = Const.toInt(jobRow.getString("JOB_STATUS", null),-1); //$NON-NLS-1$ logTable = jobRow.getString("TABLE_NAME_LOG", null); //$NON-NLS-1$ created_user = jobRow.getString("CREATED_USER", null); //$NON-NLS-1$ created_date = jobRow.getDate("CREATED_DATE", new Date()); //$NON-NLS-1$ modifiedUser = jobRow.getString("MODIFIED_USER", null); //$NON-NLS-1$ modifiedDate = jobRow.getDate("MODIFIED_DATE", new Date()); //$NON-NLS-1$ long id_logdb = jobRow.getInteger("ID_DATABASE_LOG", 0); //$NON-NLS-1$ if (id_logdb > 0) { // Get the logconnection logconnection = new DatabaseMeta(rep, id_logdb); } useBatchId = jobRow.getBoolean("USE_BATCH_ID", false); //$NON-NLS-1$ batchIdPassed = jobRow.getBoolean("PASS_BATCH_ID", false); //$NON-NLS-1$ logfieldUsed = jobRow.getBoolean("USE_LOGFIELD", false); //$NON-NLS-1$ if (monitor != null) monitor.worked(1); // Load the common database connections if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingAvailableDatabasesFromRepository")); //$NON-NLS-1$ // Read objects from the shared XML file & the repository try { sharedObjectsFile = jobRow.getString("SHARED_FILE", null); readSharedObjects(rep); } catch(Exception e) { 
LogWriter.getInstance().logError(toString(), Messages.getString("JobMeta.ErrorReadingSharedObjects.Message", e.toString())); // $NON-NLS-1$ //$NON-NLS-1$ LogWriter.getInstance().logError(toString(), Const.getStackTracker(e)); } if (monitor != null) monitor.worked(1); log.logDetailed(toString(), "Loading " + noteids.length + " notes"); //$NON-NLS-1$ //$NON-NLS-2$ for (int i = 0; i < noteids.length; i++) { if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length); //$NON-NLS-1$ //$NON-NLS-2$ NotePadMeta ni = new NotePadMeta(log, rep, noteids[i]); if (indexOfNote(ni) < 0) addNote(ni); if (monitor != null) monitor.worked(1); } // Load the job entries... log.logDetailed(toString(), "Loading " + jecids.length + " job entries"); //$NON-NLS-1$ //$NON-NLS-2$ for (int i = 0; i < jecids.length; i++) { if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + (jecids.length)); //$NON-NLS-1$ //$NON-NLS-2$ JobEntryCopy jec = new JobEntryCopy(log, rep, getID(), jecids[i], jobentries, databases, slaveServers); // Also set the copy number... // We count the number of job entry copies that use the job entry int copyNr = 0; for (JobEntryCopy copy : jobcopies) { if (jec.getEntry()==copy.getEntry()) { copyNr++; } } jec.setNr(copyNr); int idx = indexOfJobEntry(jec); if (idx < 0) { if (jec.getName() != null && jec.getName().length() > 0) addJobEntry(jec); } else { setJobEntry(idx, jec); // replace it! } if (monitor != null) monitor.worked(1); } // Load the hops... 
log.logDetailed(toString(), "Loading " + hopid.length + " job hops"); //$NON-NLS-1$ //$NON-NLS-2$ for (int i = 0; i < hopid.length; i++) { if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + (jecids.length)); //$NON-NLS-1$ //$NON-NLS-2$ JobHopMeta hi = new JobHopMeta(rep, hopid[i], this, jobcopies); jobhops.add(hi); if (monitor != null) monitor.worked(1); } // Finally, clear the changed flags... clearChanged(); if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.FinishedLoadOfJob")); //$NON-NLS-1$ if (monitor != null) monitor.done(); } else { throw new KettleException(Messages.getString("JobMeta.Exception.CanNotFindJob") + jobname); //$NON-NLS-1$ } } catch (KettleException dbe) { throw new KettleException(Messages.getString("JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe); } finally { setInternalKettleVariables(); } } public JobEntryCopy getJobEntryCopy(int x, int y, int iconsize) { int i, s; s = nrJobEntries(); for (i = s - 1; i >= 0; i--) // Back to front because drawing goes from start to end { JobEntryCopy je = getJobEntry(i); Point p = je.getLocation(); if (p != null) { if (x >= p.x && x <= p.x + iconsize && y >= p.y && y <= p.y + iconsize) { return je; } } } return null; } public int nrJobEntries() { return jobcopies.size(); } public int nrJobHops() { return jobhops.size(); } public int nrNotes() { return notes.size(); } public int nrDatabases() { return databases.size(); } public JobHopMeta getJobHop(int i) { return jobhops.get(i); } public JobEntryCopy getJobEntry(int i) { return jobcopies.get(i); } public NotePadMeta getNote(int i) { return notes.get(i); } public DatabaseMeta getDatabase(int i) { return databases.get(i); } public void addJobEntry(JobEntryCopy je) { jobcopies.add(je); setChanged(); } public void addJobHop(JobHopMeta hi) { jobhops.add(hi); setChanged(); } public void addNote(NotePadMeta ni) { notes.add(ni); setChanged(); } public void 
addDatabase(DatabaseMeta ci) { databases.add(ci); changed_databases = true; } public void addJobEntry(int p, JobEntryCopy si) { jobcopies.add(p, si); changed_entries = true; } public void addJobHop(int p, JobHopMeta hi) { jobhops.add(p, hi); changed_hops = true; } public void addNote(int p, NotePadMeta ni) { notes.add(p, ni); changed_notes = true; } public void addDatabase(int p, DatabaseMeta ci) { databases.add(p, ci); changed_databases = true; } /* * (non-Javadoc) * * @see org.pentaho.di.trans.HasDatabaseInterface#addOrReplaceDatabase(org.pentaho.di.core.database.DatabaseMeta) */ public void addOrReplaceDatabase(DatabaseMeta databaseMeta) { int index = databases.indexOf(databaseMeta); if (index < 0) { databases.add(databaseMeta); } else { DatabaseMeta previous = getDatabase(index); previous.replaceMeta(databaseMeta); } changed_databases = true; } /** * Add a new slave server to the transformation if that didn't exist yet. * Otherwise, replace it. * * @param slaveServer The slave server to be added. 
*/ public void addOrReplaceSlaveServer(SlaveServer slaveServer) { int index = slaveServers.indexOf(slaveServer); if (index<0) { slaveServers.add(slaveServer); } else { SlaveServer previous = slaveServers.get(index); previous.replaceMeta(slaveServer); } setChanged(); } public void removeJobEntry(int i) { jobcopies.remove(i); setChanged(); } public void removeJobHop(int i) { jobhops.remove(i); setChanged(); } public void removeNote(int i) { notes.remove(i); setChanged(); } public void raiseNote(int p) { // if valid index and not last index if ((p >=0) && (p < notes.size()-1)) { NotePadMeta note = notes.remove(p); notes.add(note); changed_notes = true; } } public void lowerNote(int p) { // if valid index and not first index if ((p >0) && (p < notes.size())) { NotePadMeta note = notes.remove(p); notes.add(0, note); changed_notes = true; } } public void removeDatabase(int i) { if (i < 0 || i >= databases.size()) return; databases.remove(i); changed_databases = true; } public int indexOfJobHop(JobHopMeta he) { return jobhops.indexOf(he); } public int indexOfNote(NotePadMeta ni) { return notes.indexOf(ni); } public int indexOfJobEntry(JobEntryCopy ge) { return jobcopies.indexOf(ge); } public int indexOfDatabase(DatabaseMeta di) { return databases.indexOf(di); } public void setJobEntry(int idx, JobEntryCopy jec) { jobcopies.set(idx, jec); } /** * Find an existing JobEntryCopy by it's name and number * * @param name The name of the job entry copy * @param nr The number of the job entry copy * @return The JobEntryCopy or null if nothing was found! 
*/
public JobEntryCopy findJobEntry(String name, int nr, boolean searchHiddenToo)
{
    // Linear scan: job entry lists are small enough that no index is kept.
    for (int i = 0; i < nrJobEntries(); i++)
    {
        JobEntryCopy jec = getJobEntry(i);
        if (jec.getName().equalsIgnoreCase(name) && jec.getNr() == nr)
        {
            if (searchHiddenToo || jec.isDrawn())
            {
                return jec;
            }
        }
    }
    return null;
}

/**
 * Find a job entry copy by its full display string (JobEntryInterface.toString()).
 *
 * @param full_name_nr the full name/number string, compared case-insensitively
 * @return the matching copy, or null when nothing matches
 */
public JobEntryCopy findJobEntry(String full_name_nr)
{
    int i;
    for (i = 0; i < nrJobEntries(); i++)
    {
        JobEntryCopy jec = getJobEntry(i);
        JobEntryInterface je = jec.getEntry();
        if (je.toString().equalsIgnoreCase(full_name_nr))
        {
            return jec;
        }
    }
    return null;
}

/**
 * Find a job hop by its display name.
 *
 * @param name the hop name, compared case-insensitively
 * @return the matching hop, or null
 */
public JobHopMeta findJobHop(String name)
{
    for (JobHopMeta hi:jobhops) // Look at all the hops
    {
        if (hi.toString().equalsIgnoreCase(name))
        {
            return hi;
        }
    }
    return null;
}

/**
 * Find the first hop leaving the given job entry copy.
 *
 * @param jge the originating entry (null simply returns null)
 * @return the first matching hop, or null
 */
public JobHopMeta findJobHopFrom(JobEntryCopy jge)
{
    if (jge != null)
    {
        for (JobHopMeta hi:jobhops)
        {
            if (hi!=null && (hi.from_entry != null) && hi.from_entry.equals(jge)) // return the first
            {
                return hi;
            }
        }
    }
    return null;
}

/**
 * Find the enabled hop connecting two specific job entry copies.
 *
 * @param from the source entry
 * @param to the target entry
 * @return the enabled hop running from "from" to "to", or null
 */
public JobHopMeta findJobHop(JobEntryCopy from, JobEntryCopy to)
{
    for (JobHopMeta hi:jobhops)
    {
        // BUG FIX: the null check used to come AFTER hi.isEnabled(), so a null list
        // element caused a NullPointerException before the guard could ever run.
        if (hi != null && hi.isEnabled())
        {
            if (hi.from_entry != null && hi.to_entry != null && hi.from_entry.equals(from) && hi.to_entry.equals(to))
            {
                return hi;
            }
        }
    }
    return null;
}

public JobHopMeta findJobHopTo(JobEntryCopy jge)
{
    for (JobHopMeta hi:jobhops)
    {
        if (hi != null && hi.to_entry != null && hi.to_entry.equals(jge)) // Return the first!
{ return hi; } } return null; } public int findNrPrevJobEntries(JobEntryCopy from) { return findNrPrevJobEntries(from, false); } public JobEntryCopy findPrevJobEntry(JobEntryCopy to, int nr) { return findPrevJobEntry(to, nr, false); } public int findNrPrevJobEntries(JobEntryCopy to, boolean info) { int count = 0; for (JobHopMeta hi:jobhops) // Look at all the hops { if (hi.isEnabled() && hi.to_entry.equals(to)) { count++; } } return count; } public JobEntryCopy findPrevJobEntry(JobEntryCopy to, int nr, boolean info) { int count = 0; for (JobHopMeta hi:jobhops) // Look at all the hops { if (hi.isEnabled() && hi.to_entry.equals(to)) { if (count == nr) { return hi.from_entry; } count++; } } return null; } public int findNrNextJobEntries(JobEntryCopy from) { int count = 0; for (JobHopMeta hi:jobhops) // Look at all the hops { if (hi.isEnabled() && (hi.from_entry != null) && hi.from_entry.equals(from)) count++; } return count; } public JobEntryCopy findNextJobEntry(JobEntryCopy from, int cnt) { int count = 0; for (JobHopMeta hi:jobhops) // Look at all the hops { if (hi.isEnabled() && (hi.from_entry != null) && hi.from_entry.equals(from)) { if (count == cnt) { return hi.to_entry; } count++; } } return null; } public boolean hasLoop(JobEntryCopy entry) { return hasLoop(entry, null); } public boolean hasLoop(JobEntryCopy entry, JobEntryCopy lookup) { return false; } public boolean isEntryUsedInHops(JobEntryCopy jge) { JobHopMeta fr = findJobHopFrom(jge); JobHopMeta to = findJobHopTo(jge); if (fr != null || to != null) return true; return false; } public int countEntries(String name) { int count = 0; int i; for (i = 0; i < nrJobEntries(); i++) // Look at all the hops; { JobEntryCopy je = getJobEntry(i); if (je.getName().equalsIgnoreCase(name)) count++; } return count; } public int generateJobEntryNameNr(String basename) { int nr = 1; JobEntryCopy e = findJobEntry(basename + " " + nr, 0, true); //$NON-NLS-1$ while (e != null) { nr++; e = findJobEntry(basename + " " + nr, 0, 
true); //$NON-NLS-1$ } return nr; } public int findUnusedNr(String name) { int nr = 1; JobEntryCopy je = findJobEntry(name, nr, true); while (je != null) { nr++; // log.logDebug("findUnusedNr()", "Trying unused nr: "+nr); je = findJobEntry(name, nr, true); } return nr; } public int findMaxNr(String name) { int max = 0; for (int i = 0; i < nrJobEntries(); i++) { JobEntryCopy je = getJobEntry(i); if (je.getName().equalsIgnoreCase(name)) { if (je.getNr() > max) max = je.getNr(); } } return max; } /** * Proposes an alternative job entry name when the original already exists... * * @param entryname The job entry name to find an alternative for.. * @return The alternative stepname. */ public String getAlternativeJobentryName(String entryname) { String newname = entryname; JobEntryCopy jec = findJobEntry(newname); int nr = 1; while (jec != null) { nr++; newname = entryname + " " + nr; //$NON-NLS-1$ jec = findJobEntry(newname); } return newname; } public JobEntryCopy[] getAllJobGraphEntries(String name) { int count = 0; for (int i = 0; i < nrJobEntries(); i++) { JobEntryCopy je = getJobEntry(i); if (je.getName().equalsIgnoreCase(name)) count++; } JobEntryCopy retval[] = new JobEntryCopy[count]; count = 0; for (int i = 0; i < nrJobEntries(); i++) { JobEntryCopy je = getJobEntry(i); if (je.getName().equalsIgnoreCase(name)) { retval[count] = je; count++; } } return retval; } public JobHopMeta[] getAllJobHopsUsing(String name) { List<JobHopMeta> hops = new ArrayList<JobHopMeta>(); for (JobHopMeta hi:jobhops) // Look at all the hops { if (hi.from_entry != null && hi.to_entry != null) { if (hi.from_entry.getName().equalsIgnoreCase(name) || hi.to_entry.getName().equalsIgnoreCase(name)) { hops.add(hi); } } } return hops.toArray(new JobHopMeta[hops.size()]); } public NotePadMeta getNote(int x, int y) { int i, s; s = notes.size(); for (i = s - 1; i >= 0; i--) // Back to front because drawing goes from start to end { NotePadMeta ni = notes.get(i); Point loc = ni.getLocation(); Point 
p = new Point(loc.x, loc.y); if (x >= p.x && x <= p.x + ni.width + 2 * Const.NOTE_MARGIN && y >= p.y && y <= p.y + ni.height + 2 * Const.NOTE_MARGIN) { return ni; } } return null; } public void selectAll() { int i; for (i = 0; i < nrJobEntries(); i++) { JobEntryCopy ce = getJobEntry(i); ce.setSelected(true); } setChanged(); notifyObservers("refreshGraph"); } public void unselectAll() { int i; for (i = 0; i < nrJobEntries(); i++) { JobEntryCopy ce = getJobEntry(i); ce.setSelected(false); } } public int getMaxUndo() { return max_undo; } public void setMaxUndo(int mu) { max_undo = mu; while (undo.size() > mu && undo.size() > 0) undo.remove(0); } public int getUndoSize() { if (undo == null) return 0; return undo.size(); } public void clearUndo() { undo = new ArrayList<TransAction>(); undo_position = -1; } public void addUndo(Object from[], Object to[], int pos[], Point prev[], Point curr[], int type_of_change, boolean nextAlso) { // First clean up after the current position. // Example: position at 3, size=5 // 012345 // remove 34 // Add 4 // 01234 while (undo.size() > undo_position + 1 && undo.size() > 0) { int last = undo.size() - 1; undo.remove(last); } TransAction ta = new TransAction(); switch (type_of_change) { case TYPE_UNDO_CHANGE: ta.setChanged(from, to, pos); break; case TYPE_UNDO_DELETE: ta.setDelete(from, pos); break; case TYPE_UNDO_NEW: ta.setNew(from, pos); break; case TYPE_UNDO_POSITION: ta.setPosition(from, pos, prev, curr); break; } undo.add(ta); undo_position++; if (undo.size() > max_undo) { undo.remove(0); undo_position } } // get previous undo, change position public TransAction previousUndo() { if (undo.isEmpty() || undo_position < 0) return null; // No undo left! TransAction retval = undo.get(undo_position); undo_position return retval; } /** * View current undo, don't change undo position * * @return The current undo transaction */ public TransAction viewThisUndo() { if (undo.isEmpty() || undo_position < 0) return null; // No undo left! 
TransAction retval = undo.get(undo_position);
return retval;
}

// View previous undo, don't change position
// NOTE(review): this returns the entry at undo_position, exactly like
// viewThisUndo() above -- confirm whether undo_position - 1 was intended.
public TransAction viewPreviousUndo()
{
    if (undo.isEmpty() || undo_position < 0)
        return null; // No undo left!

    TransAction retval = undo.get(undo_position);
    return retval;
}

/**
 * Moves the undo cursor forward (redo) and returns the transaction at the
 * new position, or null when there is nothing left to redo.
 */
public TransAction nextUndo()
{
    int size = undo.size();
    if (size == 0 || undo_position >= size - 1)
        return null; // no redo left...

    undo_position++;
    TransAction retval = undo.get(undo_position);
    return retval;
}

/**
 * Returns the transaction nextUndo() would return, without moving the undo
 * cursor; null when there is nothing left to redo.
 */
public TransAction viewNextUndo()
{
    int size = undo.size();
    if (size == 0 || undo_position >= size - 1)
        return null; // no redo left...

    TransAction retval = undo.get(undo_position + 1);
    return retval;
}

/**
 * Computes the bottom-right extent of the job graph: the largest x/y used
 * by any entry or note (notes add their width/height), plus a 100-pixel
 * margin on each axis.
 */
public Point getMaximum()
{
    int maxx = 0, maxy = 0;
    for (int i = 0; i < nrJobEntries(); i++)
    {
        JobEntryCopy entry = getJobEntry(i);
        Point loc = entry.getLocation();
        if (loc.x > maxx) maxx = loc.x;
        if (loc.y > maxy) maxy = loc.y;
    }
    for (int i = 0; i < nrNotes(); i++)
    {
        NotePadMeta ni = getNote(i);
        Point loc = ni.getLocation();
        if (loc.x + ni.width > maxx) maxx = loc.x + ni.width;
        if (loc.y + ni.height > maxy) maxy = loc.y + ni.height;
    }
    return new Point(maxx + 100, maxy + 100);
}

/**
 * @return a freshly allocated array containing a copy of the location of
 *         every selected job entry, in selection-scan order
 */
public Point[] getSelectedLocations()
{
    int sels = nrSelected();
    Point retval[] = new Point[sels];
    for (int i = 0; i < sels; i++)
    {
        JobEntryCopy si = getSelected(i);
        Point p = si.getLocation();
        retval[i] = new Point(p.x, p.y); // explicit copy of location
    }
    return retval;
}

/**
 * @return an array with the selected job entry copies, or null when nothing
 *         is selected (callers must handle the null)
 */
public JobEntryCopy[] getSelectedEntries()
{
    int sels = nrSelected();
    if (sels == 0)
        return null;

    JobEntryCopy retval[] = new JobEntryCopy[sels];
    for (int i = 0; i < sels; i++)
    {
        JobEntryCopy je = getSelected(i);
        retval[i] = je;
    }
    return retval;
}

/**
 * @return the number of job entries that are both selected and drawn
 */
public int nrSelected()
{
    int i, count;
    count = 0;
    for (i = 0; i < nrJobEntries(); i++)
    {
        JobEntryCopy je = getJobEntry(i);
        if (je.isSelected() && je.isDrawn())
            count++;
    }
    return count;
}

/**
 * Returns the nr'th selected job entry, or null when nr is out of range.
 * NOTE(review): counts entries by isSelected() only, while nrSelected()
 * also requires isDrawn() -- confirm the two indexings stay in sync.
 */
public JobEntryCopy getSelected(int nr)
{
    int i, count;
    count = 0;
    for (i = 0; i < nrJobEntries(); i++)
    {
        JobEntryCopy je = getJobEntry(i);
        if (je.isSelected())
        {
            if (nr == count)
                return je;
            count++;
        }
    }
    return null;
}

/**
 * Maps each given job entry copy to its index within this job.
 */
public int[] getEntryIndexes(JobEntryCopy entries[])
{
    int retval[] = new int[entries.length];

    for (int i = 0; i < entries.length; i++)
        retval[i] = indexOfJobEntry(entries[i]);

    return retval;
}

/**
 * @return the first job entry flagged as the START entry, or null when the
 *         job has none
 */
public JobEntryCopy findStart()
{
    for (int i = 0; i < nrJobEntries(); i++)
    {
        if (getJobEntry(i).isStart())
            return getJobEntry(i);
    }
    return null;
}

/**
 * @return the job name when set, otherwise the filename, otherwise the
 *         class name
 */
public String toString()
{
    if (name != null)
        return name;
    if (filename != null)
        return filename;
    else
        return getClass().getName();
}

/**
 * @return Returns the logfieldUsed.
 */
public boolean isLogfieldUsed()
{
    return logfieldUsed;
}

/**
 * @param logfieldUsed The logfieldUsed to set.
 */
public void setLogfieldUsed(boolean logfieldUsed)
{
    this.logfieldUsed = logfieldUsed;
}

/**
 * @return Returns the useBatchId.
 */
public boolean isBatchIdUsed()
{
    return useBatchId;
}

/**
 * @param useBatchId The useBatchId to set.
 */
public void setUseBatchId(boolean useBatchId)
{
    this.useBatchId = useBatchId;
}

/**
 * @return Returns the batchIdPassed.
 */
public boolean isBatchIdPassed()
{
    return batchIdPassed;
}

/**
 * @param batchIdPassed The batchIdPassed to set.
 */
public void setBatchIdPassed(boolean batchIdPassed)
{
    this.batchIdPassed = batchIdPassed;
}

/**
 * Builds a list of all the SQL statements that this transformation needs in order to work properly.
 *
 * @return An ArrayList of SQLStatement objects.
*/
public List<SQLStatement> getSQLStatements(Repository repository, IProgressMonitor monitor) throws KettleException
{
    if (monitor != null)
        monitor.beginTask(Messages.getString("JobMeta.Monitor.GettingSQLNeededForThisJob"), nrJobEntries() + 1); //$NON-NLS-1$

    List<SQLStatement> stats = new ArrayList<SQLStatement>();

    // Ask every job entry for the SQL it needs.
    for (int i = 0; i < nrJobEntries(); i++)
    {
        JobEntryCopy copy = getJobEntry(i);
        if (monitor != null)
            monitor.subTask(Messages.getString("JobMeta.Monitor.GettingSQLForJobEntryCopy") + copy + "]"); //$NON-NLS-1$ //$NON-NLS-2$

        List<SQLStatement> list = copy.getEntry().getSQLStatements(repository, this);
        stats.addAll(list);

        if (monitor != null)
            monitor.worked(1);
    }

    // Also check the sql for the logtable...
    if (monitor != null)
        monitor.subTask(Messages.getString("JobMeta.Monitor.GettingSQLStatementsForJobLogTables")); //$NON-NLS-1$

    if (logconnection != null && logTable != null && logTable.length() > 0)
    {
        Database db = new Database(logconnection);
        try
        {
            db.connect();
            RowMetaInterface fields = Database.getJobLogrecordFields(false, useBatchId, logfieldUsed);
            String sql = db.getDDL(logTable, fields);
            if (sql != null && sql.length() > 0)
            {
                SQLStatement stat = new SQLStatement(Messages.getString("JobMeta.SQLFeedback.ThisJob"), logconnection, sql); //$NON-NLS-1$
                stats.add(stat);
            }
        }
        catch (KettleDatabaseException dbe)
        {
            // Report the failure as an SQLStatement carrying the error instead of aborting.
            SQLStatement stat = new SQLStatement(Messages.getString("JobMeta.SQLFeedback.ThisJob"), logconnection, null); //$NON-NLS-1$
            stat.setError(Messages.getString("JobMeta.SQLFeedback.ErrorObtainingJobLogTableInfo") + dbe.getMessage()); //$NON-NLS-1$
            stats.add(stat);
        }
        finally
        {
            db.disconnect(); // always release the connection
        }
    }

    if (monitor != null)
        monitor.worked(1);
    if (monitor != null)
        monitor.done();

    return stats;
}

/**
 * @return Returns the logTable.
 */
public String getLogTable()
{
    return logTable;
}

/**
 * @param logTable The logTable to set.
 */
public void setLogTable(String logTable)
{
    this.logTable = logTable;
}

/**
 * @return Returns the arguments.
*/
public String[] getArguments()
{
    return arguments;
}

/**
 * @param arguments The arguments to set.
 */
public void setArguments(String[] arguments)
{
    this.arguments = arguments;
}

/**
 * Get a list of all the strings used in this job.
 *
 * @param searchSteps     true to scan job entry names, descriptions and metadata
 * @param searchDatabases true to scan database connection settings
 * @param searchNotes     true to scan notepad texts
 * @return A list of StringSearchResult with strings used in the job
 */
public List<StringSearchResult> getStringList(boolean searchSteps, boolean searchDatabases, boolean searchNotes)
{
    List<StringSearchResult> stringList = new ArrayList<StringSearchResult>();

    if (searchSteps)
    {
        // Loop over all steps in the transformation and see what the used vars are...
        for (int i = 0; i < nrJobEntries(); i++)
        {
            JobEntryCopy entryMeta = getJobEntry(i);
            stringList.add(new StringSearchResult(entryMeta.getName(), entryMeta, this, Messages.getString("JobMeta.SearchMetadata.JobEntryName"))); //$NON-NLS-1$
            if (entryMeta.getDescription() != null)
                stringList.add(new StringSearchResult(entryMeta.getDescription(), entryMeta, this, Messages.getString("JobMeta.SearchMetadata.JobEntryDescription"))); //$NON-NLS-1$
            JobEntryInterface metaInterface = entryMeta.getEntry();
            // Recursively scan the entry's own metadata for strings.
            StringSearcher.findMetaData(metaInterface, 1, stringList, entryMeta, this);
        }
    }

    // Loop over all steps in the transformation and see what the used vars are...
    if (searchDatabases)
    {
        for (int i = 0; i < nrDatabases(); i++)
        {
            DatabaseMeta meta = getDatabase(i);
            stringList.add(new StringSearchResult(meta.getName(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseConnectionName"))); //$NON-NLS-1$
            if (meta.getDatabaseName() != null)
                stringList.add(new StringSearchResult(meta.getDatabaseName(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseName"))); //$NON-NLS-1$
            if (meta.getUsername() != null)
                stringList.add(new StringSearchResult(meta.getUsername(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseUsername"))); //$NON-NLS-1$
            if (meta.getDatabaseTypeDesc() != null)
                stringList.add(new StringSearchResult(meta.getDatabaseTypeDesc(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseTypeDescription"))); //$NON-NLS-1$
            if (meta.getDatabasePortNumberString() != null)
                stringList.add(new StringSearchResult(meta.getDatabasePortNumberString(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabasePort"))); //$NON-NLS-1$
        }
    }

    // Loop over all steps in the transformation and see what the used vars are...
    if (searchNotes)
    {
        for (int i = 0; i < nrNotes(); i++)
        {
            NotePadMeta meta = getNote(i);
            if (meta.getNote() != null)
                stringList.add(new StringSearchResult(meta.getNote(), meta, this, Messages.getString("JobMeta.SearchMetadata.NotepadText"))); //$NON-NLS-1$
        }
    }

    return stringList;
}

/**
 * Collects the variable names referenced in this job's entry and database
 * metadata (notes are not scanned).
 */
public List<String> getUsedVariables()
{
    // Get the list of Strings.
    List<StringSearchResult> stringList = getStringList(true, true, false);

    List<String> varList = new ArrayList<String>();

    // Look around in the strings, see what we find...
    for (StringSearchResult result : stringList)
    {
        StringUtil.getUsedVariables(result.getString(), varList, false);
    }

    return varList;
}

/**
 * Get an array of all the selected job entries
 *
 * @return A list containing all the selected & drawn job entries.
*/
public List<GUIPositionInterface> getSelectedDrawnJobEntryList()
{
    List<GUIPositionInterface> list = new ArrayList<GUIPositionInterface>();

    for (int i = 0; i < nrJobEntries(); i++)
    {
        JobEntryCopy jobEntryCopy = getJobEntry(i);
        if (jobEntryCopy.isDrawn() && jobEntryCopy.isSelected())
        {
            list.add( jobEntryCopy);
        }
    }
    return list;
}

/**
 * @return true when the database list itself or any single connection has
 *         its changed flag set
 */
public boolean haveConnectionsChanged()
{
    if (changed_databases)
        return true;

    for (int i = 0; i < nrDatabases(); i++)
    {
        DatabaseMeta ci = getDatabase(i);
        if (ci.hasChanged())
            return true;
    }
    return false;
}

/**
 * @return true when the entry list itself or any single job entry has its
 *         changed flag set
 */
public boolean haveJobEntriesChanged()
{
    if (changed_entries)
        return true;

    for (int i = 0; i < nrJobEntries(); i++)
    {
        JobEntryCopy entry = getJobEntry(i);
        if (entry.hasChanged())
            return true;
    }
    return false;
}

/**
 * @return true when the hop list itself or any single hop has its changed
 *         flag set
 */
public boolean haveJobHopsChanged()
{
    if (changed_hops)
        return true;

    for (JobHopMeta hi : jobhops) // Look at all the hops
    {
        if (hi.hasChanged())
            return true;
    }
    return false;
}

/**
 * @return true when the note list itself or any single note has its changed
 *         flag set
 */
public boolean haveNotesChanged()
{
    if (changed_notes)
        return true;

    for (int i = 0; i < nrNotes(); i++)
    {
        NotePadMeta note = getNote(i);
        if (note.hasChanged())
            return true;
    }
    return false;
}

/**
 * @return the sharedObjectsFile
 */
public String getSharedObjectsFile()
{
    return sharedObjectsFile;
}

/**
 * @param sharedObjectsFile the sharedObjectsFile to set
 */
public void setSharedObjectsFile(String sharedObjectsFile)
{
    this.sharedObjectsFile = sharedObjectsFile;
}

/**
 * @param modified_User The modifiedUser to set.
 */
public void setModifiedUser(String modified_User)
{
    modifiedUser = modified_User;
}

/**
 * @return Returns the modifiedUser.
 */
public String getModifiedUser()
{
    return modifiedUser;
}

/**
 * @param modified_Date The modifiedDate to set.
 */
public void setModifiedDate(Date modified_Date)
{
    modifiedDate = modified_Date;
}

/**
 * @return Returns the modifiedDate.
 */
public Date getModifiedDate()
{
    return modifiedDate;
}

/**
 * @return The description of the job
 */
public String getDescription()
{
    return description;
}

/**
 * @return The extended description of the job
 */
public String getExtendedDescription()
{
    return extended_description;
}

/**
 * @return The version of the job
 */
public String getJobversion()
{
    return job_version;
}

/**
 * Get the status of the job
 */
public int getJobstatus()
{
    return job_status;
}

/**
 * Set the description of the job.
 *
 * @param n The new description of the job
 */
public void setDescription(String n)
{
    description = n;
}

/**
 * Set the extended description of the job.
 *
 * @param n The new extended description of the job
 */
public void setExtendedDescription(String n)
{
    extended_description = n;
}

/**
 * Set the version of the job.
 *
 * @param n The new version description of the job
 */
public void setJobversion(String n)
{
    job_version = n;
}

/**
 * Set the status of the job.
 *
 * @param n The new status description of the job
 */
public void setJobstatus(int n)
{
    job_status = n;
}

/**
 * @return Returns the createdDate.
 */
public Date getCreatedDate()
{
    return created_date;
}

/**
 * @param createddate The createdDate to set.
 */
public void setCreatedDate(Date createddate)
{
    created_date = createddate;
}

/**
 * @param createduser The createdUser to set.
 */
public void setCreatedUser(String createduser)
{
    created_user = createduser;
}

/**
 * @return Returns the createdUser.
 */
public String getCreatedUser()
{
    return created_user;
}

/**
 * Find a jobentry with a certain ID in a list of job entries.
 * @param jobentries The List of jobentries
 * @param id_jobentry The id of the jobentry
 * @return The JobEntry object if one was found, null otherwise.
*/
public static final JobEntryInterface findJobEntry(List<JobEntryInterface> jobentries, long id_jobentry)
{
    if (jobentries == null)
        return null;

    for (JobEntryInterface je : jobentries)
    {
        if (je.getID() == id_jobentry)
        {
            return je;
        }
    }
    return null;
}

/**
 * Find a jobentrycopy with a certain ID in a list of job entry copies.
 * @param jobcopies The List of jobentry copies
 * @param id_jobentry_copy The id of the jobentry copy
 * @return The JobEntryCopy object if one was found, null otherwise.
 */
public static final JobEntryCopy findJobEntryCopy(List<JobEntryCopy> jobcopies, long id_jobentry_copy)
{
    if (jobcopies == null)
        return null;

    for (JobEntryCopy jec : jobcopies)
    {
        if (jec.getID() == id_jobentry_copy)
        {
            return jec;
        }
    }
    return null;
}

/**
 * Calls setInternalKettleVariables on the default object.
 */
public void setInternalKettleVariables()
{
    setInternalKettleVariables(variables);
}

/**
 * This method sets various internal kettle variables that can be used by the transformation.
 */
public void setInternalKettleVariables(VariableSpace var)
{
    if (filename!=null) // we have a filename that's defined.
    {
        try
        {
            FileObject fileObject = KettleVFS.getFileObject(filename);
            FileName fileName = fileObject.getName();

            // The filename of the transformation
            var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, fileName.getBaseName());

            // The directory of the transformation
            FileName fileDir = fileName.getParent();
            var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, fileDir.getURI());
        }
        catch(IOException e)
        {
            // VFS lookup failed: fall back to empty values rather than failing.
            var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, "");
            var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, "");
        }
    }
    else
    {
        var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, ""); //$NON-NLS-1$
        var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, ""); //$NON-NLS-1$
    }

    // The name of the job
    var.setVariable(Const.INTERNAL_VARIABLE_JOB_NAME, Const.NVL(name, "")); //$NON-NLS-1$

    // The name of the directory in the repository
    var.setVariable(Const.INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY, directory != null ? directory.getPath() : ""); //$NON-NLS-1$

    // Undefine the transformation specific variables:
    // transformations can't run jobs, so if you use these they are 99.99% wrong.
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, null); var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, null); var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, null); var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, null); var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_NAME, null); var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY, null); } public void copyVariablesFrom(VariableSpace space) { variables.copyVariablesFrom(space); } public String environmentSubstitute(String aString) { return variables.environmentSubstitute(aString); } public String[] environmentSubstitute(String aString[]) { return variables.environmentSubstitute(aString); } public VariableSpace getParentVariableSpace() { return variables.getParentVariableSpace(); } public void setParentVariableSpace(VariableSpace parent) { variables.setParentVariableSpace(parent); } public String getVariable(String variableName, String defaultValue) { return variables.getVariable(variableName, defaultValue); } public String getVariable(String variableName) { return variables.getVariable(variableName); } public boolean getBooleanValueOfVariable(String variableName, boolean defaultValue) { if (!Const.isEmpty(variableName)) { String value = environmentSubstitute(variableName); if (!Const.isEmpty(value)) { return ValueMeta.convertStringToBoolean(value); } } return defaultValue; } public void initializeVariablesFrom(VariableSpace parent) { variables.initializeVariablesFrom(parent); } public String[] listVariables() { return variables.listVariables(); } public void setVariable(String variableName, String variableValue) { variables.setVariable(variableName, variableValue); } public void shareVariablesWith(VariableSpace space) { variables = space; } public void injectVariables(Map<String,String> prop) { variables.injectVariables(prop); } /** * Check all job entries within the job. 
Each Job Entry has the opportunity to
 * check their own settings.
 * @param remarks List of CheckResult remarks inserted into by each JobEntry
 * @param only_selected true if you only want to check the selected jobs
 * @param monitor Progress monitor (not presently in use)
 */
public void checkJobEntries(List<CheckResultInterface> remarks, boolean only_selected, IProgressMonitor monitor)
{
    remarks.clear(); // Empty remarks
    if (monitor != null)
        monitor.beginTask(Messages.getString("JobMeta.Monitor.VerifyingThisJobEntryTask.Title"), jobcopies.size() + 2); //$NON-NLS-1$

    boolean stop_checking = false;
    for (int i=0; i<jobcopies.size() && !stop_checking; i++)
    {
        JobEntryCopy copy = jobcopies.get(i); // get the job entry copy
        if ( (!only_selected) || (only_selected && copy.isSelected()) )
        {
            JobEntryInterface entry = copy.getEntry();
            if (entry != null)
            {
                if (monitor != null)
                    monitor.subTask(Messages.getString("JobMeta.Monitor.VerifyingJobEntry.Title",entry.getName())); //$NON-NLS-1$ //$NON-NLS-2$

                // Let the entry validate its own settings into the remarks list.
                entry.check(remarks, this);

                if (monitor != null)
                {
                    monitor.worked(1); // progress bar...
                    if (monitor.isCanceled())
                    {
                        stop_checking = true; // user cancelled: stop scanning entries
                    }
                }
            }
        }
        if (monitor != null)
        {
            monitor.worked(1);
        }
    }
    if (monitor != null)
    {
        monitor.done();
    }
}

/**
 * Collects the resource dependencies reported by every job entry in this job.
 */
public List<ResourceReference> getResourceDependencies()
{
    List<ResourceReference> resourceReferences = new ArrayList<ResourceReference>();
    JobEntryCopy copy = null;
    JobEntryInterface entry = null;
    for (int i=0;i<jobcopies.size();i++)
    {
        copy = jobcopies.get(i); // get the job entry copy
        entry = copy.getEntry();
        resourceReferences.addAll( entry.getResourceDependencies(this) );
    }
    return resourceReferences;
}

/**
 * Exports this job plus the resources used by its entries into the given
 * definitions map, under a name derived from the job's file name.
 */
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface namingInterface) throws KettleException
{
    try
    {
        FileObject fileObject = KettleVFS.getFileObject(getFilename());
        String name = namingInterface.nameResource(fileObject.getName().getBaseName(), fileObject.getParent().getName().getPath(), "kjb");
        ResourceDefinition definition = definitions.get(name);
        if (definition==null)
        {
            // If we do this once, it will be plenty :-)
            JobMeta jobMeta = (JobMeta) this.realClone(false);

            // Add used resources, modify transMeta accordingly
            // Go through the list of steps, etc.
            // These critters change the steps in the cloned TransMeta
            // At the end we make a new XML version of it in "exported" format...

            // loop over steps, databases will be exported to XML anyway.
            for (JobEntryCopy jobEntry: jobMeta.jobcopies)
            {
                jobEntry.getEntry().exportResources(jobMeta, definitions, namingInterface);
            }

            // At the end, add ourselves to the map...
String transMetaContent = jobMeta.getXML();
            definition = new ResourceDefinition(name, transMetaContent);
            // NOTE(review): the definition is looked up with 'name' above but
            // stored under the file path here -- confirm this asymmetry is intended.
            definitions.put(fileObject.getName().getPath(), definition);
        }
    }
    catch (FileSystemException e)
    {
        throw new KettleException(Messages.getString("JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
    }
    catch (IOException e)
    {
        throw new KettleException(Messages.getString("JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
    }
    return filename;
}

/**
 * @return the slaveServer list
 */
public List<SlaveServer> getSlaveServers()
{
    return slaveServers;
}

/**
 * @param slaveServers the slaveServers to set
 */
public void setSlaveServers(List<SlaveServer> slaveServers)
{
    this.slaveServers = slaveServers;
}

/**
 * Find a slave server using the name
 * @param serverString the name of the slave server
 * @return the slave server or null if we couldn't spot an appropriate entry.
 */
public SlaveServer findSlaveServer(String serverString)
{
    return SlaveServer.findSlaveServer(slaveServers, serverString);
}

/**
 * @return An array list slave server names
 */
public String[] getSlaveServerNames()
{
    return SlaveServer.getSlaveServerNames(slaveServers);
}

/**
 * See if the name of the supplied job entry copy doesn't collide with any other job entry copy in the job.
 * @param je The job entry copy to verify the name for.
 */
public void renameJobEntryIfNameCollides(JobEntryCopy je)
{
    // First see if the name changed.
    // If so, we need to verify that the name is not already used in the job.
    String newname = je.getName();

    // See if this name exists in the other job entries
    boolean found;
    int nr=1;
    do
    {
        found=false;
        for (JobEntryCopy copy : jobcopies)
        {
            // Only the primary copy (nr 0) of another entry causes a collision.
            if (copy!=je && copy.getName().equalsIgnoreCase(newname) && copy.getNr()==0)
                found=true;
        }
        if (found)
        {
            // Append " (2)", " (3)", ... until the name is unique.
            nr++;
            newname = je.getName()+" ("+nr+")";
        }
    }
    while(found);

    // Rename if required.
    je.setName(newname);
}
}
package com.emartynov.android.app.urlsetter.android.ui; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import static org.fest.assertions.api.Assertions.assertThat; import static org.robolectric.Robolectric.buildActivity; @Config(emulateSdk = 18, reportSdk = 18) @RunWith( RobolectricTestRunner.class ) public class MainActivityTest { private MainActivity activity; @Before public void setUp() throws Exception { activity = buildActivity( MainActivity.class ).create().get(); } @Test public void finishesAfterStart() throws Exception { assertThat( activity.isFinishing() ).isFalse(); } }
package com.sri.ai.test.grinder.library;

import java.util.Collection;

import org.junit.Test;

import com.sri.ai.brewer.api.Grammar;
import com.sri.ai.brewer.core.CommonGrammar;
import com.sri.ai.expresso.api.Expression;
import com.sri.ai.grinder.api.RewritingProcess;
import com.sri.ai.grinder.helper.GrinderUtil;
import com.sri.ai.grinder.library.DirectCardinalityComputationFactory;
import com.sri.ai.grinder.library.equality.cardinality.direct.CardinalityRewriter;
import com.sri.ai.grinder.library.equality.cardinality.direct.core.CardinalityTypeOfLogicalVariable;
import com.sri.ai.test.grinder.AbstractGrinderTest;
import com.sri.ai.test.grinder.TestData;
import com.sri.ai.util.Util;

/**
 * Tests for the R_simplify and R_complete_simplify cardinality rewriters.
 * Each test data item pairs an input expression string with the expression
 * the rewriter is expected to produce.
 */
public class SimplifyAndCompleteSimplifyTest extends AbstractGrinderTest {

    @Override
    public Grammar makeGrammar() {
        return new CommonGrammar();
    }

    // Exercises the basic algebraic and boolean rewrite rules of R_simplify.
    @Test
    public void testSimplifyPassesBasicTests() {
        TestData[] tests = new TestData[] {
                // Replaces 0*E' by 0
                new SimplifyTestData("0*2", "0"),
                new SimplifyTestData("0*p(a)", "0"),
                new SimplifyTestData("2*0", "0"),
                new SimplifyTestData("p(a)*0", "0"),
                // Replaces 1*E' by E'
                new SimplifyTestData("1*2", "2"),
                new SimplifyTestData("1*p(a)", "p(a)"),
                new SimplifyTestData("2*1", "2"),
                new SimplifyTestData("p(a)*1", "p(a)"),
                // Replaces 0+E' by E'
                new SimplifyTestData("0+2", "2"),
                new SimplifyTestData("0+p(a)", "p(a)"),
                new SimplifyTestData("2+0", "2"),
                new SimplifyTestData("p(a)+0", "p(a)"),
                // Replaces 0^E' by 0
                new SimplifyTestData("0^2", "0"),
                new SimplifyTestData("0^p(a)", "0^p(a)"),
                // Replace 0^0 by 1
                // see: http://en.wikipedia.org/wiki/Exponentiation#Zero_to_the_zero_power
                // for discussion.
                new SimplifyTestData("0^0", "1"),
                // Replaces E'^0 by 1
                new SimplifyTestData("2^0", "1"),
                new SimplifyTestData("p(a)^0", "1"),
                // Replaces E'^1 by E'
                new SimplifyTestData("2^1", "2"),
                new SimplifyTestData("p(a)^1", "p(a)"),
                // Replaces E'/1 by E'
                new SimplifyTestData("2/1", "2"),
                new SimplifyTestData("p(a)/1", "p(a)"),
                // Replaces --E' by E'
                // Test for resolved issue: JIRA ALBP-69
                new SimplifyTestData("--2", "2"),
                // TODO - need fix for ALBP-68, currently returns --p(a)
                // new SimplifyTestData(
                // Replaces E'-0 by E'
                new SimplifyTestData("2-0", "2"),
                new SimplifyTestData("p(a)-0", "p(a)"),
                // Replaces 0-E' by -E'
                // Note: I use (-2) here instead of 2
                // as the result is the numeric valued
                // symbol -2. However, when I parse
                // "-2" directly I get the function appliction
                // -(2), which is correct but does not compare
                // correctly, so using (-2) gets around this.
                // Test for resolved issue: JIRA ALBP-69
                new SimplifyTestData("0-(-2)", "2"),
                // Test for resolved issue: JIRA ALBP-69
                new SimplifyTestData("0-p(a)", "-p(a)"),
                // Replaces false and E' by false
                new SimplifyTestData("false and p(a)", "false"),
                // Replaces true and E' by E'
                new SimplifyTestData("true and p(a)", "p(a)"),
                // Replaces false or E' by E'
                new SimplifyTestData("false or p(a)", "p(a)"),
                // Replaces true or E' by true
                new SimplifyTestData("true or p(a)", "true"),
                // Replaces not not E' by E'
                new SimplifyTestData("not not p(a)", "p(a)"),
                // Replaces not true by false
                new SimplifyTestData("not true", "false"),
                // Replaces not false by true.
                new SimplifyTestData("not false", "true"),
                // Replaces if true then E' else E'' by E'
                new SimplifyTestData("if true then p(a) else p(b)", "p(a)"),
                // Replaces if false then E' else E'' by E''
                new SimplifyTestData("if false then p(a) else p(b)", "p(b)"),
                // Replaces if C then E' else E' by E'
                new SimplifyTestData("if p(b) then p(a) else p(a)", "p(a)"),
                new SimplifyTestData("if X = Y then p(a) else p(a)", "p(a)"),
                // Replaces function applications of numeric operations on
                // actual numbers by its result
                new SimplifyTestData("4+2", "6"),
                new SimplifyTestData("4-2", "2"),
                new SimplifyTestData("4*2", "8"),
                new SimplifyTestData("4/2", "2"),
                new SimplifyTestData("4^2", "16"),
                // Replaces function applications of boolean operations
                // on actual boolean values by its result
                // TODO - add tests
                // Externalizes Conditionals
                // TODO - add tests
                // Tests for resolved issue: JIRA ALBP-31
                new SimplifyTestData("{ B, (if A = B then C else D) }", "if A = B then { B, C } else { B, D }"),
                new SimplifyTestData("{ if A = B then C else D }", "if A = B then { C } else { D }"),
                // Tests for resolved issue: JIRA ALBP-51
                new SimplifyTestData("if A = B then if B = A then 1 else 2 else 3", "if A = B then 1 else 3"),
                // Tests for resolved issue: JIRA ALBP-51
                new SimplifyTestData("if B = A then {C} else if A = B then {D} else {E}", "if B = A then {C} else {E}"),
                // TODO - test injective functions.
                // TODO - test exclusive ranges.
        };

        perform(tests);
    }

    @Test
    public void testSimplifyAsDescribedInPapers() {
        // Tests based on how simplification should work, as described in:
        // /aic-smf/doc/papers/CP 2012/CP 2012 submitted version with revisions
        // /aic-smf/doc/papers/CP 2012/StaRAI-12
        TestData[] tests = new TestData[] {
                // Basic: 1. if-then-elses are externalized.
                new SimplifyTestData("and(A = a, (if B = b then C = c else C = d), E = e)",
                        "A = a and (B = b and C = c or B != b and C = d) and E = e"),
                        //"B = b and A = a and C = c and E = e or B != b and A = a and C = d and E = e"),
                        // Note: before FromConditionalFormulaToFormula, used to be
                        // if B = b then and(A = a, C = c, E = e) else and(A = a, C = d, E = e)
                // Basic: 2. Operations on constants are performed.
                new SimplifyTestData("2 + 2", "4"),
                new SimplifyTestData("not(0 = 1)", "true"),
                new SimplifyTestData("0 = 1", "false"),
                new SimplifyTestData("a = a", "true"),
                new SimplifyTestData("a = b", "false"),
                new SimplifyTestData("not(false)", "true"),
                new SimplifyTestData("not(true)", "false"),
                new SimplifyTestData("and(false, true)", "false"),
                new SimplifyTestData("or(false, true)", "true"),
                new SimplifyTestData("if true then 1 else 2", "1"),
                new SimplifyTestData("if false then 1 else 2", "2"),
                // Basic: 3. Operations whose results can be defined
                // by a subset of their arguments equal to certain constants.
                new SimplifyTestData("and(false, X = Y)", "false"),
                new SimplifyTestData("0 + 5", "5"),
                new SimplifyTestData("5 - 0", "5"),
                new SimplifyTestData("0 * 5", "0"),
                new SimplifyTestData("0 * | X != a |", "0"),
                new SimplifyTestData("1 * 5", "5"),
                new SimplifyTestData("if true then | X = a | else 2", "| X = a |"),
                new SimplifyTestData("if X != a then true else true", "true"),
                new SimplifyTestData("if X != a then false else false", "false"),
                // Basic: 4 equalities and disequalities on formulas
                new SimplifyTestData("X = X", "true"),
                new SimplifyTestData("X != X", "false"),
                // Note: as we support normalization (i.e. an ordering)
                // on equalities and disequalities the following
                // will simplify.
                new SimplifyTestData("(X != x) = (x != X)", "true"),
                new SimplifyTestData("(X != x) != (x != X)", "false"),
                // Basic: 5 conjuncts with equality on different constants
                new SimplifyTestData("and(X = a, X = b)", "false"),
                new SimplifyTestData("and(X = a, b = X)", "false"),
                // Basic: 6 conjuncts with equality and inequality on the same term
                new SimplifyTestData("and(X = a, X != a)", "false"),
                new SimplifyTestData("and(X = a, a != X)", "false"),
                new SimplifyTestData("and(X = Y, X != Y)", "false"),
                new SimplifyTestData("and(X = Y, Y != X)", "false"),
                // Basic: 7 transitive equalities results in a contradiction
                // Note: requires complete simplification.
                // Note: replacement of transitive equalities in conjuncts, i.e:
                // and(X != a, X = Y, Y != b) -> and(X != a, X = Y, X != b)
                // has been decided not to be needed as R_implied_certainty
                // will pick out if its a contradiction anyway. So the
                // following case will not simplify.
                new SimplifyTestData("and(X != a, X = Y, Y != b)", "and(X != a, X = Y, Y != b)"),
                // Basic: 8. False if not satisfiable
                new SimplifyTestData("true => false", "false"),
                new SimplifyTestData("X = X => X != X", "false"),
                // Basic: 9. True if not falsifiable
                new SimplifyTestData("true => true", "true"),
                new SimplifyTestData("X = X => X = X", "true"),
                // Basic: 10. if-then-else, true false sub-formula replacement
                new SimplifyTestData("if X = a then if X = b then 1 else 2 else 3", "if X = a then 2 else 3"),
                new SimplifyTestData("if X = a then if X != b then 1 else 2 else 3", "if X = a then 1 else 3"),
        };

        perform(tests);
    }

    // Conditionals whose condition is not a formula are still simplified.
    @Test
    public void testSimplifyNonFormulaConditionalTests() {
        TestData[] tests = new TestData[] {
                // Basic:
                new SimplifyTestData("+((if not query then 1 else 0) * 2, (if query then 1 else 0) * 3)",
                        "if not query then 2 else 3"),
        };

        perform(tests);
    }

    // Cases that only R_complete_simplify (not plain R_simplify) can resolve.
    @Test
    public void testCompleteSimplifyRequired() {
        TestData[] tests = new TestData[] {
                // Tests for resolved issue: JIRA ALBP-53
                new CompleteSimplifyTestData("if A = C then {Z1} else if B = C then if A = B then {Z2} else {Z3} else {Z4}",
                        "if A = C then {Z1} else if B = C then {Z3} else {Z4}"),
                // Basic: 7 transitive equalities results in a contradiction
                // Note: requires complete simplification.
                new CompleteSimplifyTestData("and(X = a, X = Y, Y != a)", "false"),
        };

        perform(tests);
    }

    @Test
    public void testCompleteSimplifyPerformance() {
        TestData[] tests = new TestData[] {
                // This is a contradiction
                new CompleteSimplifyTestDataWithContext(
                        Util.list(parse("X"), parse("Y")),
                        "Y != X and (X = dave and Y = bob or Y = dave and X = bob)",
                        "Y != X and (X = dave and Y = bob or Y = dave and X = bob) and X = dave and Y = bob",
                        "X = dave and Y = bob"),
        };

        perform(tests);
    }

    @Test
    public void testCompleteSimplifyContradictions() {
        TestData[] tests = new TestData[] {
                // This is a contradiction
                new CompleteSimplifyTestData("X = w7 => not(X0 != Y and X0 != Z and Z != Y and (X0 = w7 and X = Y or X0 = w7 and X = Z))",
                        "true"),
                // This is the same contradiction just formulated slightly differently
                new CompleteSimplifyTestData("not(X != w7) => not(X0 != Y and X0 != Z and Z != Y and (X0 = w7 and X = Y or X0 = w7 and X = Z))",
                        "true"),
                new CompleteSimplifyTestData("X = w7 and X0 != Y and X0 != Z and Z != Y and (X0 = w7 and X = Y or X0 = w7 and X = Z)",
                        "false"),
                new CompleteSimplifyTestData("not(X != w7) and X0 != Y and X0 != Z and Z != Y and (X0 = w7 and X = Y or X0 = w7 and X = Z)",
                        "false"),
        };

        perform(tests);
    }

    @Test
    public void testCompleteSimplifyUnreachableBranch() {
        TestData[] tests = new TestData[] {
                new CompleteSimplifyTestData("if X = person1 or X = person2 or X = person3 then (if X != person1 and X != person2 and X != person3 then 1 else 2) else 3",
                        "if X = person1 or X = person2 or X = person3 then 2 else 3"),
                new CompleteSimplifyTestData("if X != w7 then 1 else (if (X0 != Y and X0 != Z and Z != Y and (X0 = w7 and X = Y or X0 = w7 and X = Z)) then 2 else 3)",
                        "if X != w7 then 1 else 3"),
        };

        perform(tests);
    }

    // PRIVATE METHODS

    /**
     * Test data item for R_simplify: parses expressionString, registers a
     * fixed logical-variable domain size of 100 with the rewriting process,
     * and rewrites the expression with the rewriter named by getSimplifyName().
     */
    class SimplifyTestData extends TestData implements CardinalityTypeOfLogicalVariable.DomainSizeOfLogicalVariable {
        private String expressionString;
        private Expression expression;

        public SimplifyTestData(String expressionString, String expected) {
            super(false, expected);
            this.expressionString = expressionString;
        };

        // START-DomainSizeOfLogicalVariable
        @Override
        public Integer size(Expression logicalVariable, RewritingProcess process) {
            return 100; // Default to this
        }
        // END-DomainSizeOfLogicalVariable

        @Override
        public Expression getTopExpression() {
            this.expression = parse(expressionString);
            return expression;
        }

        @Override
        public Expression callRewrite(RewritingProcess process) {
            // Ensure explicit counts added for all variable domains.
            CardinalityTypeOfLogicalVariable.registerDomainSizeOfLogicalVariableWithProcess(this, process);

            Expression result = DirectCardinalityComputationFactory.newCardinalityProcess(expression, process).rewrite(getSimplifyName(), expression);

            return result;
        }

        // PROTECTED
        // Subclasses override this to select a different rewriter.
        protected String getSimplifyName() {
            return CardinalityRewriter.R_simplify;
        }
    };

    /** Same as SimplifyTestData but runs the R_complete_simplify rewriter. */
    class CompleteSimplifyTestData extends SimplifyTestData {
        public CompleteSimplifyTestData(String expressionString, String expected) {
            super(expressionString, expected);
        };

        @Override
        protected String getSimplifyName() {
            return CardinalityRewriter.R_complete_simplify;
        }
    };

    /**
     * CompleteSimplifyTestData that first extends the rewriting process with
     * the given contextual variables and a context (constraint) expression.
     */
    class CompleteSimplifyTestDataWithContext extends CompleteSimplifyTestData {
        private Expression context;
        private Collection<Expression> contextualVariables;

        public CompleteSimplifyTestDataWithContext(Collection<Expression> contextualVariables, String context, String expressionString, String expected) {
            super(expressionString, expected);
            this.contextualVariables = contextualVariables;
            this.context = parse(context);
        };

        @Override
        public Expression callRewrite(RewritingProcess process) {
            // Install the contextual variables and constraint before rewriting.
            process = GrinderUtil.extendContextualVariablesAndConstraint(contextualVariables, context, process);
            return super.callRewrite(process);
        }
    };
}
package org.elasticsearch.watcher.actions.throttler;

import com.carrotsearch.randomizedtesting.annotations.Repeat;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.joda.time.DateTime;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.watcher.actions.Action;
import org.elasticsearch.watcher.actions.ActionWrapper;
import org.elasticsearch.watcher.actions.email.EmailAction;
import org.elasticsearch.watcher.actions.email.service.EmailTemplate;
import org.elasticsearch.watcher.actions.index.IndexAction;
import org.elasticsearch.watcher.actions.logging.LoggingAction;
import org.elasticsearch.watcher.actions.webhook.WebhookAction;
import org.elasticsearch.watcher.client.WatchSourceBuilder;
import org.elasticsearch.watcher.execution.ActionExecutionMode;
import org.elasticsearch.watcher.execution.ExecutionState;
import org.elasticsearch.watcher.execution.ManualExecutionContext;
import org.elasticsearch.watcher.history.WatchRecord;
import org.elasticsearch.watcher.support.clock.SystemClock;
import org.elasticsearch.watcher.support.http.HttpRequestTemplate;
import org.elasticsearch.watcher.support.template.Template;
import org.elasticsearch.watcher.support.xcontent.MapPath;
import org.elasticsearch.watcher.test.AbstractWatcherIntegrationTests;
import org.elasticsearch.watcher.transport.actions.execute.ExecuteWatchResponse;
import org.elasticsearch.watcher.transport.actions.get.GetWatchRequest;
import org.elasticsearch.watcher.transport.actions.put.PutWatchRequest;
import org.elasticsearch.watcher.transport.actions.put.PutWatchResponse;
import org.elasticsearch.watcher.trigger.manual.ManualTriggerEvent;
import org.elasticsearch.watcher.trigger.schedule.IntervalSchedule;
import org.elasticsearch.watcher.trigger.schedule.ScheduleTrigger;
import org.elasticsearch.watcher.trigger.schedule.ScheduleTriggerEvent;
import org.junit.Test;

import java.io.IOException;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.common.joda.time.DateTimeZone.UTC;
import static org.elasticsearch.watcher.client.WatchSourceBuilders.watchBuilder;
import static org.elasticsearch.watcher.trigger.TriggerBuilders.schedule;
import static org.elasticsearch.watcher.trigger.schedule.Schedules.interval;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;

/**
 * Integration tests for Watcher action throttling: ack-based throttling,
 * per-action throttle periods, the watch-level default throttle period, and
 * the interaction between throttling and failing actions. Executions are
 * simulated (SIMULATE mode) and the clock is time-warped so throttle windows
 * can be fast-forwarded deterministically.
 */
public class ActionThrottleTests extends AbstractWatcherIntegrationTests {

    @Override
    protected boolean timeWarped() {
        // Tests manipulate time explicitly via timeWarp().clock().
        return true;
    }

    /**
     * Acking a watch's single action throttles its next execution; without an
     * ack the action executes (is simulated) again.
     */
    @Test @Slow
    @Repeat(iterations = 10)
    public void testSingleActionAckThrottle() throws Exception {
        boolean useClientForAcking = randomBoolean(); // exercise both ack code paths
        WatchSourceBuilder watchSourceBuilder = watchBuilder()
                .trigger(schedule(interval("60m")));
        AvailableAction availableAction = randomFrom(AvailableAction.values());
        Action.Builder action = availableAction.action();
        watchSourceBuilder.addAction("test_id", action);
        PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
        assertThat(putWatchResponse.getVersion(), greaterThan(0L));
        refresh();
        assertThat(watcherClient().prepareGetWatch("_id").get().isFound(), equalTo(true));

        // First execution: action runs (simulated), establishing throttle state.
        ManualExecutionContext ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
        WatchRecord watchRecord = executionService().execute(ctx);
        assertThat(watchRecord.execution().actionsResults().get("test_id").action().status(), equalTo(Action.Result.Status.SIMULATED));
        if (timeWarped()) {
            timeWarp().clock().fastForward(TimeValue.timeValueSeconds(1));
        }
        boolean ack = randomBoolean();
        if (ack) {
            if (useClientForAcking) {
                watcherClient().prepareAckWatch("_id").setActionIds("test_id").get();
            } else {
                watchService().ackWatch("_id", new String[] { "test_id" }, new TimeValue(5, TimeUnit.SECONDS));
            }
        }
        // Second execution: throttled iff the action was acked.
        ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
        watchRecord = executionService().execute(ctx);
        if (ack) {
            assertThat(watchRecord.execution().actionsResults().get("test_id").action().status(), equalTo(Action.Result.Status.THROTTLED));
        } else {
            assertThat(watchRecord.execution().actionsResults().get("test_id").action().status(), equalTo(Action.Result.Status.SIMULATED));
        }
    }

    /**
     * With several actions on one watch and a random subset acked, only the
     * acked actions are throttled on the next execution.
     */
    @Test @Slow
    @Repeat(iterations = 10)
    public void testRandomMultiActionAckThrottle() throws Exception {
        boolean useClientForAcking = randomBoolean();
        WatchSourceBuilder watchSourceBuilder = watchBuilder()
                .trigger(schedule(interval("60m")));
        Set<String> ackingActions = new HashSet<>();
        for (int i = 0; i < scaledRandomIntBetween(5,10); ++i) {
            AvailableAction availableAction = randomFrom(AvailableAction.values());
            Action.Builder action = availableAction.action();
            watchSourceBuilder.addAction("test_id" + i, action);
            if (randomBoolean()) {
                ackingActions.add("test_id" + i);
            }
        }
        PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
        assertThat(putWatchResponse.getVersion(), greaterThan(0L));
        refresh();
        assertThat(watcherClient().getWatch(new GetWatchRequest("_id")).actionGet().isFound(), equalTo(true));

        // First execution establishes per-action state; its record is not inspected.
        ManualExecutionContext ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
        executionService().execute(ctx);
        for (String actionId : ackingActions) {
            if (useClientForAcking) {
                watcherClient().prepareAckWatch("_id").setActionIds(actionId).get();
            } else {
                watchService().ackWatch("_id", new String[]{actionId}, new TimeValue(5, TimeUnit.SECONDS));
            }
        }
        if (timeWarped()) {
            timeWarp().clock().fastForwardSeconds(5);
        }
        ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
        WatchRecord watchRecord = executionService().execute(ctx);
        for (ActionWrapper.Result result : watchRecord.execution().actionsResults()) {
            if (ackingActions.contains(result.id())) {
                assertThat(result.action().status(), equalTo(Action.Result.Status.THROTTLED));
            } else {
                assertThat(result.action().status(), equalTo(Action.Result.Status.SIMULATED));
            }
        }
    }

    /**
     * Two actions with different throttle periods (10s / 15s): after ~11s the
     * 10s action runs again while the 15s action is still throttled.
     */
    @Test @Slow
    public void testDifferentThrottlePeriods() throws Exception {
        WatchSourceBuilder watchSourceBuilder = watchBuilder()
                .trigger(schedule(interval("60m")));
        watchSourceBuilder.addAction("ten_sec_throttle", new TimeValue(10, TimeUnit.SECONDS),
                randomFrom(AvailableAction.values()).action());
        watchSourceBuilder.addAction("fifteen_sec_throttle", new TimeValue(15, TimeUnit.SECONDS),
                randomFrom(AvailableAction.values()).action());
        PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
        assertThat(putWatchResponse.getVersion(), greaterThan(0L));
        refresh();
        assertThat(watcherClient().getWatch(new GetWatchRequest("_id")).actionGet().isFound(), equalTo(true));
        if (timeWarped()) {
            timeWarp().clock().setTime(new DateTime(UTC));
        }

        // First execution: both actions simulated.
        ManualExecutionContext ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
        WatchRecord watchRecord = executionService().execute(ctx);
        long firstExecution = System.currentTimeMillis(); // wall-clock anchor for the assertBusy deadline below
        for(ActionWrapper.Result actionResult : watchRecord.execution().actionsResults()) {
            assertThat(actionResult.action().status(), equalTo(Action.Result.Status.SIMULATED));
        }
        // Immediate re-execution: both throttled.
        ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
        watchRecord = executionService().execute(ctx);
        for(ActionWrapper.Result actionResult : watchRecord.execution().actionsResults()) {
            assertThat(actionResult.action().status(), equalTo(Action.Result.Status.THROTTLED));
        }
        if (timeWarped()) {
            timeWarp().clock().fastForwardSeconds(11);
        }
        // After 11s only the 10s-throttle action should fire again.
        assertBusy(new Runnable() {
            @Override
            public void run() {
                try {
                    ManualExecutionContext ctx = getManualExecutionContext(new TimeValue(0, TimeUnit.SECONDS));
                    WatchRecord watchRecord = executionService().execute(ctx);
                    for (ActionWrapper.Result actionResult : watchRecord.execution().actionsResults()) {
                        if ("ten_sec_throttle".equals(actionResult.id())) {
                            assertThat(actionResult.action().status(), equalTo(Action.Result.Status.SIMULATED));
                        } else {
                            assertThat(actionResult.action().status(), equalTo(Action.Result.Status.THROTTLED));
                        }
                    }
                } catch (IOException ioe) {
                    throw new ElasticsearchException("failed to execute", ioe);
                }
            }
        }, 11000 - (System.currentTimeMillis() - firstExecution), TimeUnit.MILLISECONDS);
    }

    /**
     * With no explicit throttle period anywhere, the system default applies:
     * simulated, then throttled, then simulated again after the default window.
     */
    @Test @Slow
    public void testDefaultThrottlePeriod() throws Exception {
        WatchSourceBuilder watchSourceBuilder = watchBuilder()
                .trigger(schedule(interval("60m")));
        AvailableAction availableAction = randomFrom(AvailableAction.values());
        final String actionType = availableAction.type(); // NOTE(review): unused local — consider removing
        watchSourceBuilder.addAction("default_global_throttle", availableAction.action());
        PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
        assertThat(putWatchResponse.getVersion(), greaterThan(0L));
        refresh();
        if (timeWarped()) {
            timeWarp().clock().setTime(new DateTime(UTC));
        }
        ExecuteWatchResponse executeWatchResponse = watcherClient().prepareExecuteWatch("_id")
                .setTriggerEvent(new ManualTriggerEvent("execute_id",
                        new ScheduleTriggerEvent(new DateTime(UTC), new DateTime(UTC))))
                .setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
                .setRecordExecution(true)
                .get();
        Map<String, Object> watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
        Object resultStatus = getExecutionStatus(watchRecordMap);
        assertThat(resultStatus.toString(), equalTo("simulated"));
        if (timeWarped()) {
            timeWarp().clock().fastForwardSeconds(1);
        }
        // 1s later: still inside the default throttle window.
        executeWatchResponse = watcherClient().prepareExecuteWatch("_id")
                .setTriggerEvent(new ManualTriggerEvent("execute_id",
                        new ScheduleTriggerEvent(new DateTime(UTC), new DateTime(UTC))))
                .setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
                .setRecordExecution(true)
                .get();
        watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
        resultStatus = getExecutionStatus(watchRecordMap);
        assertThat(resultStatus.toString(), equalTo("throttled"));
        if (timeWarped()) {
            timeWarp().clock().fastForwardSeconds(5);
        }
        // Past the default window: the action fires again.
        assertBusy(new Runnable() {
            @Override
            public void run() {
                try {
                    ExecuteWatchResponse executeWatchResponse = watcherClient().prepareExecuteWatch("_id")
                            .setTriggerEvent(new ManualTriggerEvent("execute_id",
                                    new ScheduleTriggerEvent(new DateTime(UTC), new DateTime(UTC))))
                            .setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
                            .setRecordExecution(true)
                            .get();
                    Map<String, Object> watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
                    Object resultStatus = getExecutionStatus(watchRecordMap);
                    assertThat(resultStatus.toString(), equalTo("simulated"));
                } catch (IOException ioe) {
                    throw new ElasticsearchException("failed to execute", ioe);
                }
            }
        }, 6, TimeUnit.SECONDS);
    }

    /**
     * A watch-level default throttle period (1s) overrides the system default:
     * the action is throttled for only 1s instead of the global default.
     */
    @Test @Slow
    public void testWatchThrottlePeriod() throws Exception {
        WatchSourceBuilder watchSourceBuilder = watchBuilder()
                .trigger(schedule(interval("60m")))
                .defaultThrottlePeriod(new TimeValue(1, TimeUnit.SECONDS));
        AvailableAction availableAction = randomFrom(AvailableAction.values());
        final String actionType = availableAction.type(); // NOTE(review): unused local — consider removing
        watchSourceBuilder.addAction("default_global_throttle", availableAction.action());
        PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
        assertThat(putWatchResponse.getVersion(), greaterThan(0L));
        refresh();
        if (timeWarped()) {
            timeWarp().clock().setTime(new DateTime(UTC));
        }
        ExecuteWatchResponse executeWatchResponse = watcherClient().prepareExecuteWatch("_id")
                .setTriggerEvent(new ManualTriggerEvent("execute_id",
                        new ScheduleTriggerEvent(new DateTime(UTC), new DateTime(UTC))))
                .setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
                .setRecordExecution(true)
                .get();
        Map<String, Object> watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
        Object resultStatus = getExecutionStatus(watchRecordMap);
        assertThat(resultStatus.toString(), equalTo("simulated"));
        if (timeWarped()) {
            timeWarp().clock().fastForwardSeconds(1);
        }
        executeWatchResponse = watcherClient().prepareExecuteWatch("_id")
                .setTriggerEvent(new ManualTriggerEvent("execute_id",
                        new ScheduleTriggerEvent(new DateTime(UTC), new DateTime(UTC))))
                .setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
                .setRecordExecution(true)
                .get();
        watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
        resultStatus = getExecutionStatus(watchRecordMap);
        assertThat(resultStatus.toString(), equalTo("throttled"));
        if (timeWarped()) {
            timeWarp().clock().fastForwardSeconds(1);
        }
        assertBusy(new Runnable() {
            @Override
            public void run() {
                try {
                    //Since the default throttle period is 5 seconds but we have overridden the period in the watch this should trigger
                    ExecuteWatchResponse executeWatchResponse = watcherClient().prepareExecuteWatch("_id")
                            .setTriggerEvent(new ManualTriggerEvent("execute_id",
                                    new ScheduleTriggerEvent(new DateTime(UTC), new DateTime(UTC))))
                            .setActionMode("default_global_throttle", ActionExecutionMode.SIMULATE)
                            .setRecordExecution(true)
                            .get();
                    Map<String, Object> watchRecordMap = executeWatchResponse.getRecordSource().getAsMap();
                    Object resultStatus = getExecutionStatus(watchRecordMap);
                    assertThat(resultStatus.toString(), equalTo("simulated"));
                } catch (IOException ioe) {
                    throw new ElasticsearchException("failed to execute", ioe);
                }
            }
        }, 1, TimeUnit.SECONDS);
    }

    /**
     * A failing action is never throttled (FAILURE both times), while a
     * sibling successful action is throttled on re-execution; the overall
     * watch state goes EXECUTED then THROTTLED.
     */
    @Test @Slow
    public void testFailingActionDoesGetThrottled() throws Exception {
        TimeValue throttlePeriod = new TimeValue(60, TimeUnit.MINUTES);
        WatchSourceBuilder watchSourceBuilder = watchBuilder()
                .trigger(new ScheduleTrigger(new IntervalSchedule(
                        new IntervalSchedule.Interval(60, IntervalSchedule.Interval.Unit.MINUTES))))
                .defaultThrottlePeriod(throttlePeriod);
        watchSourceBuilder.addAction("logging", LoggingAction.builder(new Template.Builder.Inline("test out").build()));
        // "unknown.foo" is unresolvable, so this webhook action always fails.
        watchSourceBuilder.addAction("failing_hook", WebhookAction.builder(HttpRequestTemplate.builder("unknown.foo", 80).build()));
        PutWatchResponse putWatchResponse = watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder)).actionGet();
        assertThat(putWatchResponse.getVersion(), greaterThan(0L));
        refresh();
        ManualTriggerEvent triggerEvent = new ManualTriggerEvent("_id",
                new ScheduleTriggerEvent(new DateTime(UTC), new DateTime(UTC)));
        ManualExecutionContext.Builder ctxBuilder = ManualExecutionContext.builder(watchService().getWatch("_id"), triggerEvent, throttlePeriod);
        ctxBuilder.recordExecution(true);
        ManualExecutionContext ctx = ctxBuilder.build();
        WatchRecord watchRecord = executionService().execute(ctx);
        assertThat(watchRecord.execution().actionsResults().get("logging").action().status(), equalTo(Action.Result.Status.SUCCESS));
        assertThat(watchRecord.execution().actionsResults().get("failing_hook").action().status(), equalTo(Action.Result.Status.FAILURE));
        assertThat(watchRecord.state(), equalTo(ExecutionState.EXECUTED));
        // Re-execute immediately: the successful action is throttled, the failing one fails again.
        triggerEvent = new ManualTriggerEvent("_id", new ScheduleTriggerEvent(new DateTime(UTC), new DateTime(UTC)));
        ctxBuilder = ManualExecutionContext.builder(watchService().getWatch("_id"), triggerEvent, throttlePeriod);
        ctxBuilder.recordExecution(true);
        ctx = ctxBuilder.build();
        watchRecord = executionService().execute(ctx);
        assertThat(watchRecord.execution().actionsResults().get("logging").action().status(), equalTo(Action.Result.Status.THROTTLED));
        assertThat(watchRecord.execution().actionsResults().get("failing_hook").action().status(), equalTo(Action.Result.Status.FAILURE));
        assertThat(watchRecord.state(), equalTo(ExecutionState.THROTTLED));
    }

    /** Extracts the status of the first action from a watch-record source map. */
    private String getExecutionStatus(Map<String, Object> watchRecordMap) {
        return MapPath.eval("result.actions.0.status", watchRecordMap);
    }

    /**
     * Builds a manual execution context for watch "_id" with the given
     * throttle period, simulating all actions and recording the execution.
     */
    private ManualExecutionContext getManualExecutionContext(TimeValue throttlePeriod) {
        ManualTriggerEvent triggerEvent = new ManualTriggerEvent("_id",
                new ScheduleTriggerEvent(new DateTime(UTC), new DateTime(UTC)));
        return ManualExecutionContext.builder(watchService().getWatch("_id"), triggerEvent, throttlePeriod)
                .executionTime(timeWarped() ? timeWarp().clock().nowUTC() : SystemClock.INSTANCE.nowUTC())
                .allActionsMode(ActionExecutionMode.SIMULATE)
                .recordExecution(true)
                .build();
    }

    /** The action types a test watch may randomly be built with. */
    enum AvailableAction {
        EMAIL {
            @Override
            public Action.Builder action() throws Exception {
                EmailTemplate.Builder emailBuilder = EmailTemplate.builder();
                emailBuilder.from("test@test.com");
                emailBuilder.to("test@test.com");
                emailBuilder.subject("test subject");
                return EmailAction.builder(emailBuilder.build());
            }

            @Override
            public String type() {
                return EmailAction.TYPE;
            }
        },
        WEBHOOK {
            @Override
            public Action.Builder action() throws Exception {
                HttpRequestTemplate.Builder requestBuilder = HttpRequestTemplate.builder("foo.bar.com", 1234);
                return WebhookAction.builder(requestBuilder.build());
            }

            @Override
            public String type() {
                return WebhookAction.TYPE;
            }
        },
        LOGGING {
            @Override
            public Action.Builder action() throws Exception {
                Template.Builder templateBuilder = new Template.Builder.Inline("{{ctx.watch_id}}");
                return LoggingAction.builder(templateBuilder.build());
            }

            @Override
            public String type() {
                return LoggingAction.TYPE;
            }
        },
        INDEX {
            @Override
            public Action.Builder action() throws Exception {
                return IndexAction.builder("test_index", "test_type");
            }

            @Override
            public String type() {
                return IndexAction.TYPE;
            }
        };

        /** Builds a fresh action of this type. */
        public abstract Action.Builder action() throws Exception;

        /** The action's registered type name. */
        public abstract String type();
    }
}
package net.kiberion.swampmachine.jruby;

import static org.junit.Assert.*;

import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Path;

import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;

import org.junit.Test;
import org.junit.Ignore;
import org.jruby.embed.jsr223.JRubyEngineFactory;

import net.kiberion.swampmachine.factories.ScriptEntityFactory;
import net.kiberion.swampmachine.scripting.AbstractScriptTest;
import net.kiberion.utils.FilePathUtils;

/**
 * JRuby-backed implementation of the generic scripting test suite.
 */
public class TestRuby extends AbstractScriptTest {

    private final RubyEntityFactory factory = new RubyEntityFactory();

    // NOTE(review): both @Ignore messages reference a *jython* bug although this
    // is a JRuby test class — confirm the linked issue actually applies here.
    @Ignore("Fails due to jython bug (http://bugs.jython.org/issue2502)")
    @Test
    @Override
    public void testThreadSafety() {
        super.testThreadSafety();
    }

    @Ignore("Fails due to jython bug (http://bugs.jython.org/issue2502)")
    @Test
    @Override
    public void testInvokeScript() {
        super.testInvokeScript();
    }

    /** @return the JRuby entity factory used by the inherited tests. */
    @Override
    protected ScriptEntityFactory getEntityFactory() {
        return factory;
    }

    /** @return the directory containing the Ruby test resources (anchored at test.rb). */
    @Override
    protected Path getPathToTestResourcres() {
        return FilePathUtils.getResourceRootPath(TestRuby.class, "test.rb");
    }

    /**
     * Smoke test: registers the JRuby JSR-223 engine explicitly and evaluates a
     * trivial expression under a context classloader that cannot see JRuby.
     *
     * Fixes over the previous version:
     * - the replaced context classloader is restored in a finally block, so the
     *   mutation no longer leaks into subsequent tests on the same thread;
     * - the eval result is asserted instead of silently discarded;
     * - a ScriptException now fails the test instead of only printing a stack trace.
     */
    public void simpleTest() {
        // NOTE(review): this method carries no @Test annotation, so JUnit never
        // runs it — confirm whether that is intentional before annotating it.
        ClassLoader original = Thread.currentThread().getContextClassLoader();
        // An URLClassLoader with no URLs and a null parent cannot resolve JRuby.
        ClassLoader isolated = new URLClassLoader(new URL[] {}, null);
        try {
            try {
                isolated.loadClass("org.jruby.embed.ScriptingContainer");
                fail("this classloader shall not find jruby");
            } catch (ClassNotFoundException expected) {
                // expected: proves the classloader really is isolated
            }

            Thread.currentThread().setContextClassLoader(isolated);

            // Register the engine explicitly: the isolated context classloader
            // prevents the usual service-provider discovery from finding JRuby.
            ScriptEngineManager manager = new ScriptEngineManager();
            manager.registerEngineName("jruby", new JRubyEngineFactory());
            ScriptEngine jruby = manager.getEngineByName("jruby");
            try {
                String result = jruby.eval("$LOAD_PATH").toString();
                assertNotNull(result);
            } catch (ScriptException e) {
                fail("jruby evaluation failed: " + e.getMessage());
            }
        } finally {
            // Always undo the thread-global mutation.
            Thread.currentThread().setContextClassLoader(original);
        }
    }
}
package org.cytoscape.internal.view;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import javax.swing.BorderFactory;
import javax.swing.InputMap;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTree;
import javax.swing.KeyStroke;
import javax.swing.ListSelectionModel;
import javax.swing.SwingUtilities;
import javax.swing.ToolTipManager;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultTreeSelectionModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import org.cytoscape.application.CyApplicationManager;
import org.cytoscape.application.events.SetSelectedNetworksEvent;
import org.cytoscape.application.events.SetSelectedNetworksListener;
import org.cytoscape.application.swing.CyAction;
import org.cytoscape.internal.task.TaskFactoryTunableAction;
import org.cytoscape.model.CyNetwork;
import org.cytoscape.model.CyNetworkManager;
import org.cytoscape.model.CyRow;
import org.cytoscape.model.CyTable;
import org.cytoscape.model.events.NetworkAboutToBeDestroyedEvent;
import org.cytoscape.model.events.NetworkAboutToBeDestroyedListener;
import org.cytoscape.model.events.NetworkAddedEvent;
import org.cytoscape.model.events.NetworkAddedListener;
import org.cytoscape.model.events.RowSetRecord;
import org.cytoscape.model.events.RowsSetEvent;
import org.cytoscape.model.events.RowsSetListener;
import org.cytoscape.model.subnetwork.CyRootNetwork;
import org.cytoscape.model.subnetwork.CySubNetwork;
import org.cytoscape.task.DynamicTaskFactoryProvisioner;
import org.cytoscape.task.NetworkCollectionTaskFactory;
import org.cytoscape.task.NetworkTaskFactory;
import org.cytoscape.task.NetworkViewCollectionTaskFactory;
import org.cytoscape.task.NetworkViewTaskFactory;
import org.cytoscape.task.edit.EditNetworkTitleTaskFactory;
import org.cytoscape.util.swing.JTreeTable;
import org.cytoscape.view.model.CyNetworkView;
import org.cytoscape.view.model.CyNetworkViewManager;
import org.cytoscape.view.model.events.NetworkViewAboutToBeDestroyedEvent;
import org.cytoscape.view.model.events.NetworkViewAboutToBeDestroyedListener;
import org.cytoscape.view.model.events.NetworkViewAddedEvent;
import org.cytoscape.view.model.events.NetworkViewAddedListener;
import org.cytoscape.work.ServiceProperties;
import org.cytoscape.work.TaskFactory;
import org.cytoscape.work.swing.DialogTaskManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Swing panel showing the tree of loaded networks (grouped by root network),
 * a navigator (bird's-eye) pane, and context menus populated from registered
 * task factories. Listens to network/view lifecycle and table-row events to
 * keep the tree in sync with the model.
 */
public class NetworkPanel extends JPanel implements TreeSelectionListener, SetSelectedNetworksListener,
        NetworkAddedListener, NetworkViewAddedListener, NetworkAboutToBeDestroyedListener,
        NetworkViewAboutToBeDestroyedListener, RowsSetListener {

    private final static long serialVersionUID = 1213748836763243L;

    private static final Logger logger = LoggerFactory.getLogger(NetworkPanel.class);

    static final Color FONT_COLOR = new Color(20, 20, 20);
    private static final int TABLE_ROW_HEIGHT = 16;
    private static final Dimension PANEL_SIZE = new Dimension(400, 700);

    private final JTreeTable treeTable;     // tree+table hybrid listing networks
    private final NetworkTreeNode root;     // invisible root of the network tree
    private JPanel navigatorPanel;          // hosts the bird's-eye view component
    private JSplitPane split;               // tree on top, navigator below

    private final NetworkTreeTableModel treeTableModel;
    private final CyApplicationManager appMgr;
    final CyNetworkManager netMgr;
    final CyNetworkViewManager netViewMgr;
    private final DialogTaskManager taskMgr;
    private final DynamicTaskFactoryProvisioner factoryProvisioner;

    private final JPopupMenu popup;         // context menu for network rows
    private JMenuItem editRootNetworTitle;  // NOTE(review): name has a typo ("Networ") — rename would touch other files

    private final Map<TaskFactory, JMenuItem> popupMap;
    private final Map<TaskFactory, CyAction> popupActions;
    // Reverse lookups from a network's tables back to the network, used by handleEvent(RowsSetEvent).
    private final Map<CyTable, CyNetwork> nameTables;
    private final Map<CyTable, CyNetwork> nodeEdgeTables;

    private final Map<Long, NetworkTreeNode> treeNodeMap;       // network SUID -> tree node
    private final Map<Object, TaskFactory> provisionerMap;      // original factory -> provisioned TaskFactory
    private HashMap<JMenuItem, Double> actionGravityMap;        // menu ordering weights
    private final Map<CyNetwork, NetworkTreeNode> network2nodeMap;

    // Guard to suppress re-entrant tree-selection handling.
    private boolean ignoreTreeSelectionEvents;

    private final EditNetworkTitleTaskFactory rootNetworkTitleEditor;
    private final JPopupMenu rootPopupMenu; // context menu for root-network (collection) rows

    private CyRootNetwork selectedRoot;
    private Set<CyRootNetwork> selectedRootSet;

    /**
     * Builds the panel, wires the tree table, popup menus and the bird's-eye
     * navigator.
     *
     * @param appMgr application manager (current network/view tracking)
     * @param netMgr network manager (registry of loaded networks)
     * @param netViewMgr view manager (views per network)
     * @param bird supplies the bird's-eye view component for the navigator pane
     * @param taskMgr dialog task manager used to run menu tasks
     * @param factoryProvisioner adapts model-specific task factories to plain TaskFactory
     * @param networkTitleEditor task factory backing "Rename Network Collection..."
     */
    public NetworkPanel(final CyApplicationManager appMgr, final CyNetworkManager netMgr,
            final CyNetworkViewManager netViewMgr, final BirdsEyeViewHandler bird,
            final DialogTaskManager taskMgr, final DynamicTaskFactoryProvisioner factoryProvisioner,
            final EditNetworkTitleTaskFactory networkTitleEditor) {
        super();
        this.treeNodeMap = new HashMap<Long, NetworkTreeNode>();
        this.provisionerMap = new HashMap<Object, TaskFactory>();
        this.appMgr = appMgr;
        this.netMgr = netMgr;
        this.netViewMgr = netViewMgr;
        this.taskMgr = taskMgr;
        this.factoryProvisioner = factoryProvisioner;
        root = new NetworkTreeNode("Network Root", null);
        treeTableModel = new NetworkTreeTableModel(this, root);
        treeTable = new JTreeTable(treeTableModel);
        initialize();
        this.actionGravityMap = new HashMap<JMenuItem, Double>();
        // create and populate the popup window
        popup = new JPopupMenu();
        popupMap = new WeakHashMap<TaskFactory, JMenuItem>();
        popupActions = new WeakHashMap<TaskFactory, CyAction>();
        // Weak keys: entries vanish automatically when a network/table is garbage-collected.
        nameTables = new WeakHashMap<CyTable, CyNetwork>();
        nodeEdgeTables = new WeakHashMap<CyTable, CyNetwork>();
        this.network2nodeMap = new WeakHashMap<CyNetwork, NetworkTreeNode>();
        setNavigator(bird.getBirdsEyeView());

        /*
         * Remove CTR-A for enabling select all function in the main window.
         */
        for (KeyStroke listener : treeTable.getRegisteredKeyStrokes()) {
            if (listener.toString().equals("ctrl pressed A")) {
                final InputMap map = treeTable.getInputMap();
                map.remove(listener);
                treeTable.setInputMap(WHEN_FOCUSED, map);
                treeTable.setInputMap(WHEN_ANCESTOR_OF_FOCUSED_COMPONENT, map);
            }
        }

        this.rootNetworkTitleEditor = networkTitleEditor;
        rootPopupMenu = new JPopupMenu();
        editRootNetworTitle = new JMenuItem("Rename Network Collection...");
        editRootNetworTitle.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // Edits the title of the root network the popup was opened on.
                taskMgr.execute(rootNetworkTitleEditor.createTaskIterator(selectedRoot));
            }
        });
        rootPopupMenu.add(editRootNetworTitle);

        JMenuItem selectAllSubNetsMenuItem = new JMenuItem("Select All Networks");
        selectAllSubNetsMenuItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                selectAllSubnetwork();
            }
        });
        rootPopupMenu.add(selectAllSubNetsMenuItem);
    }

    /** Lays out the tree table and navigator pane and configures tree-table rendering. */
    protected void initialize() {
        setLayout(new BorderLayout());
        setPreferredSize(PANEL_SIZE);
        setSize(PANEL_SIZE);

        treeTable.getTree().addTreeSelectionListener(this);
        treeTable.getTree().setRootVisible(false);
        ToolTipManager.sharedInstance().registerComponent(treeTable);
        treeTable.getTree().setCellRenderer(new TreeCellRenderer(treeTable));
        treeTable.setBackground(Color.white);
        treeTable.setSelectionBackground(new Color(200, 200, 200, 150));
        treeTable.getColumn("Network").setPreferredWidth(250);
        treeTable.getColumn("Nodes").setPreferredWidth(45);
        treeTable.getColumn("Edges").setPreferredWidth(45);
        treeTable.setBackground(Color.WHITE);
        treeTable.setRowHeight(TABLE_ROW_HEIGHT);
        treeTable.setForeground(FONT_COLOR);
        treeTable.setSelectionForeground(FONT_COLOR);
        treeTable.setCellSelectionEnabled(false);
        treeTable.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
        treeTable.getTree().setSelectionModel(new DefaultTreeSelectionModel());

        navigatorPanel = new JPanel();
        navigatorPanel.setLayout(new BorderLayout());
        navigatorPanel.setPreferredSize(new Dimension(280, 280));
        navigatorPanel.setSize(new Dimension(280, 280));
        navigatorPanel.setBackground(Color.white);

        JScrollPane scroll = new JScrollPane(treeTable);
        split = new JSplitPane(JSplitPane.VERTICAL_SPLIT, scroll, navigatorPanel);
        split.setBorder(BorderFactory.createEmptyBorder());
        split.setResizeWeight(1);
        split.setDividerLocation(400);
        add(split);

        // this mouse listener listens for the right-click event and will show
        // the pop-up window when that occurrs
        treeTable.addMouseListener(new PopupListener());
    }

    // OSGi service registration hooks: each add/remove pair wires a task
    // factory (possibly via the provisioner) into the context-menu machinery.

    public void addTaskFactory(TaskFactory factory, @SuppressWarnings("rawtypes") Map props) {
        addFactory(factory, props);
    }

    public void removeTaskFactory(TaskFactory factory, @SuppressWarnings("rawtypes") Map props) {
        removeFactory(factory);
    }

    public void addNetworkCollectionTaskFactory(NetworkCollectionTaskFactory factory, @SuppressWarnings("rawtypes") Map props) {
        TaskFactory provisioner = factoryProvisioner.createFor(factory);
        provisionerMap.put(factory, provisioner);
        addFactory(provisioner, props);
    }

    public void removeNetworkCollectionTaskFactory(NetworkCollectionTaskFactory factory, @SuppressWarnings("rawtypes") Map props) {
        removeFactory(provisionerMap.remove(factory));
    }

    public void addNetworkViewCollectionTaskFactory(NetworkViewCollectionTaskFactory factory, @SuppressWarnings("rawtypes") Map props) {
        TaskFactory provisioner = factoryProvisioner.createFor(factory);
        provisionerMap.put(factory, provisioner);
        addFactory(provisioner, props);
    }

    public void removeNetworkViewCollectionTaskFactory(NetworkViewCollectionTaskFactory factory, @SuppressWarnings("rawtypes") Map props) {
        removeFactory(provisionerMap.remove(factory));
    }

    public void addNetworkTaskFactory(NetworkTaskFactory factory, @SuppressWarnings("rawtypes") Map props) {
        TaskFactory provisioner = factoryProvisioner.createFor(factory);
        provisionerMap.put(factory, provisioner);
        addFactory(provisioner, props);
    }

    public void removeNetworkTaskFactory(NetworkTaskFactory factory, @SuppressWarnings("rawtypes") Map props) {
        removeFactory(provisionerMap.remove(factory));
    }

    public void addNetworkViewTaskFactory(final NetworkViewTaskFactory factory, @SuppressWarnings("rawtypes") Map props) {
        TaskFactory provisioner = factoryProvisioner.createFor(factory);
        provisionerMap.put(factory, provisioner);
        addFactory(provisioner, props);
    }

    public void removeNetworkViewTaskFactory(NetworkViewTaskFactory factory, @SuppressWarnings("rawtypes") Map props) {
        removeFactory(provisionerMap.remove(factory));
    }

    /** Replaces the navigator pane's content with the given component (e.g. the bird's-eye view). */
    public void setNavigator(final Component comp) {
        this.navigatorPanel.removeAll();
        this.navigatorPanel.add(comp, BorderLayout.CENTER);
    }

    /**
     * This is used by Session writer.
     * @return the tree table backing this panel
     */
    public JTreeTable getTreeTable() {
        return treeTable;
    }

    public JPanel getNavigatorPanel() {
        return navigatorPanel;
    }

    //
    // Event handlers
    //
    //

    @Override
    public void handleEvent(final NetworkAboutToBeDestroyedEvent nde) {
        final CyNetwork net = nde.getNetwork();
        logger.debug("Network about to be destroyed: " + net);
        removeNetwork(net);
        // Drop all reverse-lookup entries pointing at the destroyed network.
        nameTables.values().removeAll(Collections.singletonList(net));
        nodeEdgeTables.values().removeAll(Collections.singletonList(net));
    }

    @Override
    public void handleEvent(final NetworkAddedEvent e) {
        final CyNetwork net = e.getNetwork();
        logger.debug("Network added: " + net);
        addNetwork(net);
        // Register reverse lookups so row events on these tables can be routed back to the network.
        nameTables.put(net.getDefaultNetworkTable(), net);
        nodeEdgeTables.put(net.getDefaultNodeTable(), net);
        nodeEdgeTables.put(net.getDefaultEdgeTable(), net);
    }

    @Override
    public void handleEvent(final RowsSetEvent e) {
        final Collection<RowSetRecord> payload = e.getPayloadCollection();
        if (payload.size() == 0)
            return;
        // NOTE(review): only the first record's column is inspected; a batch
        // mixing NAME and SELECTED changes would be partially handled — confirm
        // whether payloads are guaranteed homogeneous.
        final RowSetRecord record = e.getPayloadCollection().iterator().next();
        if (record == null)
            return;
        final CyTable table = e.getSource();
        final CyNetwork network = nameTables.get(table);
        // Case 1: Network name/title updated
        if (network != null && record.getColumn().equals(CyNetwork.NAME)) {
            final CyRow row = payload.iterator().next().getRow();
            final String newTitle = row.get(CyNetwork.NAME, String.class);
            final NetworkTreeNode node = this.network2nodeMap.get(network);
            final String oldTitle = treeTableModel.getValueAt(node, 0).toString();
            if (newTitle.equals(oldTitle) == false) {
                // Push the rename into the tree model on the EDT.
                SwingUtilities.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        treeTableModel.setValueAt(newTitle, node, 0);
                        treeTable.repaint();
                    }
                });
            }
            return;
        }
        final CyNetwork updateSelected = nodeEdgeTables.get(table);
        // Case 2: Selection updated.
        if (updateSelected != null && record.getColumn().equals(CyNetwork.SELECTED)) {
            SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                    treeTable.repaint();
                }
            });
        }
    }

    @Override
    public void handleEvent(final SetSelectedNetworksEvent e) {
        updateNetworkTreeSelection();
    }

    @Override
    public void handleEvent(final NetworkViewAboutToBeDestroyedEvent nde) {
        final CyNetworkView netView = nde.getNetworkView();
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                logger.debug("Network view about to be destroyed: " + netView);
                final NetworkTreeNode node = treeNodeMap.get(netView.getModel().getSUID());
                if (node != null) {
                    // Red marks a network that no longer has a view.
                    node.setNodeColor(Color.red);
                    treeTable.repaint();
                }
            }
        });
    }

    @Override
    public void handleEvent(final NetworkViewAddedEvent nde) {
        final CyNetworkView netView = nde.getNetworkView();
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                logger.debug("Network view added to NetworkPanel: " + netView);
                final NetworkTreeNode node = treeNodeMap.get(netView.getModel().getSUID());
                if (node != null) {
                    // Black marks a network that has at least one view.
                    node.setNodeColor(Color.black);
                    treeTable.repaint();
                }
            }
        });
    }

    /**
     * This method highlights a network in the NetworkPanel.
     */
    @Override
    public void valueChanged(final TreeSelectionEvent e) {
        if (ignoreTreeSelectionEvents)
            return;
        final JTree tree = treeTable.getTree();
        // Sets the "current" network based on last node in the tree selected
        final NetworkTreeNode node = (NetworkTreeNode) tree.getLastSelectedPathComponent();
        if (node == null || node.getUserObject() == null)
            return;
        CyNetwork cn = node.getNetwork();
        final List<CyNetwork> selectedNetworks = new LinkedList<CyNetwork>();
        /*
        if (cn instanceof CyRootNetwork) {
            // This is a "network set" node...
            // When selecting root node, all of the subnetworks are selected.
            CyRootNetwork root = (CyRootNetwork) cn; //((NetworkTreeNode) node.getFirstChild()).getNetwork()).getRootNetwork();
            // Creates a list of all selected networks
            List<CySubNetwork> subNetworks = root.getSubNetworkList();
            for (CySubNetwork sn : subNetworks) {
                if (netMgr.networkExists(sn.getSUID()))
                    selectedNetworks.add(sn);
            }
            // Determine the current network
            if (!selectedNetworks.isEmpty())
                cn = ((NetworkTreeNode) node.getFirstChild()).getNetwork();
        } else {
        */
        // Regular multiple networks selection...
        try {
            // Create a list of all selected networks
            for (int i = tree.getMinSelectionRow(); i <= tree.getMaxSelectionRow(); i++) {
                NetworkTreeNode tn = (NetworkTreeNode) tree.getPathForRow(i).getLastPathComponent();
                if (tn != null && tn.getUserObject() != null && tree.isRowSelected(i))
                    selectedNetworks.add(tn.getNetwork());
            }
        } catch (Exception ex) {
            logger.error("Error creating the list of selected networks", ex);
        }

        final List<CyNetworkView> selectedViews = new ArrayList<CyNetworkView>();
        for (final CyNetwork n : selectedNetworks) {
            final Collection<CyNetworkView> views = netViewMgr.getNetworkViews(n);
            if (!views.isEmpty())
                selectedViews.addAll(views);
        }

        // No need to set the same network again. It should prevent infinite loops.
        // Also check if the network still exists (it could have been removed by another thread).
        if (cn == null || netMgr.networkExists(cn.getSUID())) {
            if (cn == null || !cn.equals(appMgr.getCurrentNetwork()))
                appMgr.setCurrentNetwork(cn);
            CyNetworkView cv = null;
            // Try to get the first view of the current network
            final Collection<CyNetworkView> cnViews = cn != null ? netViewMgr.getNetworkViews(cn) : null;
            cv = (cnViews == null || cnViews.isEmpty()) ? null : cnViews.iterator().next();
            if (cv == null || !cv.equals(appMgr.getCurrentNetworkView()))
                appMgr.setCurrentNetworkView(cv);
            appMgr.setSelectedNetworks(selectedNetworks);
            appMgr.setSelectedNetworkViews(selectedViews);
        }
    }

    //
    // Private Methods
    //
    //

    // Adds a network (and, if needed, its root-network parent node) to the tree.
    // NOTE(review): if `network` were ever NOT a CySubNetwork, `parentNetwork`
    // would stay null and the getRow call below would NPE — the inline comment
    // asserts that cannot happen; confirm that invariant still holds.
    private void addNetwork(final CyNetwork network) {
        // first see if it is not in the tree already
        if (this.network2nodeMap.get(network) == null) {
            NetworkTreeNode parentTreeNode = null;
            CyRootNetwork parentNetwork = null;
            // In current version, ALL networks are created as Subnetworks.
            // So, this should be always true.
            if (network instanceof CySubNetwork) {
                parentNetwork = ((CySubNetwork) network).getRootNetwork();
                parentTreeNode = treeNodeMap.get(parentNetwork.getSUID());
            }
            if (parentTreeNode == null){
                final String rootNetName = parentNetwork.getRow(parentNetwork).get(CyNetwork.NAME, String.class);
                parentTreeNode = new NetworkTreeNode(rootNetName, parentNetwork);
                nameTables.put(parentNetwork.getDefaultNetworkTable(), parentNetwork);
                network2nodeMap.put(parentNetwork, parentTreeNode);
            }
            // Actual tree node for this network
            String netName = network.getRow(network).get(CyNetwork.NAME, String.class);
            if (netName == null) {
                logger.error("Network name is null--SUID=" + network.getSUID());
                netName = "?
(SUID: " + network.getSUID() + ")"; } final NetworkTreeNode dmtn = new NetworkTreeNode(netName, network); network2nodeMap.put(network, dmtn); parentTreeNode.add(dmtn); if (treeNodeMap.values().contains(parentTreeNode) == false) root.add(parentTreeNode); // Register top-level node to map if (parentNetwork != null) treeNodeMap.put(parentNetwork.getSUID(), parentTreeNode); if (netViewMgr.viewExists(network)) dmtn.setNodeColor(Color.black); treeNodeMap.put(network.getSUID(), dmtn); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { ignoreTreeSelectionEvents = true; // apparently this doesn't fire valueChanged treeTable.getTree().collapsePath(new TreePath(new TreeNode[] { root })); treeTable.getTree().updateUI(); final TreePath path = new TreePath(dmtn.getPath()); treeTable.getTree().expandPath(path); treeTable.getTree().scrollPathToVisible(path); treeTable.doLayout(); ignoreTreeSelectionEvents = false; } }); } } /** * Remove a network from the panel. */ private void removeNetwork(final CyNetwork network) { final NetworkTreeNode node = this.network2nodeMap.remove(network); if (node == null) return; treeNodeMap.values().remove(node); final List<NetworkTreeNode> removedChildren = new ArrayList<NetworkTreeNode>(); final Enumeration<?> children = node.children(); if (children.hasMoreElements()) { while (children.hasMoreElements()) removedChildren.add((NetworkTreeNode) children.nextElement()); } SwingUtilities.invokeLater(new Runnable() { @Override public void run() { ignoreTreeSelectionEvents = true; for (final NetworkTreeNode child : removedChildren) { child.removeFromParent(); root.add(child); } final NetworkTreeNode parentNode = (NetworkTreeNode) node.getParent(); node.removeFromParent(); if (parentNode.isLeaf()) { // Remove from root node parentNode.removeFromParent(); } treeTable.getTree().updateUI(); treeTable.repaint(); ignoreTreeSelectionEvents = false; } }); } /** * Update selected row. 
*/ private final void updateNetworkTreeSelection() { final List<TreePath> paths = new ArrayList<TreePath>(); final List<CyNetwork> selectedNetworks = appMgr.getSelectedNetworks(); for (final CyNetwork network : selectedNetworks) { final NetworkTreeNode node = this.network2nodeMap.get(network); if (node != null) { final TreePath tp = new TreePath(node.getPath()); paths.add(tp); } } SwingUtilities.invokeLater(new Runnable() { @Override public void run() { ignoreTreeSelectionEvents = true; treeTable.getTree().getSelectionModel().setSelectionPaths(paths.toArray(new TreePath[paths.size()])); ignoreTreeSelectionEvents = false; int maxRow = 0; for (final TreePath tp : paths) { final int row = treeTable.getTree().getRowForPath(tp); maxRow = Math.max(maxRow, row); } final int row = maxRow; treeTable.getTree().scrollRowToVisible(row); treeTable.repaint(); } }); } private void selectAllSubnetwork(){ if (selectedRootSet == null) return; final List<CyNetwork> selectedNetworks = new LinkedList<CyNetwork>(); CyNetwork cn = null; NetworkTreeNode node = null; for (final CyRootNetwork root : selectedRootSet) { // This is a "network set" node... // When selecting root node, all of the subnetworks are selected. node = this.network2nodeMap.get(root); // Creates a list of all selected networks List<CySubNetwork> subNetworks = root.getSubNetworkList(); for (CySubNetwork sn : subNetworks) { if (netMgr.networkExists(sn.getSUID())) selectedNetworks.add(sn); } cn = root; } // Determine the current network if (!selectedNetworks.isEmpty()) cn = ((NetworkTreeNode) node.getFirstChild()).getNetwork(); final List<CyNetworkView> selectedViews = new ArrayList<CyNetworkView>(); for (final CyNetwork n : selectedNetworks) { final Collection<CyNetworkView> views = netViewMgr.getNetworkViews(n); if (!views.isEmpty()) selectedViews.addAll(views); } // No need to set the same network again. It should prevent infinite loops. 
// Also check if the network still exists (it could have been removed by another thread). if (cn == null || netMgr.networkExists(cn.getSUID())) { if (cn == null || !cn.equals(appMgr.getCurrentNetwork())) appMgr.setCurrentNetwork(cn); CyNetworkView cv = null; // Try to get the first view of the current network final Collection<CyNetworkView> cnViews = cn != null ? netViewMgr.getNetworkViews(cn) : null; cv = (cnViews == null || cnViews.isEmpty()) ? null : cnViews.iterator().next(); if (cv == null || !cv.equals(appMgr.getCurrentNetworkView())) appMgr.setCurrentNetworkView(cv); appMgr.setSelectedNetworks(selectedNetworks); appMgr.setSelectedNetworkViews(selectedViews); } } @SuppressWarnings({ "unchecked", "rawtypes" }) private void addFactory(TaskFactory factory, Map props) { CyAction action; if ( props.containsKey("enableFor") ) action = new TaskFactoryTunableAction(taskMgr, factory, props, appMgr, netViewMgr); else action = new TaskFactoryTunableAction(taskMgr, factory, props); final JMenuItem item = new JMenuItem(action); Double gravity = 10.0; if (props.containsKey(ServiceProperties.MENU_GRAVITY)){ gravity = Double.valueOf(props.get(ServiceProperties.MENU_GRAVITY).toString()); } this.actionGravityMap.put(item, gravity); popupMap.put(factory, item); popupActions.put(factory, action); int menuIndex = getMenuIndexByGravity(item); popup.insert(item, menuIndex); popup.addPopupMenuListener(action); } private int getMenuIndexByGravity(JMenuItem item) { Double gravity = this.actionGravityMap.get(item); Double gravityX; for (int i=0; i < popup.getComponentCount(); i++ ){ gravityX = this.actionGravityMap.get(popup.getComponent(i)); if (gravity < gravityX){ return i; } } return popup.getComponentCount(); } private void removeFactory(TaskFactory factory) { JMenuItem item = popupMap.remove(factory); if (item != null) popup.remove(item); CyAction action = popupActions.remove(factory); if (action != null) popup.removePopupMenuListener(action); } @SuppressWarnings("unchecked") 
private <T extends CyNetwork> Set<T> getSelectionNetworks(Class<T> type) { final Set<T> nets = new LinkedHashSet<T>(); final JTree tree = treeTable.getTree(); final TreePath[] selectionPaths = tree.getSelectionPaths(); if (selectionPaths != null) { for (final TreePath tp : selectionPaths) { final CyNetwork n = ((NetworkTreeNode) tp.getLastPathComponent()).getNetwork(); if (n != null && type.isAssignableFrom(n.getClass())) nets.add((T) n); } } return nets; } // // Private Classes // // /** * This class listens to mouse events from the TreeTable, if the mouse event * is one that is canonically associated with a popup menu (ie, a right * click) it will pop up the menu with option for destroying view, creating * view, and destroying network (this is platform specific apparently) */ private final class PopupListener extends MouseAdapter { @Override public void mousePressed(MouseEvent e) { maybeShowPopup(e); } // On Windows, popup is triggered by mouse release, not press @Override public void mouseReleased(MouseEvent e) { maybeShowPopup(e); } /** * if the mouse press is of the correct type, this function will maybe * display the popup */ private final void maybeShowPopup(final MouseEvent e) { // Ignore if not valid trigger. if (!e.isPopupTrigger()) return; // get the row where the mouse-click originated final int row = treeTable.rowAtPoint(e.getPoint()); if (row == -1) return; final JTree tree = treeTable.getTree(); final TreePath treePath = tree.getPathForRow(row); Long networkID = null; try { final CyNetwork clickedNet = ((NetworkTreeNode) treePath.getLastPathComponent()).getNetwork(); if (clickedNet instanceof CyRootNetwork) { networkID = null; selectedRoot = (CyRootNetwork) clickedNet; } else { networkID = clickedNet.getSUID(); selectedRoot = null; } } catch (NullPointerException npe) { // The tree root does not represent a network, ignore it. 
return; } if (networkID != null) { final CyNetwork network = netMgr.getNetwork(networkID); if (network != null) { // if the network is not selected, select it final List<CyNetwork> selectedList = appMgr.getSelectedNetworks(); if (selectedList == null || !selectedList.contains(network)) { appMgr.setCurrentNetwork(network); appMgr.setSelectedNetworks(Arrays.asList(new CyNetwork[]{ network })); final Collection<CyNetworkView> netViews = netViewMgr.getNetworkViews(network); appMgr.setSelectedNetworkViews(new ArrayList<CyNetworkView>(netViews)); } // Always repaint, because the rendering of the tree selection // may be out of sync with the AppManager one SwingUtilities.invokeLater(new Runnable() { @Override public void run() { treeTable.repaint(); } }); // enable/disable any actions based on state of system for (CyAction action : popupActions.values()) action.updateEnableState(); // then popup menu popup.show(e.getComponent(), e.getX(), e.getY()); } } else if (selectedRoot != null) { // if the right-clicked root-network is not selected, select it (other selected items will be unselected) selectedRootSet = getSelectionNetworks(CyRootNetwork.class); if (!selectedRootSet.contains(selectedRoot)) { final NetworkTreeNode node = network2nodeMap.get(selectedRoot); if (node != null) { appMgr.setCurrentNetwork(null); appMgr.setSelectedNetworks(null); appMgr.setSelectedNetworkViews(null); selectedRootSet = Collections.singleton(selectedRoot); final TreePath tp = new TreePath(new NetworkTreeNode[]{ root, node }); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { ignoreTreeSelectionEvents = true; tree.getSelectionModel().setSelectionPaths(new TreePath[]{ tp }); treeTable.repaint(); ignoreTreeSelectionEvents = false; } }); } } editRootNetworTitle.setEnabled(selectedRootSet.size() == 1); rootPopupMenu.show(e.getComponent(), e.getX(), e.getY()); } } } }
package io.quarkus.test.common;

import static io.quarkus.test.common.LauncherUtil.createStartedFunction;
import static io.quarkus.test.common.LauncherUtil.updateConfigForPort;
import static io.quarkus.test.common.LauncherUtil.waitForCapturedListeningData;
import static io.quarkus.test.common.LauncherUtil.waitForStartedFunction;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

import io.quarkus.test.common.http.TestHTTPResourceManager;

/**
 * Launches a previously-built Quarkus runner jar in a forked JVM for integration testing.
 * The forked process is configured via {@code -D} system properties (ports, log file,
 * profile, caller-supplied properties) and monitored either through a pluggable
 * {@link IntegrationTestStartedNotifier} or by parsing the captured listening address.
 */
public class DefaultJarLauncher implements JarArtifactLauncher {

    private static final String JAVA_HOME_SYS = "java.home";
    private static final String JAVA_HOME_ENV = "JAVA_HOME";
    private static final String VERTX_HTTP_RECORDER = "io.quarkus.vertx.http.runtime.VertxHttpRecorder";

    // True when the Vert.x HTTP extension is on the test classpath; only then do we
    // pass HTTP port/url properties to the forked process.
    static boolean HTTP_PRESENT;
    static {
        boolean http = true;
        try {
            Class.forName(VERTX_HTTP_RECORDER);
        } catch (ClassNotFoundException e) {
            http = false;
        }
        HTTP_PRESENT = http;
    }

    private int httpPort;
    private int httpsPort;
    private long waitTimeSeconds;
    private String testProfile;
    private List<String> argLine;
    private Path jarPath;
    private final Map<String, String> systemProps = new HashMap<>();

    private Process quarkusProcess;
    private boolean isSsl;

    @Override
    public void init(JarArtifactLauncher.JarInitContext initContext) {
        this.httpPort = initContext.httpPort();
        this.httpsPort = initContext.httpsPort();
        this.waitTimeSeconds = initContext.waitTime().getSeconds();
        this.testProfile = initContext.testProfile();
        this.argLine = initContext.argLine();
        this.jarPath = initContext.jarPath();
    }

    /**
     * Starts the application and blocks until it reports readiness, either via a
     * registered started-notifier or by waiting for the captured listening address.
     *
     * @throws IOException if the process cannot be launched
     */
    public void start() throws IOException {
        start(new String[0], true);

        Function<IntegrationTestStartedNotifier.Context, IntegrationTestStartedNotifier.Result> startedFunction = createStartedFunction();
        var logFile = PropertyTestUtil.getLogFilePath();
        if (startedFunction != null) {
            IntegrationTestStartedNotifier.Result result = waitForStartedFunction(startedFunction, quarkusProcess,
                    waitTimeSeconds, logFile);
            isSsl = result.isSsl();
        } else {
            ListeningAddress result = waitForCapturedListeningData(quarkusProcess, logFile, waitTimeSeconds);
            updateConfigForPort(result.getPort());
            isSsl = result.isSsl();
        }
    }

    /**
     * Runs the application with the given program arguments until it exits, capturing
     * stdout and stderr.
     *
     * @param args program arguments passed to the jar
     * @return exit code plus the captured output streams
     */
    @Override
    public LaunchResult runToCompletion(String[] args) {
        try {
            start(args, false);
            ProcessReader error = new ProcessReader(quarkusProcess.getErrorStream());
            ProcessReader stdout = new ProcessReader(quarkusProcess.getInputStream());
            // Drain both streams concurrently so the child cannot block on a full pipe.
            Thread t = new Thread(error, "Error stream reader");
            t.start();
            t = new Thread(stdout, "Stdout stream reader");
            t.start();
            byte[] s = stdout.get();
            byte[] e = error.get();
            return new LaunchResult(quarkusProcess.waitFor(), s, e);
        } catch (InterruptedException e) {
            // Preserve the interrupt status for callers further up the stack.
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Builds the {@code java -jar} command line and forks the process.
     *
     * @param programArgs arguments appended after the jar path
     * @param handleIo    when true, the process I/O is managed by {@link LauncherUtil}
     *                    (log capture); when false the caller reads the streams itself
     * @throws IOException if the process cannot be created
     */
    public void start(String[] programArgs, boolean handleIo) throws IOException {
        System.setProperty("test.url", TestHTTPResourceManager.getUri());

        List<String> args = new ArrayList<>();
        args.add(determineJavaPath());
        if (!argLine.isEmpty()) {
            args.addAll(argLine);
        }
        if (HTTP_PRESENT) {
            args.add("-Dquarkus.http.port=" + httpPort);
            args.add("-Dquarkus.http.ssl-port=" + httpsPort);
            // this won't be correct when using the random port but it's really only used by us for the rest client tests
            // in the main module, since those tests hit the application itself
            args.add("-Dtest.url=" + TestHTTPResourceManager.getUri());
        }
        Path logFile = PropertyTestUtil.getLogFilePath();
        args.add("-Dquarkus.log.file.path=" + logFile.toAbsolutePath().toString());
        args.add("-Dquarkus.log.file.enable=true");
        if (testProfile != null) {
            args.add("-Dquarkus.profile=" + testProfile);
        }
        for (Map.Entry<String, String> e : systemProps.entrySet()) {
            args.add("-D" + e.getKey() + "=" + e.getValue());
        }
        args.add("-jar");
        args.add(jarPath.toAbsolutePath().toString());
        args.addAll(Arrays.asList(programArgs));

        System.out.println("Executing \"" + String.join(" ", args) + "\"");

        // Start from a clean log file; guard against a parent-less path (e.g. a bare file name).
        Files.deleteIfExists(logFile);
        Path logDir = logFile.toAbsolutePath().getParent();
        if (logDir != null) {
            Files.createDirectories(logDir);
        }

        if (handleIo) {
            quarkusProcess = LauncherUtil.launchProcess(args);
        } else {
            quarkusProcess = Runtime.getRuntime().exec(args.toArray(new String[0]));
        }
    }

    /**
     * Resolves the {@code java} binary to launch: the current JVM's home first,
     * then {@code JAVA_HOME}, finally falling back to {@code java} on the PATH.
     */
    private String determineJavaPath() {
        // try system property first - it will be the JAVA_HOME used by the current JVM
        String home = System.getProperty(JAVA_HOME_SYS);
        if (home == null) {
            // No luck, somewhat a odd JVM not enforcing this property
            // try with the JAVA_HOME environment variable
            home = System.getenv(JAVA_HOME_ENV);
        }
        if (home != null) {
            File javaHome = new File(home);
            File file = new File(javaHome, "bin/java");
            if (file.exists()) {
                return file.getAbsolutePath();
            }
        }
        // just assume 'java' is on the system path
        return "java";
    }

    @Override
    public boolean listensOnSsl() {
        return isSsl;
    }

    @Override
    public void includeAsSysProps(Map<String, String> systemProps) {
        this.systemProps.putAll(systemProps);
    }

    @Override
    public void close() {
        // start() may never have been called (or may have failed before forking).
        if (quarkusProcess != null) {
            quarkusProcess.destroy();
        }
    }
}
package io.quarkus.test.junit;

import java.io.Closeable;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import javax.enterprise.inject.Alternative;
import javax.inject.Inject;

import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.TestInstancePostProcessor;
import org.junit.platform.commons.JUnitException;
import org.opentest4j.TestAbortedException;

import io.quarkus.runtime.configuration.ProfileManager;
import io.quarkus.runtime.test.TestHttpEndpointProvider;
import io.quarkus.test.common.NativeImageLauncher;
import io.quarkus.test.common.PropertyTestUtil;
import io.quarkus.test.common.RestAssuredURLManager;
import io.quarkus.test.common.TestResourceManager;
import io.quarkus.test.common.TestScopeManager;
import io.quarkus.test.common.http.TestHTTPResourceManager;

/**
 * JUnit 5 extension that boots a native-image build of the application once per test run
 * (re-booting when the {@link TestProfile} changes), wires test resources, and points
 * RestAssured at the running process before each test.
 */
public class NativeTestExtension
        implements BeforeEachCallback, AfterEachCallback, BeforeAllCallback, TestInstancePostProcessor {

    private static boolean failedBoot;

    private static List<Function<Class<?>, String>> testHttpEndpointProviders;
    private static boolean ssl;
    // Profile the currently-running native image was started with; compared against the
    // test class's @TestProfile to decide whether a restart is needed.
    private static Class<? extends QuarkusTestProfile> quarkusTestProfile;

    private static Throwable firstException; //if this is set then it will be thrown from the very first test that is run, the rest are aborted

    @Override
    public void afterEach(ExtensionContext context) throws Exception {
        if (!failedBoot) {
            RestAssuredURLManager.clearURL();
            TestScopeManager.tearDown(true);
        }
    }

    @Override
    public void beforeEach(ExtensionContext context) throws Exception {
        if (failedBoot) {
            throwBootFailureException();
        } else {
            RestAssuredURLManager.setURL(ssl, QuarkusTestExtension.getEndpointPath(context, testHttpEndpointProviders));
            TestScopeManager.setup(true);
        }
    }

    @Override
    public void beforeAll(ExtensionContext extensionContext) throws Exception {
        ensureStarted(extensionContext);
    }

    /**
     * Fails fast when a test class (or any superclass) uses {@code @Inject}: CDI injection
     * cannot work in the test JVM when the application runs in a separate native process.
     */
    private void ensureNoInjectAnnotationIsUsed(Class<?> testClass) {
        Class<?> current = testClass;
        while (current.getSuperclass() != null) {
            for (Field field : current.getDeclaredFields()) {
                Inject injectAnnotation = field.getAnnotation(Inject.class);
                if (injectAnnotation != null) {
                    throw new JUnitException("@Inject is not supported in NativeImageTest tests. Offending field is "
                            + field.getDeclaringClass().getTypeName() + "."
                            + field.getName());
                }
            }
            current = current.getSuperclass();
        }
    }

    /**
     * Starts the native image if it is not running yet, or restarts it when the requested
     * test profile differs from the one currently in use. Boot failures are recorded in
     * {@link #failedBoot}/{@link #firstException} rather than thrown here.
     */
    private ExtensionState ensureStarted(ExtensionContext extensionContext) {
        Class<?> testClass = extensionContext.getRequiredTestClass();
        ensureNoInjectAnnotationIsUsed(testClass);
        ExtensionContext root = extensionContext.getRoot();
        ExtensionContext.Store store = root.getStore(ExtensionContext.Namespace.GLOBAL);
        ExtensionState state = store.get(ExtensionState.class.getName(), ExtensionState.class);
        TestProfile annotation = testClass.getAnnotation(TestProfile.class);
        Class<? extends QuarkusTestProfile> selectedProfile = null;
        if (annotation != null) {
            selectedProfile = annotation.value();
        }
        boolean wrongProfile = !Objects.equals(selectedProfile, quarkusTestProfile);
        if ((state == null && !failedBoot) || wrongProfile) {
            if (wrongProfile) {
                if (state != null) {
                    try {
                        state.close();
                    } catch (Throwable throwable) {
                        throwable.printStackTrace();
                    }
                }
            }
            PropertyTestUtil.setLogFileProperty();
            try {
                state = doNativeStart(extensionContext, selectedProfile);
                store.put(ExtensionState.class.getName(), state);
            } catch (Throwable e) {
                failedBoot = true;
                firstException = e;
            }
        }
        return state;
    }

    /**
     * Boots the native image: applies profile config overrides as system properties
     * (remembering prior values for restoration), starts test resources, then launches
     * the process.
     *
     * @param profile the selected test profile class, or {@code null} for none
     * @throws Throwable any failure during resource startup or process launch
     */
    private ExtensionState doNativeStart(ExtensionContext context, Class<? extends QuarkusTestProfile> profile)
            throws Throwable {
        quarkusTestProfile = profile;
        TestResourceManager testResourceManager = null;
        try {
            Class<?> requiredTestClass = context.getRequiredTestClass();

            Map<String, String> sysPropRestore = new HashMap<>();
            sysPropRestore.put(ProfileManager.QUARKUS_TEST_PROFILE_PROP,
                    System.getProperty(ProfileManager.QUARKUS_TEST_PROFILE_PROP));

            QuarkusTestProfile profileInstance = null;
            final Map<String, String> additional = new HashMap<>();
            if (profile != null) {
                // getDeclaredConstructor().newInstance() instead of the deprecated
                // Class.newInstance(), which swallows constructor exceptions.
                profileInstance = profile.getDeclaredConstructor().newInstance();
                additional.putAll(profileInstance.getConfigOverrides());
                final Set<Class<?>> enabledAlternatives = profileInstance.getEnabledAlternatives();
                if (!enabledAlternatives.isEmpty()) {
                    additional.put("quarkus.arc.selected-alternatives", enabledAlternatives.stream()
                            .peek((c) -> {
                                if (!c.isAnnotationPresent(Alternative.class)) {
                                    throw new RuntimeException(
                                            "Enabled alternative " + c + " is not annotated with @Alternative");
                                }
                            })
                            .map(Class::getName).collect(Collectors.joining(",")));
                }
                final String configProfile = profileInstance.getConfigProfile();
                if (configProfile != null) {
                    additional.put(ProfileManager.QUARKUS_PROFILE_PROP, configProfile);
                }
                additional.put("quarkus.configuration.build-time-mismatch-at-runtime", "fail");
                // Remember current values before overriding, so close() can restore them.
                for (Map.Entry<String, String> i : additional.entrySet()) {
                    sysPropRestore.put(i.getKey(), System.getProperty(i.getKey()));
                }
                for (Map.Entry<String, String> i : additional.entrySet()) {
                    System.setProperty(i.getKey(), i.getValue());
                }
            }

            testResourceManager = new TestResourceManager(requiredTestClass);
            testResourceManager.init();
            additional.putAll(testResourceManager.start());

            NativeImageLauncher launcher = new NativeImageLauncher(requiredTestClass);
            launcher.addSystemProperties(additional);
            try {
                launcher.start();
            } catch (IOException e) {
                try {
                    launcher.close();
                } catch (Throwable ignored) {
                    // best-effort cleanup; the original IOException is rethrown below
                }
                throw e;
            }
            if (launcher.isDefaultSsl()) {
                ssl = true;
            }
            final ExtensionState state = new ExtensionState(testResourceManager, launcher, sysPropRestore);
            testHttpEndpointProviders = TestHttpEndpointProvider.load();
            return state;
        } catch (Throwable e) {
            try {
                if (testResourceManager != null) {
                    testResourceManager.close();
                }
            } catch (Exception ex) {
                e.addSuppressed(ex);
            }
            throw e;
        }
    }

    @Override
    public void postProcessTestInstance(Object testInstance, ExtensionContext context) throws Exception {
        if (!failedBoot) {
            TestHTTPResourceManager.inject(testInstance);
            ExtensionContext root = context.getRoot();
            ExtensionContext.Store store = root.getStore(ExtensionContext.Namespace.GLOBAL);
            ExtensionState state = store.get(ExtensionState.class.getName(), ExtensionState.class);
            state.testResourceManager.inject(testInstance);
        }
    }

    /**
     * Rethrows the recorded boot failure exactly once; subsequent tests are aborted
     * (reported as skipped) instead of each repeating the same stack trace.
     */
    private void throwBootFailureException() throws Exception {
        if (firstException != null) {
            Throwable throwable = firstException;
            firstException = null;
            throw new RuntimeException(throwable);
        } else {
            throw new TestAbortedException("Boot failed");
        }
    }

    /**
     * Holds everything that must be torn down when the test run (or profile) ends:
     * test resources, the launched process, and the system properties to restore.
     * Registered as a JVM shutdown hook so cleanup also happens on abrupt termination.
     */
    public class ExtensionState implements ExtensionContext.Store.CloseableResource {

        private final TestResourceManager testResourceManager;
        private final Closeable resource;
        private final Map<String, String> sysPropRestore;
        private final Thread shutdownHook;

        ExtensionState(TestResourceManager testResourceManager, Closeable resource, Map<String, String> sysPropRestore) {
            this.testResourceManager = testResourceManager;
            this.resource = resource;
            this.sysPropRestore = sysPropRestore;
            this.shutdownHook = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        ExtensionState.this.close();
                    } catch (IOException ignored) {
                        // JVM is exiting; nothing useful to do with the failure
                    }
                }
            }, "Quarkus Test Cleanup Shutdown task");
            Runtime.getRuntime().addShutdownHook(shutdownHook);
        }

        @Override
        public void close() throws IOException {
            // Nested finally blocks so a failure in one step cannot skip the others:
            // resources are always closed and system properties always restored.
            try {
                testResourceManager.close();
            } finally {
                try {
                    resource.close();
                } finally {
                    for (Map.Entry<String, String> entry : sysPropRestore.entrySet()) {
                        String val = entry.getValue();
                        if (val == null) {
                            System.clearProperty(entry.getKey());
                        } else {
                            System.setProperty(entry.getKey(), val);
                        }
                    }
                    try {
                        Runtime.getRuntime().removeShutdownHook(shutdownHook);
                    } catch (IllegalStateException ignored) {
                        // close() was invoked from the shutdown hook itself: the JVM is
                        // already shutting down and the hook cannot (and need not) be removed.
                    }
                }
            }
        }
    }
}