code
stringlengths
4
1.01M
language
stringclasses
2 values
package de.mhu.com.morse.channel.sql;

import java.lang.reflect.InvocationTargetException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.LinkedList;

import de.mhu.lib.ASql;
import de.mhu.lib.dtb.Sth;
import de.mhu.com.morse.aaa.IAclManager;
import de.mhu.com.morse.channel.CMql;
import de.mhu.com.morse.channel.IChannelDriverServer;
import de.mhu.com.morse.channel.IConnectionServer;
import de.mhu.com.morse.channel.IQueryFunction;
import de.mhu.com.morse.channel.IQueryWhereFunction;
import de.mhu.com.morse.mql.ICompiledQuery;
import de.mhu.com.morse.types.IAttribute;
import de.mhu.com.morse.types.IAttributeDefault;
import de.mhu.com.morse.types.ITypes;
import de.mhu.com.morse.usr.UserInformation;
import de.mhu.com.morse.utils.AttributeUtil;
import de.mhu.com.morse.utils.MorseException;

/**
 * WhereParser callback that renders a parsed MQL WHERE clause into SQL text,
 * appending the generated fragments to a caller-supplied {@link StringBuffer}.
 *
 * Conventions visible in this class:
 * - Attribute lookups go through {@code desc.attrMap}, which maps a lowercase
 *   attribute name to an {@code Object[]}; slot [1] holds the
 *   {@link IAttribute} and slot [3] a {@code String} column/table-qualified
 *   name (inferred from the casts below — the array layout itself is defined
 *   elsewhere).
 * - {@code off} is a cursor into the compiled query token stream
 *   ({@link ICompiledQuery}); every method that consumes tokens returns the
 *   advanced cursor.
 */
public class WhereSqlListener implements WhereParser.IWhereListener {

	// Destination buffer for the generated SQL; supplied by the caller.
	private StringBuffer sb = null;
	// Descriptor of the query being compiled (attribute map, tables, temp tables).
	private Descriptor desc;
	// SQL dialect driver: column naming, temp-table DDL, value rendering.
	private SqlDriver driver;
	private IConnectionServer con;
	private ITypes types;
	private IAclManager aclm;
	private UserInformation user;
	// The compiled MQL token stream this listener walks via "off" cursors.
	private ICompiledQuery code;
	// State for compareINValue(): whether a ',' separator is needed before
	// the next IN-list value.
	private boolean needComma;

	/**
	 * @param dest buffer the generated SQL WHERE fragments are appended to
	 */
	public WhereSqlListener( SqlDriver pDriver, IConnectionServer pCon,
			ITypes pTypes, IAclManager pAclm, UserInformation pUser,
			Descriptor pDesc, ICompiledQuery pCode, StringBuffer dest ) {
		desc = pDesc;
		driver = pDriver;
		con = pCon;
		types = pTypes;
		aclm = pAclm;
		user = pUser;
		code = pCode;
		sb = dest;
	}

	/**
	 * Emits an "{@code <id-col> IN ( SELECT <id> FROM r_<type>_<attr> WHERE ... )}"
	 * subselect for a table-valued attribute reference and recursively renders
	 * the nested WHERE clause.
	 *
	 * @param name attribute name as written in the query (lowercased here)
	 * @param off  cursor into the compiled token stream
	 * @return the advanced cursor
	 * @throws MorseException if the name is not a valid/known attribute or is ambiguous
	 */
	public int appendTableSelect(String name, int off) throws MorseException {
		name = name.toLowerCase();
		if ( ! AttributeUtil.isAttrName( name, true ) )
			throw new MorseException( MorseException.UNKNOWN_ATTRIBUTE, name );
		Object[] obj = desc.attrMap.get( name );
		if ( obj == null )
			throw new MorseException( MorseException.UNKNOWN_ATTRIBUTE, name );
		// Zero-length entry marks a name that resolves to more than one attribute.
		if ( obj.length == 0 )
			throw new MorseException( MorseException.ATTR_AMBIGIOUS, name );
		String tName = (String)obj[3];
		// Replace the attribute part of "table.attr" with the id column, so we
		// compare the owning row's id against the subselect.
		int pos = tName.indexOf('.');
		if ( pos < 0 )
			tName = IAttribute.M_ID;
		else
			tName = tName.substring( 0, pos + 1 ) + IAttribute.M_ID;
		sb.append( driver.getColumnName( tName ) );
		sb.append( " IN ( SELECT " );
		sb.append( driver.getColumnName( IAttribute.M_ID ) );
		// "r_" prefix: relation table for a table-valued attribute,
		// named r_<sourceType>_<attribute>.
		sb.append( " FROM r_" );
		sb.append( ((IAttribute)obj[1]).getSourceType().getName() )
			.append( '_' )
			.append( ((IAttribute)obj[1]).getName() );
		sb.append( " WHERE " );
		// Build a minimal descriptor for the nested select: just the id
		// attribute and the relation pseudo-table.
		Descriptor desc2 = new Descriptor();
		Attr a = new Attr();
		a.name = IAttribute.M_ID;
		desc2.addAttr( a );
		// find all tables / types
		Table newTable = new Table();
		newTable.name = ((IAttribute)obj[1]).getSourceType().getName() + '.'
				+ ((IAttribute)obj[1]).getName();
		desc2.addTable( newTable );
		SqlUtils.checkTables( desc2, types, con, user, aclm );
		SqlUtils.checkAttributes( con, desc2, user, aclm );
		// Skip two tokens before the nested WHERE body — presumably the WHERE
		// marker and the opening bracket; TODO confirm against WhereParser.
		off+=2;
		off = SqlUtils.createWhereClause( con, driver, off, code, desc2, types, sb, user, aclm );
		// Close the "IN ( SELECT ..." opened above; the trailing off++
		// consumes the matching ')' token (same pattern as compareSubSelect).
		sb.append( ')' );
		off++;
		return off;
	}

	/** Emits the closing bracket of a parenthesised condition group. */
	public void brackedClose() {
		sb.append( ')' );
	}

	/** Emits the opening bracket of a parenthesised condition group. */
	public void brackedOpen() {
		sb.append( '(' );
	}

	/** Emits "left=right". Operands arrive already SQL-rendered. */
	public void compareEQ(String left, String right) {
		sb.append( left ).append( '=' ).append( right );
	}

	/** Emits "left&gt;right". */
	public void compareGT(String left, String right) {
		sb.append( left ).append( '>' ).append( right );
	}

	/** Emits "left&gt;=right". */
	public void compareGTEQ(String left, String right) {
		sb.append( left ).append( ">=" ).append( right );
	}

	/** Starts an "left IN (" list; values follow via compareINValue(). */
	public void compareINBegin(String left) {
		sb.append( left ).append( " IN (" );
		needComma = false;
	}

	/** Closes the IN list opened by compareINBegin(). */
	public void compareINEnd() {
		sb.append( ')' );
	}

	/** Appends one IN-list value, comma-separated after the first. */
	public void compareINValue(String string) {
		if ( needComma )
			sb.append( ',' );
		needComma = true;
		sb.append( string );
	}

	/** Emits "left LIKE right". */
	public void compareLIKE(String left, String right) {
		sb.append( left ).append( " LIKE " ).append( right );
	}

	/** Emits "left&lt;right". */
	public void compareLT(String left, String right) {
		sb.append( left ).append( '<' ).append( right );
	}

	/** Emits "left&lt;=right". */
	public void compareLTEQ(String left, String right) {
		sb.append( left ).append( "<=" ).append( right );
	}

	/** Emits "left!=right". */
	public void compareNOTEQ(String left, String right) {
		sb.append( left ).append( "!=" ).append( right );
	}

	/**
	 * Renders a full nested sub-select ("name IN ( SELECT ... )") from the
	 * compiled token stream: attributes, FROM tables, optional hint WHERE and
	 * optional explicit WHERE clause.
	 *
	 * @param off      cursor positioned at the sub-select's attribute list
	 * @param distinct whether the inner SELECT is DISTINCT
	 * @return the advanced cursor
	 */
	public int compareSubSelect(String name, int off, boolean distinct) throws MorseException {
		Descriptor desc2 = new Descriptor();
		off = SqlUtils.findAttributes(off, code, desc2);
		// NOTE(review): this checks the OUTER descriptor's attrSize although the
		// attributes were just collected into desc2 — looks suspicious; verify.
		if ( desc.attrSize == 0 )
			throw new MorseException( MorseException.NO_ATTRIBUTES );
		off++; // FROM
		// find all tables / types
		off = SqlUtils.findTables(off, code, desc2 );
		SqlUtils.checkTables( desc2, types, con, user, aclm );
		SqlUtils.checkAttributes( con, desc2, user, aclm );
		SqlUtils.postCheckAttributes( desc2 );
		SqlUtils.checkFunctions( con, desc2, desc2, user, driver.getAclManager() );
		// Inner select is built into its own buffer and spliced into sb at the end.
		StringBuffer sb2 = new StringBuffer();
		SqlUtils.createSelect( driver, desc2, sb2, distinct );
		boolean hasWhere = false;
		// Driver-specific hint conditions come first, as their own bracket group.
		if ( SqlUtils.needHintWhere( driver, desc2 ) ) {
			if ( ! hasWhere ) {
				sb2.append( " WHERE (" );
			} else {
				sb2.append( " AND (" );
			}
			SqlUtils.createHintWhereClause( con, driver, desc2, driver.getTypes(), sb2, user, aclm );
			sb2.append( " ) " );
			hasWhere = true;
		}
		// Explicit WHERE clause of the sub-select, if the token stream has one.
		if ( code.getInteger( off ) == CMql.WHERE ) {
			if ( ! hasWhere ) {
				sb2.append( " WHERE (" );
			} else {
				sb2.append( " AND (" );
			}
			off++;
			off = SqlUtils.createWhereClause( con, driver, off, code, desc2, types, sb2, user, aclm );
		}
		sb.append( name ).append( " IN ( " ).append( sb2.toString() ).append( " ) ");
		off++; // )
		return off;
	}

	/**
	 * Evaluates a query function for use inside the WHERE clause and returns
	 * its SQL/text result.
	 *
	 * Dispatch: IQuerySqlFunction gets driver-checked attribute names and
	 * renders SQL itself; IQueryWhereFunction returns a single value; any
	 * other function is fed its arguments via a reflective "append" call and
	 * then asked for getResult().
	 */
	public String executeFunction( IQueryFunction function, LinkedList<Object> functionAttrs ) throws MorseException {
		// Object[] obj = desc.attrMap.get( aName.toLowerCase() );
		if ( function instanceof IQuerySqlFunction ) {
			String[] attrs = (String[])functionAttrs.toArray( new String[ functionAttrs.size() ] );
			// Resolve/validate each attribute name against the current descriptor.
			for ( int j = 0; j < attrs.length; j++ ) {
				attrs[j] = SqlUtils.checkAttribute( driver, null, attrs[j], desc, user );
			}
			return ((IQuerySqlFunction)function).appendSqlCommand( driver, attrs );
		} else {
			Object[] values = new Object[ functionAttrs.size() ];
			Class[] classes = new Class[ functionAttrs.size() ];
			int cnt = 0;
			for ( Iterator i = functionAttrs.iterator(); i.hasNext(); ) {
				values[cnt] = i.next();
				classes[cnt] = values[cnt].getClass();
				cnt++;
			}
			if ( function instanceof IQueryWhereFunction )
				return ((IQueryWhereFunction)function).getSingleResult( values );
			else {
				try {
					// Reflective dispatch to an "append" overload matching the
					// exact runtime classes of the arguments.
					function.getClass().getMethod( "append", classes ).invoke( function, values );
				} catch (Exception e) {
					throw new MorseException( MorseException.ERROR, e );
				}
				return function.getResult();
			}
		}
	}

	/**
	 * Materialises the repeating result of a where-function into a temporary
	 * table and emits "left IN ( SELECT v FROM &lt;tmp&gt; )".
	 *
	 * The temp table is registered on the descriptor (addTmpTable) so it can be
	 * dropped later; creation/insert errors are wrapped into MorseException.
	 */
	public void appendInFunction( String left, IQueryFunction function,
			LinkedList<Object> functionAttrs) throws MorseException {
		Sth sth = null;
		String tmpName = null;
		try {
			Object[] obj = desc.attrMap.get( left.toLowerCase() );
			tmpName = "x_" + driver.getNextTmpId();
			String drop = driver.getDropTmpTableSql( tmpName );
			sth = driver.internatConnection.getPool().aquireStatement();
			if ( drop != null ) {
				try {
					// Best-effort cleanup of a leftover table with the same
					// name; failure here is expected when none exists.
					sth.executeUpdate( drop );
				} catch ( SQLException sqle ) {
				}
			}
			// Single-column ("v") temp table typed like the compared attribute.
			String create = new StringBuffer()
				.append( driver.getCreateTmpTablePrefixSql() )
				.append( ' ' )
				.append( tmpName )
				.append( " ( v " )
				.append( driver.getColumnDefinition( (IAttribute)obj[1], false ) )
				.append( ") ")
				.append( driver.getCreateTmpTableSuffixSql() )
				.toString();
			sth.executeUpdate( create );
			sth.executeUpdate( driver.getCreateTmpIndexSql( 1, tmpName, "v" ) );
			// Only repeating where-functions can feed an IN list.
			if ( ! ( function instanceof IQueryWhereFunction ) )
				throw new MorseException( MorseException.FUNCTION_NOT_COMPATIBLE );
			Iterator<String> res = ((IQueryWhereFunction)function).getRepeatingResult(
					(Object[])functionAttrs.toArray( new Object[ functionAttrs.size() ] ) );
			while ( res.hasNext() ) {
				// Values are rendered through the driver, so quoting/escaping
				// is the driver's responsibility here.
				String insert = "INSERT INTO " + tmpName + "(v) VALUES ("
						+ SqlUtils.getValueRepresentation(driver, (IAttribute)obj[1], res.next() ) + ")";
				sth.executeUpdate( insert );
			}
		} catch ( Exception sqle ) {
			if ( sqle instanceof MorseException )
				throw (MorseException)sqle;
			throw new MorseException( MorseException.ERROR, sqle );
		} finally {
			// NOTE(review): if aquireStatement() itself failed, sth is null and
			// the NPE is swallowed by this catch-all.
			try {
				sth.release();
			} catch ( Exception ex ) {}
		}
		desc.addTmpTable( tmpName );
		sb.append( " IN ( SELECT v FROM " ).append( tmpName ).append( " ) ");
	}

	/** Emits the AND conjunction. */
	public void operatorAnd() {
		sb.append( " AND " );
	}

	/** Emits the NOT operator. */
	public void operatorNot() {
		sb.append( " NOT " );
	}

	/** Emits the OR disjunction. */
	public void operatorOr() {
		sb.append( " OR " );
	}

	/**
	 * Maps an MQL attribute name to its SQL column name via the descriptor's
	 * attribute map and the driver's column naming.
	 *
	 * NOTE: unlike appendTableSelect(), the name is looked up as-is (no
	 * toLowerCase()) — presumably callers pass it lowercased already; verify.
	 *
	 * @throws MorseException if the attribute is unknown or ambiguous
	 */
	public String transformAttribute(String name) throws MorseException {
		Object[] obj = desc.attrMap.get( name );
		if ( obj == null )
			throw new MorseException( MorseException.UNKNOWN_ATTRIBUTE, name );
		if ( obj.length == 0 )
			throw new MorseException( MorseException.ATTR_AMBIGIOUS, name );
		String tName = (String)obj[3];
		/*
		int pos = tName.indexOf('.');
		if ( pos < 0 ) tName = IAttribute.M_ID;
		else tName = tName.substring( 0, pos + 1 ) + IAttribute.M_ID;
		*/
		return driver.getColumnName( tName );
		// return SqlUtils.checkAttribute( driver, null, name, desc, user );
	}

	/**
	 * Converts a literal value from the query into its driver-specific SQL
	 * representation, validating it against the attribute's constraint (ACO)
	 * when the attribute is known.
	 *
	 * @param attrName attribute the value is compared against; may be null
	 * @param name     raw literal as written in the query (possibly 'quoted')
	 * @return the SQL representation, or the raw literal when no attribute
	 *         metadata is available
	 */
	public Object transformValue( String attrName, String name) throws MorseException {
		if ( ! AttributeUtil.isValue( name ) )
			throw new MorseException( MorseException.WRONG_VALUE_FORMAT, name );
		if ( attrName != null ) {
			Object[] obj = desc.attrMap.get( attrName.toLowerCase() );
			if ( obj != null && obj.length != 0 && obj[1] != null ) {
				IAttribute attr = (IAttribute)obj[1];
				String value = name;
				// Strip surrounding single quotes and unescape before validating.
				if ( name.length() > 1 && name.charAt( 0 ) == '\''
						&& name.charAt( name.length() - 1 ) == '\'' )
					value = ASql.unescape( name.substring( 1, name.length() - 1 ) );
				if ( ! attr.getAco().validate( value ) )
					throw new MorseException( MorseException.ATTR_VALUE_NOT_VALIDE,
							new String[] { attrName, name } );
				return SqlUtils.getValueRepresentation( driver, attr, value );
			} else {
				// Fall back to the built-in default attributes (e.g. meta columns).
				IAttribute attr = IAttributeDefault.getAttribute( attrName );
				if ( attr != null )
					return SqlUtils.getValueRepresentation( driver, attr, name );
			}
		}
		return name;
	}
}
Java
//
//  UIImage+Image.h
//  Birthday Manager app ("生日管家")
//
//  Created by yz on 15/7/6.
//  Copyright (c) 2015 yz. All rights reserved.
//

#import <UIKit/UIKit.h>

// Convenience category with image generation, stretching and scaling helpers.
@interface UIImage (Image)

// Creates a 1x1 image filled with the given color.
+ (UIImage *)imageWithColor:(UIColor *)color;

// Returns a stretchable (resizable) image loaded by name.
+ (UIImage *)resizedImageWithName:(NSString *)name;
+ (UIImage *)resizedImageWithName:(NSString *)name left:(CGFloat)left top:(CGFloat)top;

// Returns an image that keeps its original rendering (not tinted as a template).
+ (instancetype)imageWithRenderingModeOriginal:(NSString *)imageName;

// Scales the image down to the given target size.
+ (UIImage *)imageCompressForSize:(UIImage *)sourceImage targetSize:(CGSize)size;

// Alternative scaling helper with explicit width/height.
+ (UIImage *)resizeImage:(UIImage *)image toWidth:(CGFloat)width height:(CGFloat)height;

// Scales the image proportionally by the given factor.
+ (instancetype)zoomImage:(UIImage *)image toScale:(CGFloat)scale;

// Scales the image down to a bar button item's size.
+ (UIImage *)resizeImageToBarButtonItemSize:(UIImage*)image;

@end
Java
/*
 * Copyright 2016 Code Above Lab LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.function.Function;
import java.util.function.IntPredicate;
import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Small string helpers: splitting around a delimiter, character filtering,
 * ASCII alphabet predicates and regex replacement with a callback.
 */
public class StringUtils {

    private StringUtils() {
    }

    /**
     * Return part of 's' before the first 'c'.
     * @throws IllegalArgumentException when 'c' is not present in 's'
     */
    public static String before(String s, char c) {
        return beforeOr(s, c, () -> {
            // we throw exception for preserve old behavior
            throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
        });
    }

    /**
     * Return part of 's' before 'c'
     * @param s string which may contain char 'c'
     * @param c char
     * @param ifNone supplier of value which is used when 'c' is not present in 's' (null not allowed)
     * @return part of 's' before 'c' or 'ifNone.get()'
     */
    public static String beforeOr(String s, char c, Supplier<String> ifNone) {
        int pos = s.indexOf(c);
        if(pos < 0) {
            return ifNone.get();
        }
        return s.substring(0, pos);
    }

    /**
     * Return part of 's' after the first 'c'.
     * @throws IllegalArgumentException when 'c' is not present in 's'
     */
    public static String after(String s, char c) {
        int pos = s.indexOf(c);
        if(pos < 0) {
            throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
        }
        return s.substring(pos + 1);
    }

    /**
     * Return part of 's' before the last 'c'.
     * @throws IllegalArgumentException when 'c' is not present in 's'
     */
    public static String beforeLast(String s, char c) {
        int pos = s.lastIndexOf(c);
        if(pos < 0) {
            throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
        }
        return s.substring(0, pos);
    }

    /**
     * Return part of 's' after the last 'c'.
     * @throws IllegalArgumentException when 'c' is not present in 's'
     */
    public static String afterLast(String s, char c) {
        int pos = s.lastIndexOf(c);
        if(pos < 0) {
            throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
        }
        return s.substring(pos + 1);
    }

    /**
     * Split string into two pieces at last appearing of delimiter.
     * @param s string
     * @param c delimiter
     * @return null if string does not contains delimiter
     */
    public static String[] splitLast(String s, char c) {
        int pos = s.lastIndexOf(c);
        if(pos < 0) {
            return null;
        }
        return new String[] {s.substring(0, pos), s.substring(pos + 1)};
    }

    /**
     * Split string into two pieces at last appearing of delimiter.
     * @param s string
     * @param delimiter delimiter
     * @return null if string does not contains delimiter
     */
    public static String[] splitLast(String s, String delimiter) {
        int pos = s.lastIndexOf(delimiter);
        if(pos < 0) {
            return null;
        }
        return new String[] {s.substring(0, pos), s.substring(pos + delimiter.length())};
    }

    /**
     * Return string which contains only chars for which charJudge give true.
     * @param src source string, may be null
     * @param charJudge predicate which consume codePoint (not chars)
     * @return string, null when incoming string is null
     */
    public static String retain(String src, IntPredicate charJudge) {
        if (src == null) {
            return null;
        }
        final int length = src.length();
        StringBuilder sb = new StringBuilder(length);
        // Advance by the code point's char count so a supplementary character
        // (surrogate pair) is visited once, not once per UTF-16 unit.
        int i = 0;
        while (i < length) {
            int cp = src.codePointAt(i);
            if(charJudge.test(cp)) {
                sb.appendCodePoint(cp);
            }
            i += Character.charCount(cp);
        }
        return sb.toString();
    }

    /**
     * Retain only characters which is {@link #isAz09(int)}
     * @param src source string, may be null
     * @return string, null when incoming string is null
     */
    public static String retainAz09(String src) {
        return retain(src, StringUtils::isAz09);
    }

    /**
     * Retain chars which is acceptable as file name or part of url on most operation systems. <p/>
     * It: <code>'A'-'z', '0'-'9', '_', '-', '.'</code>
     * @param src source string, may be null
     * @return string, null when incoming string is null
     */
    public static String retainForFileName(String src) {
        // Fixed: previously filtered with isAz09, which (contrary to the doc
        // above) stripped '_', '-' and '.'; isForFileName is the matching predicate.
        return retain(src, StringUtils::isForFileName);
    }

    /**
     * Test that specified codePoint is an ASCII letter or digit
     * @param cp codePoint
     * @return true for specified chars
     */
    public static boolean isAz09(int cp) {
        return cp >= '0' && cp <= '9' ||
               cp >= 'a' && cp <= 'z' ||
               cp >= 'A' && cp <= 'Z';
    }

    /**
     * Test that specified codePoint is an ASCII letter, digit or hyphen '-'.
     * @param cp codePoint
     * @return true for specified chars
     */
    public static boolean isAz09Hyp(int cp) {
        return isAz09(cp) || cp == '-';
    }

    /**
     * Test that specified codePoint is an ASCII letter, digit or hyphen '-', '_', ':', '.'. <p/>
     * It common matcher that limit alphabet acceptable for our system IDs.
     * @param cp codePoint
     * @return true for specified chars
     */
    public static boolean isId(int cp) {
        return isAz09(cp) || cp == '-' || cp == '_' || cp == ':' || cp == '.';
    }

    /** Test that specified codePoint is an ASCII hexadecimal digit. */
    public static boolean isHex(int cp) {
        return cp >= '0' && cp <= '9' ||
               cp >= 'a' && cp <= 'f' ||
               cp >= 'A' && cp <= 'F';
    }

    /**
     * Chars which is acceptable as file name or part of url on most operation systems. <p/>
     * It: <code>'A'-'z', '0'-'9', '_', '-', '.'</code>
     * @param cp codePoint
     * @return true for specified chars
     */
    public static boolean isForFileName(int cp) {
        return isAz09(cp) || cp == '-' || cp == '_' || cp == '.';
    }

    /**
     * Invoke {@link Object#toString()} on specified argument, if arg is null then return null.
     * @param o object or null
     * @return null or result of o.toString()
     */
    public static String valueOf(Object o) {
        return o == null? null : o.toString();
    }

    /**
     * Test that each char of specified string match for predicate. <p/>
     * Note that it method does not support unicode, because it usual applicable only for match
     * letters that placed under 128 code.
     * @param str string
     * @param predicate char matcher
     * @return true if all chars match; false for an empty string
     */
    public static boolean match(String str, IntPredicate predicate) {
        final int len = str.length();
        if(len == 0) {
            return false;
        }
        for(int i = 0; i < len; i++) {
            if(!predicate.test(str.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    /**
     * Is a <code>match(str, StringUtils::isAz09);</code>.
     * @param str string
     * @return true if string match [A-Za-z0-9]*
     */
    public static boolean matchAz09(String str) {
        return match(str, StringUtils::isAz09);
    }

    /**
     * Is a <code>match(str, StringUtils::isAz09Hyp);</code>.
     * @param str string
     * @return true if string match [A-Za-z0-9-]*
     */
    public static boolean matchAz09Hyp(String str) {
        return match(str, StringUtils::isAz09Hyp);
    }

    /**
     * Is a <code>match(str, StringUtils::isId);</code>.
     * @param str string
     * @return true if string match [A-Za-z0-9-_:.]*
     */
    public static boolean matchId(String str) {
        return match(str, StringUtils::isId);
    }

    /** Is a <code>match(str, StringUtils::isHex);</code>. */
    public static boolean matchHex(String str) {
        return match(str, StringUtils::isHex);
    }

    /**
     * Replace string with pattern obtaining replacement values through handler function. <p/>
     * Note that it differ from usual Pattern behavior when it process replacement for group
     * references, this code do nothing with replacement.
     * @param pattern pattern
     * @param src source string
     * @param handler function which take matched part of source string and return replacement value, must never return null
     * @return result string
     */
    public static String replace(Pattern pattern, String src, Function<String, String> handler) {
        StringBuilder sb = null;
        Matcher matcher = pattern.matcher(src);
        int pos = 0;
        while(matcher.find()) {
            if(sb == null) {
                // replacement can be a very rare operation, and we not need excess string buffer
                sb = new StringBuilder();
            }
            String expr = matcher.group();
            String replacement = handler.apply(expr);
            sb.append(src, pos, matcher.start());
            sb.append(replacement);
            pos = matcher.end();
        }
        if(sb == null) {
            // no match at all - source returned unchanged
            return src;
        }
        sb.append(src, pos, src.length());
        return sb.toString();
    }
}
Java
from django.db import models
from django.utils.html import format_html
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.fields import ImageField
from sno.models import Sno


class SnoGalleries(models.Model):
    """A photo in an SNO gallery.

    Each photo belongs to one SNO (student scientific organization) record;
    deleting the SNO cascades to its gallery photos.
    """

    class Meta:
        # Russian admin labels: "Photo in SNO gallery" / "Photos in SNO gallery".
        verbose_name = 'Фотография в галереи СНО'
        verbose_name_plural = 'Фотографии в галереи СНО'

    # Optional display name of the photo.
    name = models.CharField('Название фото', max_length=255, blank=True, null=True)
    # The image itself (sorl-thumbnail ImageField, so thumbnails can be generated).
    photo = ImageField(verbose_name='Фото', max_length=255)
    # Optional free-form description.
    description = models.TextField('Описание', blank=True, null=True)
    # Owning SNO; cascade delete removes this photo with it.
    sno = models.ForeignKey(Sno, verbose_name='СНО', on_delete=models.CASCADE)
    # Set once when the row is created (auto_now_add).
    date_created = models.DateField('Дата', auto_now_add=True)

    def photo_preview(self):
        """Return an HTML 75x75 center-cropped thumbnail linking to the full
        photo — presumably used as an admin list/detail preview (it sets
        short_description below); verify against the admin configuration."""
        img = get_thumbnail(self.photo, '75x75', crop='center')
        return format_html('<a href="{}" target="_blank"><img style="width:75px; height:75px;" src="{}"></a>', self.photo.url, img.url)

    photo_preview.short_description = 'Фото'

    def __str__(self):
        # E.g. "Photo name (SNO short name)".
        return '%s (%s)' % (self.name, self.sno.short_name)
Java
/* * Copyright 2015 Namihiko Matsumura (https://github.com/n-i-e/) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.n_i_e.deepfolderview; import java.awt.Toolkit; import java.awt.datatransfer.Clipboard; import java.awt.datatransfer.StringSelection; import java.io.IOException; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import org.eclipse.core.databinding.DataBindingContext; import org.eclipse.core.databinding.beans.PojoProperties; import org.eclipse.core.databinding.observable.Realm; import org.eclipse.core.databinding.observable.value.IObservableValue; import org.eclipse.jface.databinding.swt.SWTObservables; import org.eclipse.jface.databinding.swt.WidgetProperties; import org.eclipse.jface.viewers.TableViewer; import org.eclipse.swt.SWT; import org.eclipse.swt.events.DisposeEvent; import org.eclipse.swt.events.DisposeListener; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Menu; 
import org.eclipse.swt.widgets.MenuItem; import org.eclipse.swt.widgets.ProgressBar; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.eclipse.ui.forms.widgets.FormToolkit; import org.eclipse.wb.swt.SWTResourceManager; import com.github.n_i_e.dirtreedb.Assertion; import com.github.n_i_e.dirtreedb.DBPathEntry; import com.github.n_i_e.dirtreedb.PathEntry; import com.github.n_i_e.dirtreedb.lazy.LazyRunnable; import com.github.n_i_e.dirtreedb.lazy.LazyUpdater; import com.github.n_i_e.dirtreedb.lazy.LazyUpdater.Dispatcher; import com.ibm.icu.text.NumberFormat; import com.ibm.icu.text.SimpleDateFormat; public class SwtFileFolderMenu extends SwtCommonFileFolderMenu { @SuppressWarnings("unused") private DataBindingContext m_bindingContext; protected Shell shell; private FormToolkit formToolkit = new FormToolkit(Display.getDefault()); private Text txtLocation; private Composite compositeToolBar; private Table table; private Label lblStatusBar; private Composite compositeStatusBar; private ProgressBar progressBar; @Override protected Shell getShell() { return shell; } @Override protected Table getTable() { return table; } @Override protected Label getLblStatusBar() { return lblStatusBar; } @Override protected ProgressBar getProgressBar() { return progressBar; } public static void main(String[] args) { final Display display = Display.getDefault(); Realm.runWithDefault(SWTObservables.getRealm(display), new Runnable() { public void run() { try { final SwtFileFolderMenu window = new SwtFileFolderMenu(); window.open(); /* display.asyncExec(new Runnable() { public void run() { TableItem tableItem = new TableItem(window.table, SWT.NONE); tableItem.setText(new String[] {"C:\\", "2015-01-01 00:00:00", "1", "2", "3"}); TableItem tableItem_1 = new TableItem(window.table, SWT.NONE); tableItem_1.setText(new String[] {"D:\\", 
"2014-01-01 00:00:00", "100", "200", "1"}); } });*/ } catch (Exception e) { e.printStackTrace(); } } }); } public void open() { Display display = Display.getDefault(); //createContents(); //shell.open(); //shell.layout(); while (!shell.isDisposed()) { if (!display.readAndDispatch()) { display.sleep(); } } } public SwtFileFolderMenu() { createContents(); shell.open(); shell.layout(); location = new NavigatableList<Location>(); location.add(new Location()); } /** * Create contents of the window. */ private void createContents() { shell = new Shell(); shell.addDisposeListener(new DisposeListener() { public void widgetDisposed(DisposeEvent arg0) { Point p = shell.getSize(); PreferenceRW.setSwtFileFolderMenuWindowWidth(p.x); PreferenceRW.setSwtFileFolderMenuWindowHeight(p.y); } }); shell.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/drive-harddisk.png")); shell.setMinimumSize(new Point(300, 200)); shell.setSize(PreferenceRW.getSwtFileFolderMenuWindowWidth(), PreferenceRW.getSwtFileFolderMenuWindowHeight()); GridLayout gl_shell = new GridLayout(1, false); gl_shell.verticalSpacing = 6; gl_shell.marginWidth = 3; gl_shell.marginHeight = 3; gl_shell.horizontalSpacing = 6; shell.setLayout(gl_shell); Menu menu = new Menu(shell, SWT.BAR); shell.setMenuBar(menu); MenuItem mntmFile = new MenuItem(menu, SWT.CASCADE); mntmFile.setText(Messages.mntmFile_text); Menu menuFile = new Menu(mntmFile); mntmFile.setMenu(menuFile); MenuItem mntmOpen_1 = new MenuItem(menuFile, SWT.NONE); mntmOpen_1.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onOpenSelected(e); } }); mntmOpen_1.setText(Messages.mntmOpen_text); MenuItem mntmOpenInNew_1 = new MenuItem(menuFile, SWT.NONE); mntmOpenInNew_1.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onOpenInNewWindowSelected(e); } }); mntmOpenInNew_1.setText(Messages.mntmOpenInNewWindow_text); 
MenuItem mntmOpenDuplicateDetails_1 = new MenuItem(menuFile, SWT.NONE); mntmOpenDuplicateDetails_1.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onOpenDuplicateDetailsSelected(e); } }); mntmOpenDuplicateDetails_1.setText(Messages.mntmOpenDuplicateDetails_text); MenuItem mntmCopyTo_2 = new MenuItem(menuFile, SWT.NONE); mntmCopyTo_2.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onCopyToSelected(); } }); mntmCopyTo_2.setText(Messages.mntmCopyTo_text); MenuItem mntmClose = new MenuItem(menuFile, SWT.NONE); mntmClose.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onCloseSelected(); } }); mntmClose.setText(Messages.mntmClose_text); MenuItem mntmQuit = new MenuItem(menuFile, SWT.NONE); mntmQuit.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onQuitSelected(); } }); mntmQuit.setText(Messages.mntmQuit_text); MenuItem mntmEdit = new MenuItem(menu, SWT.CASCADE); mntmEdit.setText(Messages.mntmEdit_text); Menu menuEdit = new Menu(mntmEdit); mntmEdit.setMenu(menuEdit); MenuItem mntmRun_1 = new MenuItem(menuEdit, SWT.NONE); mntmRun_1.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onRunSelected(); } }); mntmRun_1.setText(Messages.mntmRun_text); MenuItem mntmCopyAsString_1 = new MenuItem(menuEdit, SWT.NONE); mntmCopyAsString_1.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onCopyAsStringSelected(); } }); mntmCopyAsString_1.setText(Messages.mntmCopyAsString_text); MenuItem mntmCopyTo_1 = new MenuItem(menuEdit, SWT.NONE); mntmCopyTo_1.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onCopyToSelected(); } }); mntmCopyTo_1.setText(Messages.mntmCopyTo_text); MenuItem mntmVisibility = new MenuItem(menu, 
SWT.CASCADE); mntmVisibility.setText(Messages.mntmVisibility_text); Menu menuVisibility = new Menu(mntmVisibility); mntmVisibility.setMenu(menuVisibility); final MenuItem mntmFoldersVisible = new MenuItem(menuVisibility, SWT.CHECK); mntmFoldersVisible.setSelection(true); mntmFoldersVisible.setText(Messages.mntmFoldersVisible_text); final MenuItem mntmFilesVisible = new MenuItem(menuVisibility, SWT.CHECK); mntmFilesVisible.setSelection(true); mntmFilesVisible.setText(Messages.mntmFilesVisible_text); final MenuItem mntmCompressedFoldersVisible = new MenuItem(menuVisibility, SWT.CHECK); mntmCompressedFoldersVisible.setSelection(true); mntmCompressedFoldersVisible.setText(Messages.mntmCompressedFoldersVisible_text); final MenuItem mntmCompressedFilesVisible = new MenuItem(menuVisibility, SWT.CHECK); mntmCompressedFilesVisible.setSelection(true); mntmCompressedFilesVisible.setText(Messages.mntmCompressedFilesVisible_text); MenuItem mntmHelp = new MenuItem(menu, SWT.CASCADE); mntmHelp.setText(Messages.mntmHelp_text); Menu menuHelp = new Menu(mntmHelp); mntmHelp.setMenu(menuHelp); MenuItem mntmOpenSourceLicenses = new MenuItem(menuHelp, SWT.NONE); mntmOpenSourceLicenses.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { new SwtOpenSourceLicenses(shell, SWT.TITLE|SWT.MIN|SWT.MAX|SWT.CLOSE).open(); } }); mntmOpenSourceLicenses.setText(Messages.mntmOpenSourceLicenses_text); compositeToolBar = new Composite(shell, SWT.NONE); compositeToolBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); compositeToolBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND)); compositeToolBar.setFont(SWTResourceManager.getFont("Meiryo UI", 12, SWT.NORMAL)); GridLayout gl_compositeToolBar = new GridLayout(5, false); gl_compositeToolBar.horizontalSpacing = 0; gl_compositeToolBar.verticalSpacing = 0; gl_compositeToolBar.marginWidth = 0; gl_compositeToolBar.marginHeight = 0; 
compositeToolBar.setLayout(gl_compositeToolBar); formToolkit.adapt(compositeToolBar); formToolkit.paintBordersFor(compositeToolBar); Button btnLeft = new Button(compositeToolBar, SWT.NONE); btnLeft.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-previous.png")); btnLeft.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onNavigatePreviousSelected(e); } }); btnLeft.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL)); formToolkit.adapt(btnLeft, true, true); Button btnRight = new Button(compositeToolBar, SWT.NONE); btnRight.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-next.png")); btnRight.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onNavigateNextSelected(e); } }); btnRight.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL)); formToolkit.adapt(btnRight, true, true); Button btnUp = new Button(compositeToolBar, SWT.NONE); btnUp.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-up.png")); btnUp.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onUpperFolderSelected(e); } }); formToolkit.adapt(btnUp, true, true); txtLocation = new Text(compositeToolBar, SWT.BORDER); txtLocation.addModifyListener(new ModifyListener() { public void modifyText(ModifyEvent arg0) { onLocationModified(arg0); } }); GridData gd_txtLocation = new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1); gd_txtLocation.widthHint = 200; txtLocation.setLayoutData(gd_txtLocation); txtLocation.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL)); formToolkit.adapt(txtLocation, true, true); Button btnRefresh = new Button(compositeToolBar, SWT.NONE); btnRefresh.addSelectionListener(new SelectionAdapter() { @Override public void 
widgetSelected(SelectionEvent e) { refresh(); } }); btnRefresh.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/view-refresh.png")); formToolkit.adapt(btnRefresh, true, true); final TableViewer tableViewer = new TableViewer(shell, SWT.MULTI | SWT.BORDER | SWT.FULL_SELECTION | SWT.VIRTUAL); table = tableViewer.getTable(); table.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); //table = new Table(scrolledComposite, SWT.BORDER | SWT.FULL_SELECTION | SWT.VIRTUAL); table.setHeaderVisible(true); table.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onTableSelected(e); } @Override public void widgetDefaultSelected(SelectionEvent e) { onOpenSelected(e); } }); formToolkit.adapt(table); formToolkit.paintBordersFor(table); final TableColumn tblclmnPath = new TableColumn(table, SWT.LEFT); tblclmnPath.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { table.setSortColumn(tblclmnPath); if (table.getSortDirection() == SWT.UP) { table.setSortDirection(SWT.DOWN); } else { table.setSortDirection(SWT.UP); } onTblclmnPathSelected(tblclmnPath, e); } }); tblclmnPath.setWidth(230); tblclmnPath.setText(Messages.tblclmnPath_text); setTableSortDirection(tblclmnPath, "path", order); final TableColumn tblclmnDateLastModified = new TableColumn(table, SWT.LEFT); tblclmnDateLastModified.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { table.setSortColumn(tblclmnDateLastModified); if (table.getSortDirection() == SWT.UP) { table.setSortDirection(SWT.DOWN); } else { table.setSortDirection(SWT.UP); } onTblclmnDateLastModifiedSelected(tblclmnDateLastModified, e); } }); tblclmnDateLastModified.setWidth(136); tblclmnDateLastModified.setText(Messages.tblclmnDateLastModified_text); setTableSortDirection(tblclmnDateLastModified, "datelastmodified", order); final TableColumn 
tblclmnSize = new TableColumn(table, SWT.RIGHT); tblclmnSize.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { table.setSortColumn(tblclmnSize); if (table.getSortDirection() == SWT.UP) { table.setSortDirection(SWT.DOWN); } else { table.setSortDirection(SWT.UP); } onTblclmnSizeSelected(tblclmnSize, e); } }); tblclmnSize.setWidth(110); tblclmnSize.setText(Messages.tblclmnSize_text); setTableSortDirection(tblclmnSize, "size", order); final TableColumn tblclmnCompressedsize = new TableColumn(table, SWT.RIGHT); tblclmnCompressedsize.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { table.setSortColumn(tblclmnCompressedsize); if (table.getSortDirection() == SWT.UP) { table.setSortDirection(SWT.DOWN); } else { table.setSortDirection(SWT.UP); } onTblclmnCompressedsizeSelected(tblclmnCompressedsize, e); } }); tblclmnCompressedsize.setWidth(110); tblclmnCompressedsize.setText(Messages.tblclmnCompressedesize_text); setTableSortDirection(tblclmnCompressedsize, "compressedsize", order); final TableColumn tblclmnDuplicate = new TableColumn(table, SWT.NONE); tblclmnDuplicate.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { table.setSortColumn(tblclmnDuplicate); if (table.getSortDirection() == SWT.UP) { table.setSortDirection(SWT.DOWN); } else { table.setSortDirection(SWT.UP); } onTblclmnDuplicateSelected(tblclmnDuplicate, e); } }); tblclmnDuplicate.setWidth(35); tblclmnDuplicate.setText(Messages.tblclmnDuplicate_text); setTableSortDirection(tblclmnDuplicate, "duplicate", order); final TableColumn tblclmnDedupablesize = new TableColumn(table, SWT.RIGHT); tblclmnDedupablesize.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { table.setSortColumn(tblclmnDedupablesize); if (table.getSortDirection() == SWT.UP) { table.setSortDirection(SWT.DOWN); } else { 
table.setSortDirection(SWT.UP); } onTblclmnDedupablesizeSelected(tblclmnDedupablesize, e); } }); tblclmnDedupablesize.setWidth(110); tblclmnDedupablesize.setText(Messages.tblclmnDedupablesize_text); setTableSortDirection(tblclmnDedupablesize, "dedupablesize", order); Menu popupMenu = new Menu(table); table.setMenu(popupMenu); MenuItem mntmRun = new MenuItem(popupMenu, SWT.NONE); mntmRun.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onRunSelected(); } }); mntmRun.setText(Messages.mntmRun_text); MenuItem mntmOpen = new MenuItem(popupMenu, SWT.NONE); mntmOpen.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onOpenSelected(e); } }); mntmOpen.setText(Messages.mntmOpen_text); MenuItem mntmOpenInNew = new MenuItem(popupMenu, SWT.NONE); mntmOpenInNew.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onOpenInNewWindowSelected(e); } }); mntmOpenInNew.setText(Messages.mntmOpenInNewWindow_text); MenuItem mntmOpenDuplicateDetails = new MenuItem(popupMenu, SWT.NONE); mntmOpenDuplicateDetails.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onOpenDuplicateDetailsSelected(e); } }); mntmOpenDuplicateDetails.setText(Messages.mntmOpenDuplicateDetails_text); MenuItem mntmCopyAsString = new MenuItem(popupMenu, SWT.NONE); mntmCopyAsString.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onCopyAsStringSelected(); } }); mntmCopyAsString.setText(Messages.mntmCopyAsString_text); MenuItem mntmCopyTo = new MenuItem(popupMenu, SWT.NONE); mntmCopyTo.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { onCopyToSelected(); } }); mntmCopyTo.setText(Messages.mntmCopyTo_text); MenuItem menuItem = new MenuItem(popupMenu, SWT.SEPARATOR); menuItem.setText("Visibility"); final MenuItem 
mntmFoldersVisible_1 = new MenuItem(popupMenu, SWT.CHECK); mntmFoldersVisible_1.setSelection(true); mntmFoldersVisible_1.setText(Messages.mntmFoldersVisible_text); mntmFoldersVisible_1.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { mntmFoldersVisible.setSelection(mntmFoldersVisible_1.getSelection()); onFoldersVisibleChecked(mntmFoldersVisible.getSelection()); } }); final MenuItem mntmFilesVisible_1 = new MenuItem(popupMenu, SWT.CHECK); mntmFilesVisible_1.setSelection(true); mntmFilesVisible_1.setText(Messages.mntmFilesVisible_text); mntmFilesVisible_1.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { mntmFilesVisible.setSelection(mntmFilesVisible_1.getSelection()); onFilesVisibleChecked(mntmFilesVisible.getSelection()); } }); final MenuItem mntmCompressedFoldersVisible_1 = new MenuItem(popupMenu, SWT.CHECK); mntmCompressedFoldersVisible_1.setSelection(true); mntmCompressedFoldersVisible_1.setText(Messages.mntmCompressedFoldersVisible_text); mntmCompressedFoldersVisible_1.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { mntmCompressedFoldersVisible.setSelection(mntmCompressedFoldersVisible_1.getSelection()); onCompressedFoldersVisibleChecked(mntmCompressedFoldersVisible.getSelection()); } }); final MenuItem mntmCompressedFilesVisible_1 = new MenuItem(popupMenu, SWT.CHECK); mntmCompressedFilesVisible_1.setSelection(true); mntmCompressedFilesVisible_1.setText(Messages.mntmCompressedFilesVisible_text); mntmCompressedFilesVisible_1.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { mntmCompressedFilesVisible.setSelection(mntmCompressedFilesVisible_1.getSelection()); onCompressedFilesVisibleSelected(mntmCompressedFilesVisible.getSelection()); } }); mntmFoldersVisible.addSelectionListener(new SelectionAdapter() { @Override public void 
widgetSelected(SelectionEvent e) { mntmFoldersVisible_1.setSelection(mntmFoldersVisible.getSelection()); onFoldersVisibleChecked(mntmFoldersVisible.getSelection()); } }); mntmFilesVisible.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { mntmFilesVisible_1.setSelection(mntmFilesVisible.getSelection()); onFilesVisibleChecked(mntmFilesVisible.getSelection()); } }); mntmCompressedFoldersVisible.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { mntmCompressedFoldersVisible_1.setSelection(mntmCompressedFoldersVisible.getSelection()); onCompressedFoldersVisibleChecked(mntmCompressedFoldersVisible.getSelection()); } }); mntmCompressedFilesVisible.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { mntmCompressedFilesVisible_1.setSelection(mntmCompressedFilesVisible.getSelection()); onCompressedFilesVisibleSelected(mntmCompressedFilesVisible.getSelection()); } }); compositeStatusBar = new Composite(shell, SWT.NONE); compositeStatusBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); compositeStatusBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND)); GridLayout gl_compositeStatusBar = new GridLayout(2, false); gl_compositeStatusBar.marginWidth = 0; gl_compositeStatusBar.marginHeight = 0; compositeStatusBar.setLayout(gl_compositeStatusBar); formToolkit.adapt(compositeStatusBar); formToolkit.paintBordersFor(compositeStatusBar); lblStatusBar = new Label(compositeStatusBar, SWT.NONE); lblStatusBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1)); lblStatusBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND)); formToolkit.adapt(lblStatusBar, true, true); lblStatusBar.setText(""); progressBar = new ProgressBar(compositeStatusBar, SWT.NONE); formToolkit.adapt(progressBar, true, true); m_bindingContext = initDataBindings(); } /* * event 
handlers */

/** Copies the full paths of all selected rows to the system clipboard, one path per line. */
protected void onCopyAsStringSelected() {
	ArrayList<String> s = new ArrayList<String>();
	for (PathEntry p: getSelectedPathEntries()) {
		s.add(p.getPath());
	}
	StringSelection ss = new StringSelection(String.join("\n", s));
	Clipboard clip = Toolkit.getDefaultToolkit().getSystemClipboard();
	clip.setContents(ss, ss);
}

/** Opens the selected entry in this window; does nothing when no row is selected. */
protected void onOpenSelected(SelectionEvent e) {
	DBPathEntry entry = getSelectedPathEntry();
	if (entry != null) {
		setLocationAndRefresh(entry);
	}
}

/**
 * Opens the selected entry (or, failing that, the current location) in a new
 * SwtFileFolderMenu window. Falls back entry -> path string -> search string -> path id,
 * using whichever is available first.
 */
protected void onOpenInNewWindowSelected(SelectionEvent e) {
	DBPathEntry p = getSelectedPathEntry();
	if (p == null) {
		p = location.get().getPathEntry();
	}
	if (p != null) {
		new SwtFileFolderMenu().setLocationAndRefresh(p);
	} else if (location.get().getPathString() != null) {
		new SwtFileFolderMenu().setLocationAndRefresh(location.get().getPathString());
	} else if (location.get().getSearchString() != null) {
		new SwtFileFolderMenu().setLocationAndRefresh(location.get().getSearchString());
	} else if (location.get().getPathId() != 0L) {
		new SwtFileFolderMenu().setLocationAndRefresh(location.get().getPathId());
	}
}

/** Same fallback chain as onOpenInNewWindowSelected, but opens a SwtDuplicateMenu window. */
protected void onOpenDuplicateDetailsSelected(SelectionEvent e) {
	DBPathEntry p = getSelectedPathEntry();
	if (p == null) {
		p = location.get().getPathEntry();
	}
	if (p != null) {
		new SwtDuplicateMenu().setLocationAndRefresh(p);
	} else if (location.get().getPathString() != null) {
		new SwtDuplicateMenu().setLocationAndRefresh(location.get().getPathString());
	} else if (location.get().getSearchString() != null) {
		new SwtDuplicateMenu().setLocationAndRefresh(location.get().getSearchString());
	} else if (location.get().getPathId() != 0L) {
		new SwtDuplicateMenu().setLocationAndRefresh(location.get().getPathId());
	}
}

/** History "back" button handler. */
protected void onNavigatePreviousSelected(SelectionEvent e) {
	location.navigatePrevious();
	setLocationAndRefresh(location.get());
}

/** History "forward" button handler. */
protected void onNavigateNextSelected(SelectionEvent e) {
	location.navigateNext();
	setLocationAndRefresh(location.get());
}

/** "Up" button: jumps to the parent folder when both the current entry and its parent are known. */
protected void onUpperFolderSelected(SelectionEvent e) {
	DBPathEntry p = location.get().getPathEntry();
	if (p != null && p.getParentId() != 0L) {
		setLocationAndRefresh(p.getParentId());
	} else {
		writeStatusBar("Not ready for going up operation; be patient.");
	}
}

/**
 * Reacts to edits of the location text box: pushes a new Location onto the history
 * (unless the text already matches the current path or search string) and refreshes.
 */
protected void onLocationModified(ModifyEvent arg0) {
	String newstring = txtLocation.getText();
	Assertion.assertNullPointerException(newstring != null);
	writeStatusBar(String.format("New path string is: %s", newstring));
	shell.setText(newstring);
	Location oldloc = location.get();
	if (newstring.equals(oldloc.getPathString())) {
		// noop — text unchanged relative to the current path
	} else if (newstring.equals(oldloc.getSearchString())) {
		// same text as the current search: downgrade the location to a pure search
		oldloc.setPathEntry(null);
		oldloc.setPathId(0L);
		oldloc.setPathString(null);
	} else {
		Location newloc = new Location();
		newloc.setPathString(newstring);
		location.add(newloc);
	}
	refresh();
}

protected void onTableSelected(SelectionEvent e) {}

// Current ORDER BY clause (persisted in preferences) and type-visibility filters used by refresh().
private String order = PreferenceRW.getSwtFileFolderMenuSortOrder();
private boolean isFolderChecked = true;
private boolean isFileChecked = true;
private boolean isCompressedFolderChecked = true;
private boolean isCompressedFileChecked = true;

// Column-header click handlers: persist the new sort order and re-query.
// The table's sort direction has already been toggled by the column's SelectionAdapter.
protected void onTblclmnPathSelected(TableColumn tblclmnPath, SelectionEvent e) {
	if (table.getSortDirection() == SWT.UP) {
		order = "path";
	} else {
		order = "path DESC";
	}
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}

protected void onTblclmnDateLastModifiedSelected(TableColumn tblclmnDateLastModified, SelectionEvent e) {
	if (table.getSortDirection() == SWT.UP) {
		order = "datelastmodified";
	} else {
		order = "datelastmodified DESC";
	}
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}

protected void onTblclmnSizeSelected(TableColumn tblclmnSize, SelectionEvent e) {
	if (table.getSortDirection() == SWT.UP) {
		order = "size";
	} else {
		order = "size DESC";
	}
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}

protected void onTblclmnCompressedsizeSelected(TableColumn tblclmnCompressedsize, SelectionEvent e) {
	if (table.getSortDirection() == SWT.UP) {
		order = "compressedsize";
	} else {
		order = "compressedsize DESC";
	}
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}

protected void onTblclmnDuplicateSelected(TableColumn tblclmnDuplicate, SelectionEvent e) {
	if (table.getSortDirection() == SWT.UP) {
		order = "duplicate";
	} else {
		order = "duplicate DESC";
	}
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}

protected void onTblclmnDedupablesizeSelected(TableColumn tblclmnDedupablesize, SelectionEvent e) {
	if (table.getSortDirection() == SWT.UP) {
		order = "dedupablesize";
	} else {
		order = "dedupablesize DESC";
	}
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}

// Visibility checkbox handlers: update the corresponding type filter and re-query.
protected void onFoldersVisibleChecked(boolean checked) {
	isFolderChecked = checked;
	refresh();
}

protected void onFilesVisibleChecked(boolean checked) {
	isFileChecked = checked;
	refresh();
}

protected void onCompressedFoldersVisibleChecked(boolean checked) {
	isCompressedFolderChecked = checked;
	refresh();
}

protected void onCompressedFilesVisibleSelected(boolean checked) {
	isCompressedFileChecked = checked;
	refresh();
}

/**
 * Sets the location text box on the UI thread; the box's ModifyListener then
 * invokes onLocationModified(), which performs the actual refresh.
 */
public void setLocationAndRefresh(final String text) {
	Display.getDefault().asyncExec(new Runnable() {
		public void run() {
			txtLocation.setText(text); // onLocationModified() is automatically called here.
		}
	});
}

/*
 * setLocationAndRefresh and related
 */

/** Records {@code entry} in the location history (if new) and navigates to its path. */
public void setLocationAndRefresh(final DBPathEntry entry) {
	Assertion.assertNullPointerException(entry != null);
	Assertion.assertNullPointerException(location != null);
	Location oldloc = location.get();
	if (oldloc.getPathEntry() != null && oldloc.getPathEntry().getPathId() == entry.getPathId()) {
		// noop — already at this entry
	} else if (oldloc.getPathString() != null && oldloc.getPathString().equals(entry.getPath())) {
		// same path already current: just attach the resolved entry/id
		oldloc.setPathEntry(entry);
		oldloc.setPathId(entry.getPathId());
	} else {
		Location newloc = new Location();
		newloc.setPathEntry(entry);
		newloc.setPathId(entry.getPathId());
		newloc.setPathString(entry.getPath());
		location.add(newloc);
	}
	setLocationAndRefresh(entry.getPath());
}

/** Navigates by database path id; the entry/path string are resolved lazily in the background. */
public void setLocationAndRefresh(long id) {
	writeStatusBar(String.format("Starting query; new ID is: %d", id));
	Location oldloc = location.get();
	if (oldloc.getPathId() == id) {
		// noop — already at this id
	} else {
		Location newloc = new Location();
		newloc.setPathId(id);
		location.add(newloc);
	}
	refresh(new LazyRunnable() {
		@Override
		public void run() throws SQLException, InterruptedException {
			Debug.writelog("-- SwtFileFolderMenu SetLocationAndRefresh LOCAL PATTERN (id based) --");
			Location loc = location.get();
			DBPathEntry p = getDB().getDBPathEntryByPathId(loc.getPathId());
			if (p != null) {
				loc.setPathEntry(p);
				loc.setPathString(p.getPath());
				loc.setSearchString(null);
				setLocationAndRefresh(loc.getPathString());
			}
		}
	});
}

/** Navigates to whichever representation {@code loc} carries: path string, entry, or search string. */
public void setLocationAndRefresh(final Location loc) {
	if (loc.getPathString() != null) {
		setLocationAndRefresh(loc.getPathString());
	} else if (loc.getPathEntry() != null) {
		setLocationAndRefresh(loc.getPathEntry().getPath());
	} else if (loc.getSearchString() != null) {
		setLocationAndRefresh(loc.getSearchString());
	} else {
		setLocationAndRefresh("");
	}
}

/*
 * normal refresh
 */
private Scenario scenario = new Scenario();

protected synchronized void refresh() {
	refresh(scenario);
}

/** Background job that queries the directory table and repopulates the SWT table. */
class Scenario extends SwtCommonFileFolderMenu.Scenario {

	@Override
	public void run() throws SQLException, InterruptedException {
		writeProgress(10);
		Location loc = location.get();
		// NOTE(review): the "loc.getPathEntry() != null" disjunct below is always false
		// given the first conjunct; kept as-is to preserve the original behavior.
		if (loc.getPathEntry() == null && loc.getSearchString() == null
				&& (loc.getPathEntry() != null || loc.getPathId() != 0L
						|| (loc.getPathString() != null && !"".equals(loc.getPathString())))) {
			writeProgress(50);
			if (loc.getPathString() != null) {
				DBPathEntry p = getDB().getDBPathEntryByPath(loc.getPathString());
				if (p != null) {
					loc.setPathEntry(p);
					loc.setPathId(p.getPathId());
					Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 1 (path based entry detection) --");
				} else {
					// path string does not resolve to an entry: treat it as a search string
					loc.setSearchString(loc.getPathString());
					loc.setPathString(null);
					loc.setPathId(0L);
					loc.setPathEntry(null);
					Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 2 (searchstring=" + loc.getSearchString() + ") --");
				}
			} else if (loc.getPathId() != 0L) {
				Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 3 (id based) --");
				DBPathEntry p = getDB().getDBPathEntryByPathId(loc.getPathId());
				assert(p != null);
				setLocationAndRefresh(p);
				return;
			} else {
				Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 4 (show all paths) --");
			}
		}
		try {
			threadWait();
			cleanupTable();
			// Build the "type=…" visibility filter from the checkbox state.
			ArrayList<String> typelist = new ArrayList<String> ();
			if (isFolderChecked) { typelist.add("type=0"); }
			if (isFileChecked) { typelist.add("type=1"); }
			if (isCompressedFolderChecked) { typelist.add("type=2"); }
			if (isCompressedFileChecked) { typelist.add("type=3"); }
			String typeWhere = typelist.size() == 0 ? "" : String.join(" OR ", typelist);

			threadWait();
			writeStatusBar("Querying...");
			writeProgress(70);
			// Split the search string on spaces into one parameterized "path LIKE ?" term each.
			String searchSubSQL;
			ArrayList<String> searchStringElement = new ArrayList<String> ();
			if (loc.getSearchString() == null || "".equals(loc.getSearchString())) {
				searchSubSQL = "";
			} else {
				ArrayList<String> p = new ArrayList<String> ();
				for (String s: loc.getSearchString().split(" ")) {
					if (! "".equals(s)) {
						p.add("path LIKE ?");
						searchStringElement.add(s);
					}
				}
				searchSubSQL = " AND (" + String.join(" AND ", p) + ")";
			}

			threadWait();
			DBPathEntry locationPathEntry = null;
			PreparedStatement ps;
			if (loc.getPathString() == null || "".equals(loc.getPathString())) {
				// No location at all: list every entry whose parent exists (or roots).
				String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
						+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
						+ " ORDER BY " + order;
				Debug.writelog(sql);
				ps = getDB().prepareStatement(sql);
				int c = 1;
				for (String s: searchStringElement) {
					ps.setString(c, "%" + s + "%");
					Debug.writelog(c + " %" + s + "%");
					c++;
				}
			} else if ((locationPathEntry = loc.getPathEntry()) != null) {
				// Resolved entry: restrict to the entry itself plus its transitive children.
				String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
						+ " AND (pathid=? OR EXISTS (SELECT * FROM upperlower WHERE upper=? AND lower=pathid))"
						+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
						+ " ORDER BY " + order;
				Debug.writelog(sql);
				ps = getDB().prepareStatement(sql);
				int c = 1;
				for (String s: searchStringElement) {
					ps.setString(c, "%" + s + "%");
					Debug.writelog(c + " %" + s + "%");
					c++;
				}
				ps.setLong(c++, locationPathEntry.getPathId());
				ps.setLong(c++, locationPathEntry.getPathId());
				Debug.writelog(locationPathEntry.getPath());
			} else {
				// Unresolved path string: fall back to a prefix LIKE match.
				String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
						+ " AND path LIKE ?"
						+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
						+ " ORDER BY " + order;
				Debug.writelog(sql);
				ps = getDB().prepareStatement(sql);
				int c = 1;
				for (String s: searchStringElement) {
					ps.setString(c, "%" + s + "%");
					Debug.writelog(c + " %" + s + "%");
					c++;
				}
				ps.setString(c++, loc.getPathString() + "%");
				Debug.writelog(loc.getPathString());
			}
			try {
				LazyUpdater.Dispatcher disp = getDB().getDispatcher();
				disp.setList(Dispatcher.NONE);
				disp.setCsum(Dispatcher.NONE);
				ResultSet rs = ps.executeQuery();
				try {
					threadWait();
					Debug.writelog("QUERY FINISHED");
					writeStatusBar("Listing...");
					writeProgress(90);
					int count = 0;
					while (rs.next()) {
						threadWait();
						DBPathEntry p1 = getDB().rsToPathEntry(rs);
						Assertion.assertAssertionError(p1 != null);
						Assertion.assertAssertionError(p1.getPath() != null);
						if (locationPathEntry != null) {
							Assertion.assertAssertionError(locationPathEntry.getPath() != null);
							Assertion.assertAssertionError(p1.getPath().startsWith(locationPathEntry.getPath()),
									p1.getPath() + " does not start with " + locationPathEntry.getPath() );
						}
						// Re-stat the entry on disk; dispatch failures mean "currently unreadable".
						PathEntry p2;
						try {
							p2 = disp.dispatch(p1);
						} catch (IOException e) {
							p2 = null;
						}
						if (p2 == null) {
							// Stale/unreachable entry: show grayed out and mark parent dirty.
							addRow(p1, rs.getInt("duplicate"), rs.getLong("dedupablesize"), true);
							getDB().unsetClean(p1.getParentId());
						} else {
							Assertion.assertAssertionError(p1.getPath().equals(p2.getPath()),
									"!! " + p1.getPath() + " != " + p2.getPath());
							if (!PathEntry.dscMatch(p1, p2)) {
								// On-disk metadata changed: refresh the cached values.
								p1.setDateLastModified(p2.getDateLastModified());
								p1.setSize(p2.getSize());
								p1.setCompressedSize(p2.getCompressedSize());
								p1.clearCsum();
								getDB().unsetClean(p1.getParentId());
							}
							addRow(p1, rs.getInt("duplicate"), rs.getLong("dedupablesize"), false);
						}
						count ++;
					}
					writeStatusBar(String.format("%d items", count));
				} finally {
					rs.close();
				}
			} finally {
				ps.close();
			}
			writeProgress(0);
		} catch (WindowDisposedException e) {}
	}

	/** Clears the backing entry list and the SWT table on the UI thread. */
	protected void cleanupTable() throws WindowDisposedException {
		if (table.isDisposed()) {
			throw new WindowDisposedException("!! Window disposed at cleanupTable");
		}
		Display.getDefault().asyncExec(new Runnable() {
			public void run() {
				pathentrylist.clear();
				table.removeAll(); // fixed: stray ";;" (empty statement) removed
			}
		});
	}

	/**
	 * Appends one result row on the UI thread. Color coding: gray = stale entry,
	 * red = no access, blue = compressed file, black = everything else.
	 */
	protected void addRow(final DBPathEntry entry, final int duplicate, final long dedupablesize,
			final boolean grayout) throws WindowDisposedException {
		if (table.isDisposed()) {
			throw new WindowDisposedException("!! Window disposed at addRow");
		}
		Display.getDefault().asyncExec(new Runnable() {
			public void run() {
				pathentrylist.add(entry);
				final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
				final NumberFormat numf = NumberFormat.getNumberInstance();
				Date d = new Date(entry.getDateLastModified());
				String[] row = {
						entry.getPath(),
						sdf.format(d),
						numf.format(entry.getSize()),
						numf.format(entry.getCompressedSize()),
						(duplicate > 0 ? numf.format(duplicate) : null),
						(dedupablesize > 0 ? numf.format(dedupablesize) : null),
				};
				final Display display = Display.getDefault();
				final Color blue = new Color(display, 0, 0, 255);
				final Color red = new Color(display, 255, 0, 0);
				final Color black = new Color(display, 0, 0, 0);
				final Color gray = new Color(display, 127, 127, 127);
				try {
					TableItem tableItem = new TableItem(table, SWT.NONE);
					tableItem.setText(row);
					if (grayout) {
						tableItem.setForeground(gray);
					} else if (entry.isNoAccess()) {
						tableItem.setForeground(red);
					} else if (entry.isFile() && entry.getSize() != entry.getCompressedSize()) {
						tableItem.setForeground(blue);
					} else {
						tableItem.setForeground(black);
					}
				} catch (Exception e) {
					// The table may be disposed concurrently; only report unexpected failures.
					if (!table.isDisposed()) {
						e.printStackTrace();
					}
				}
			}
		});
	}
}

/** Wires widget backgrounds to the shell background via JFace data binding. */
protected DataBindingContext initDataBindings() {
	DataBindingContext bindingContext = new DataBindingContext();
	//
	IObservableValue observeBackgroundCompositeObserveWidget = WidgetProperties.background().observe(compositeToolBar);
	IObservableValue backgroundShellObserveValue = PojoProperties.value("background").observe(shell);
	bindingContext.bindValue(observeBackgroundCompositeObserveWidget, backgroundShellObserveValue, null, null);
	//
	IObservableValue observeBackgroundLblStatusBarObserveWidget = WidgetProperties.background().observe(lblStatusBar);
	bindingContext.bindValue(observeBackgroundLblStatusBarObserveWidget, backgroundShellObserveValue, null, null);
	//
	IObservableValue observeBackgroundCompositeStatusBarObserveWidget = WidgetProperties.background().observe(compositeStatusBar);
	bindingContext.bindValue(observeBackgroundCompositeStatusBarObserveWidget, backgroundShellObserveValue, null, null);
	//
	return bindingContext;
}
}
Java
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.medialive.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * Settings for the action to deactivate the image in a specific layer. * * @see <a * href="http://docs.aws.amazon.com/goto/WebAPI/medialive-2017-10-14/StaticImageDeactivateScheduleActionSettings" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class StaticImageDeactivateScheduleActionSettings implements Serializable, Cloneable, StructuredPojo { /** The time in milliseconds for the image to fade out. Default is 0 (no fade-out). */ private Integer fadeOut; /** The image overlay layer to deactivate, 0 to 7. Default is 0. */ private Integer layer; /** * The time in milliseconds for the image to fade out. Default is 0 (no fade-out). * * @param fadeOut * The time in milliseconds for the image to fade out. Default is 0 (no fade-out). */ public void setFadeOut(Integer fadeOut) { this.fadeOut = fadeOut; } /** * The time in milliseconds for the image to fade out. Default is 0 (no fade-out). * * @return The time in milliseconds for the image to fade out. Default is 0 (no fade-out). */ public Integer getFadeOut() { return this.fadeOut; } /** * The time in milliseconds for the image to fade out. Default is 0 (no fade-out). 
* * @param fadeOut * The time in milliseconds for the image to fade out. Default is 0 (no fade-out). * @return Returns a reference to this object so that method calls can be chained together. */ public StaticImageDeactivateScheduleActionSettings withFadeOut(Integer fadeOut) { setFadeOut(fadeOut); return this; } /** * The image overlay layer to deactivate, 0 to 7. Default is 0. * * @param layer * The image overlay layer to deactivate, 0 to 7. Default is 0. */ public void setLayer(Integer layer) { this.layer = layer; } /** * The image overlay layer to deactivate, 0 to 7. Default is 0. * * @return The image overlay layer to deactivate, 0 to 7. Default is 0. */ public Integer getLayer() { return this.layer; } /** * The image overlay layer to deactivate, 0 to 7. Default is 0. * * @param layer * The image overlay layer to deactivate, 0 to 7. Default is 0. * @return Returns a reference to this object so that method calls can be chained together. */ public StaticImageDeactivateScheduleActionSettings withLayer(Integer layer) { setLayer(layer); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getFadeOut() != null) sb.append("FadeOut: ").append(getFadeOut()).append(","); if (getLayer() != null) sb.append("Layer: ").append(getLayer()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof StaticImageDeactivateScheduleActionSettings == false) return false; StaticImageDeactivateScheduleActionSettings other = (StaticImageDeactivateScheduleActionSettings) obj; if (other.getFadeOut() == null ^ this.getFadeOut() == null) return false; if (other.getFadeOut() != null && other.getFadeOut().equals(this.getFadeOut()) == false) return false; if (other.getLayer() == null ^ this.getLayer() == null) return false; if (other.getLayer() != null && other.getLayer().equals(this.getLayer()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getFadeOut() == null) ? 0 : getFadeOut().hashCode()); hashCode = prime * hashCode + ((getLayer() == null) ? 0 : getLayer().hashCode()); return hashCode; } @Override public StaticImageDeactivateScheduleActionSettings clone() { try { return (StaticImageDeactivateScheduleActionSettings) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.medialive.model.transform.StaticImageDeactivateScheduleActionSettingsMarshaller.getInstance().marshall(this, protocolMarshaller); } }
Java
# Arkezostis tayuya Kuntze SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
/**
 * Copyright (C) 2010-2013 Alibaba Group Holding Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.rocketmq.broker.client;

import io.netty.channel.Channel;

import java.util.List;

/**
 * Callback notified when the set of consumers registered under a consumer group changes.
 *
 * @author shijia.wxr<vintage.wang@gmail.com>
 * @since 2013-6-24
 */
public interface ConsumerIdsChangeListener {

    /**
     * Invoked after the membership of {@code group} has changed.
     * (Redundant {@code public}/{@code final} modifiers removed: interface methods
     * are implicitly public, and {@code final} on an abstract method's parameters
     * has no effect.)
     *
     * @param group    name of the consumer group whose membership changed
     * @param channels the channels of the group's currently connected consumers
     */
    void consumerIdsChanged(String group, List<Channel> channels);
}
Java
# Lampocarya affinis Brongn. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
# Myriocladus maguirei Swallen SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
# Solanum apiculatibaccatum Bitter SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
# Gnaphalium angustifolium Lam. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in Encycl. 2:746. 1788 #### Original name null ### Remarks null
Java
# Entylomella microstigma (Sacc.) Cif., 1959 SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in Lejeunia Mém. 177 (1959) #### Original name null ### Remarks null
Java
# Acacia pennatula (Schltdl. & Cham.) Benth. SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in London J. Bot. 1:390. 1842 #### Original name null ### Remarks null
Java
# Niptera dilutella (Fr.) Rehm SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
// copyright (c) 2017 vmware, inc. all rights reserved. // // licensed under the apache license, version 2.0 (the "license"); // you may not use this file except in compliance with the license. // you may obtain a copy of the license at // // http://www.apache.org/licenses/license-2.0 // // unless required by applicable law or agreed to in writing, software // distributed under the license is distributed on an "as is" basis, // without warranties or conditions of any kind, either express or implied. // see the license for the specific language governing permissions and // limitations under the license. package dao import ( "github.com/astaxie/beego/orm" "github.com/vmware/harbor/src/common/models" "fmt" "time" ) // AddScanJob ... func AddScanJob(job models.ScanJob) (int64, error) { o := GetOrmer() if len(job.Status) == 0 { job.Status = models.JobPending } return o.Insert(&job) } // GetScanJob ... func GetScanJob(id int64) (*models.ScanJob, error) { o := GetOrmer() j := models.ScanJob{ID: id} err := o.Read(&j) if err == orm.ErrNoRows { return nil, nil } return &j, nil } // GetScanJobsByImage returns a list of scan jobs with given repository and tag func GetScanJobsByImage(repository, tag string, limit ...int) ([]*models.ScanJob, error) { var res []*models.ScanJob _, err := scanJobQs(limit...).Filter("repository", repository).Filter("tag", tag).OrderBy("-id").All(&res) return res, err } // GetScanJobsByDigest returns a list of scan jobs with given digest func GetScanJobsByDigest(digest string, limit ...int) ([]*models.ScanJob, error) { var res []*models.ScanJob _, err := scanJobQs(limit...).Filter("digest", digest).OrderBy("-id").All(&res) return res, err } // UpdateScanJobStatus updates the status of a scan job. 
func UpdateScanJobStatus(id int64, status string) error { o := GetOrmer() sj := models.ScanJob{ ID: id, Status: status, UpdateTime: time.Now(), } n, err := o.Update(&sj, "Status", "UpdateTime") if n == 0 { return fmt.Errorf("Failed to update scan job with id: %d, error: %v", id, err) } return err } func scanJobQs(limit ...int) orm.QuerySeter { o := GetOrmer() l := -1 if len(limit) == 1 { l = limit[0] } return o.QueryTable(models.ScanJobTable).Limit(l) }
Java
package com.bjorktech.cayman.idea.designpattern.structure.proxy;

/**
 * Concrete subject of the proxy pattern: performs the arithmetic itself,
 * printing each result to stdout before returning it.
 */
public class TargetClass implements TargetInterface {

    @Override
    public long add(long a, long b) {
        final long sum = a + b;
        System.out.println(sum);
        return sum;
    }

    @Override
    public long sub(long a, long b) {
        final long difference = a - b;
        System.out.println(difference);
        return difference;
    }
}
Java
package com.badlogic.gdx.ingenuity.scene2d; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.ingenuity.GdxData; import com.badlogic.gdx.ingenuity.helper.PixmapHelper; import com.badlogic.gdx.ingenuity.utils.GdxUtilities; import com.badlogic.gdx.scenes.scene2d.Group; import com.badlogic.gdx.scenes.scene2d.Stage; import com.badlogic.gdx.scenes.scene2d.actions.Actions; import com.badlogic.gdx.scenes.scene2d.ui.Image; import com.badlogic.gdx.utils.Align; import com.badlogic.gdx.utils.Disposable; /** * @作者 Mitkey * @时间 2017年3月24日 下午3:09:56 * @类说明: * @版本 xx */ public class Loading implements Disposable { private Group root = new Group(); private Image imgOut; private Image imgInner; public Loading() { root.setSize(GdxData.WIDTH, GdxData.HEIGHT); Image imgBg = new Image(PixmapHelper.getInstance().newTranslucentDrawable(5, 5)); imgBg.setFillParent(true); root.addActor(imgBg); imgOut = new Image(PixmapHelper.getInstance().newRectangleDrawable(Color.YELLOW, 40, 40)); imgOut.setOrigin(Align.center); imgInner = new Image(PixmapHelper.getInstance().newCircleDrawable(Color.RED, 18)); imgInner.setOrigin(Align.center); GdxUtilities.center(imgOut); GdxUtilities.center(imgInner); root.addActor(imgOut); root.addActor(imgInner); } public void show(Stage stage) { stage.addActor(root); root.toFront(); imgOut.clearActions(); imgOut.addAction(Actions.forever(Actions.rotateBy(-360, 1f))); imgInner.clearActions(); imgInner.addAction(Actions.forever(Actions.rotateBy(360, 2f))); } public void hide() { root.remove(); } @Override public void dispose() { hide(); } }
Java
<?php
/**
 * amadeus-ws-client
 *
 * Copyright 2020 Amadeus Benelux NV
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @package Amadeus
 * @license https://opensource.org/licenses/Apache-2.0 Apache 2.0
 */

namespace Amadeus\Client\RequestCreator\Converter\Fare;

use Amadeus\Client\RequestCreator\Converter\BaseConverter;
use Amadeus\Client\RequestOptions\FarePriceUpsellWithoutPnrOptions;
use Amadeus\Client\Struct;

/**
 * Converter for the Fare_PriceUpsellWithoutPNR request message.
 *
 * Maps the public request options object onto the wire-level struct.
 *
 * @package Amadeus\Client\RequestCreator\Converter\Fare
 * @author Valerii Nezhurov <valeriy.nezhuriov@gmail.com>
 */
class PriceUpsellWithoutPNRConv extends BaseConverter
{
    /**
     * Build the message struct from the given options.
     *
     * @param FarePriceUpsellWithoutPnrOptions $requestOptions
     * @param int|string $version message version (unused by this converter)
     * @return Struct\Fare\PriceUpsellWithoutPNR
     */
    public function convert($requestOptions, $version)
    {
        $message = new Struct\Fare\PriceUpsellWithoutPNR($requestOptions);

        return $message;
    }
}
Java
package com.concavenp.nanodegree.shared;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, executed on the development machine (JVM).
 *
 * To work on unit tests, switch the Test Artifact in the Build Variants view.
 */
public class ExampleUnitTest {

    @Test
    public void addition_isCorrect() throws Exception {
        // Sanity check that the test harness itself runs.
        final int expected = 4;
        assertEquals(expected, 2 + 2);
    }
}
Java
<!DOCTYPE HTML> <html> <head> <title>{#$site_title#}</title> <meta charset="utf-8"> <meta name="Keywords" content="{#$site_keywords#}" /> <meta name="Description" content="{#$site_description#}" /> <link href="{#$site_root#}themes/default/skin/nav.css" rel="stylesheet" type="text/css" /> <script type="text/javascript">var sitepath = '{#$site_root#}'; var rewrite = '{#$cfg.link_struct#}';</script> <script type="text/javascript" src="{#$site_root#}public/scripts/jquery.min.js"></script> <script type="text/javascript" src="{#$site_root#}public/scripts/common.js"></script> </head> <body> {#include file="topbar.html"#} <div id="wrapper"> <div id="header"> <div id="topbox"> <a href="{#$site_url#}" class="logo" title="{#$site_title#}"></a> <div id="sobox"> <form name="sofrm" class="sofrm" method="get" action="" onSubmit="return rewrite_search()"> <input name="mod" type="hidden" id="mod" value="search" /> <input name="type" type="hidden" id="type" value="name" /> <div id="selopt"> <div id="cursel">网站名称</div> <ul id="options"> <li><a href="javascript: void(0);" name="name">网站名称</a></li> <li><a href="javascript: void(0);" name="url">网站地址</a></li> <li><a href="javascript: void(0);" name="tags">TAG标签</a></li> <li><a href="javascript: void(0);" name="intro">网站描述</a></li> </ul> </div> <input name="query" type="text" class="sipt" id="query" onFocus="this.value='';" /><input type="submit" class="sbtn" value="搜 索" /> </form> </div> </div> <div id="navbox"> <ul class="navbar"> <li><a href="?mod=index">网站首页</a></li><li class="navline"></li> <li><a href="?mod=webdir">网站目录</a></li><li class="navline"></li> <li><a href="?mod=article">站长资讯</a></li><li class="navline"></li> <li><a href="?mod=weblink">链接交换</a></li><li class="navline"></li> <li><a href="?mod=category">分类浏览</a></li><li class="navline"></li> <li><a href="?mod=update">最新收录</a></li><li class="navline"></li> <li><a href="?mod=archives">数据归档</a></li><li class="navline"></li> <li><a href="?mod=top">TOP排行榜</a></li><li 
class="navline"></li> <li><a href="?mod=feedback">意见反馈</a></li><li class="navline"></li> </ul> </div> <div id="txtbox"> <div class="count">数据统计:<b>{#$stat.category#}</b>个主题分类,<b>{#$stat.website#}</b>个优秀站点,<b>{#$stat.article#}</b>篇站长资讯</div> <div class="link">快捷方式:<a href="{#$site_root#}member/?mod=website&act=add">网站提交</a> - <a href="{#$site_root#}member/?mod=article&act=add">软文投稿</a> - <a href="{#$site_root#}?mod=diypage&pid=1">帮助中心</a></div> </div> </div> <div class="blank10"></div> <div id="quickbox"><strong>快速审核:</strong>{#foreach from=get_websites(0, 12, true) item=quick#}<a href="{#$quick.web_link#}" title="{#$quick.web_name#}">{#$quick.web_name#}</a>{#/foreach#}</div> <div class="blank10"></div> <div id="homebox"> <div id="homebox-left"> <dl id="hcatebox" class="clearfix"> {#foreach from=get_categories() item=cate#} {#if $cate.cate_mod == 'webdir'#} <dt><a href="{#$cate.cate_link#}">{#$cate.cate_name#}</a></dt> <dd> <ul class="hcatelist"> {#foreach from=get_categories($cate.cate_id) item=scate#} <li><a href="{#$scate.cate_link#}">{#$scate.cate_name#}</a></li> {#/foreach#} </ul> </dd> {#/if#} {#/foreach#} </dl> <div class="blank10"></div> <div id="newbox"> <h3>最新收录</h3> <ul class="newlist"> {#foreach from=get_websites(0, 14) item=new#} <li><span>{#$new.web_ctime#}</span><a href="{#$new.web_link#}" title="{#$new.web_name#}">{#$new.web_name#}</a></li> {#/foreach#} </ul> </div> </div> <div id="homebox-right"> <div id="bestbox"> <h3><span>站长推荐</span></h3> <ul class="clearfix bestlist"> {#foreach from=get_websites(0, 35, false, true) item=best#} <li><a href="{#$best.web_link#}" title="{#$best.web_name#}">{#$best.web_name#}</a></li> {#/foreach#} </ul> </div> <div class="blank10"></div> <div id="coolbox" class="clearfix"> <h3>酷站导航</h3> <ul class="csitelist"> {#foreach from=get_best_categories() item=cate name=csite#} <li><h4><a href="{#$cate.cate_link#}">{#$cate.cate_name#}</a></h4><a href="{#$cate.cate_link#}" class="more">更多>></a>{#foreach 
from=get_websites($cate.cate_id, 5) item=cool#}<span><a href="{#$cool.web_link#}" title="{#$cool.web_name#}">{#$cool.web_name#}</a></span>{#/foreach#}</li> {#if $smarty.foreach.csite.iteration % 5 == 0 && $smarty.foreach.csite.iteration != 20#} <li class="sline"></li> {#/if#} {#/foreach#} </ul> </div> <div class="blank10"></div> <div id="rowbox" class="clearfix"> <div id="newsbox"> <h3>站点资讯</h3> <ul class="newslist"> {#foreach from=get_articles(0, 8, false) item=art#} <li><span>{#$art.art_ctime#}</span><a href="{#$art.art_link#}">{#$art.art_title#}</a></li> {#/foreach#} </ul> </div> <div class="line"></div> <div id="exlink"> <h3>链接交换</h3> <ul class="exlist"> {#foreach from=get_weblinks(0, 8) item=link#} <li><a href="{#$link.web_link#}">{#$link.link_name#} - PR{#$link.web_grank#},百度权重{#$link.web_brank#},{#$link.deal_type#}友情链接</a></li> {#/foreach#} </ul> </div> </div> </div> </div> <div class="blank10"></div> <div id="inbox" class="clearfix"> <h3>最新点入</h3> <ul class="inlist"> {#nocache#} {#foreach from=get_websites(0, 30, false, false, 'instat') item=instat#} <li><a href="{#$instat.web_link#}" title="{#$instat.web_name#}">{#$instat.web_name#}</a></li> {#/foreach#} {#/nocache#} </ul> </div> <div class="blank10"></div> <div id="linkbox" class="clearfix"> <h3>友情链接</h3> <ul class="linklist"> {#foreach from=get_links() item=link#} <li><a href="{#$link.link_url#}" target="_blank">{#$link.link_name#}</a></li> {#/foreach#} </ul> </div> {#include file="footer.html"#} </div> </body> </html>
Java
/*
 * Copyright (c) 2013-2015 Josef Hardi <josef.hardi@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.obidea.semantika.datatype;

import com.obidea.semantika.datatype.exception.InvalidLexicalFormException;
import com.obidea.semantika.datatype.primitive.XsdDecimal;

/**
 * Base class for XML Schema datatypes derived from {@code xsd:decimal}.
 *
 * Subclasses supply the actual parsing/validation of the lexical form via
 * {@link #parseLexicalForm(String)}; this class wires the common plumbing
 * (primitive datatype, value extraction, non-primitive flag).
 */
public abstract class AbstractDerivedDecimalType extends AbstractXmlType<Number>
{
   protected AbstractDerivedDecimalType(String name)
   {
      super(name);
   }

   /**
    * All decimal-derived types share {@code xsd:decimal} as their primitive base.
    */
   @Override
   public IDatatype<?> getPrimitiveDatatype()
   {
      return XsdDecimal.getInstance();
   }

   /**
    * Delegates to the subclass parser; throws if the lexical form is invalid.
    */
   @Override
   public Number getValue(String lexicalForm)
   {
      return parseLexicalForm(lexicalForm);
   }

   /**
    * Derived types are, by definition, never primitive.
    */
   @Override
   public boolean isPrimitive()
   {
      return false;
   }

   /**
    * Parse and validate a lexical form of the literal.
    *
    * @param lexicalForm
    *           the lexical form of the literal.
    * @return A <code>Number</code> representation of the literal
    * @throws InvalidLexicalFormException
    *            if the literal form is invalid or the value is out of range
    */
   protected abstract Number parseLexicalForm(String lexicalForm) throws InvalidLexicalFormException;
}
Java
using System;

namespace EtoTest.Model
{
    /// <summary>
    /// Version bookkeeping for the local data file, used to detect remote
    /// updates, local offline edits, and interrupted operations on restart.
    /// </summary>
    public class DataFileVersion
    {
        /// <summary>
        /// The name of this station - written to the name of conflict files when resyncing.
        /// </summary>
        public String StationName { get; set; }

        /// <summary>
        /// If this ID is different from the current one on Google Drive then updates have occurred on Google.
        /// </summary>
        public int FromVersionId { get; set; }

        /// <summary>
        /// If we have made some of our own updates when offline, then this value is incremented from 0.
        /// </summary>
        public int? CurrentVersionId { get; set; }

        /// <summary>
        /// A description of the operation that we are about to attempt. If this value is set, then we crashed before the last operation completed.
        /// Set to null immediately after completing an operation.
        /// </summary>
        public String BeforeOperation { get; set; }
    }
}
Java
/* Copyright (C) 2013-2020 Expedia Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.hotels.styx.support.matchers; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; import java.util.Objects; import java.util.Optional; /** * Provides matchers around the {@code Optional} class. * * @param <T> * @author john.butler * @see Optional */ public final class IsOptional<T> extends TypeSafeMatcher<Optional<? extends T>> { /** * Checks that the passed Optional is not present. */ public static IsOptional<Object> isAbsent() { return new IsOptional<>(false); } /** * Checks that the passed Optional is present. */ public static IsOptional<Object> isPresent() { return new IsOptional<>(true); } public static <T> IsOptional<T> isValue(T value) { return new IsOptional<>(value); } public static <T> IsOptional<T> matches(Matcher<T> matcher) { return new IsOptional<>(matcher); } public static <T extends Iterable> IsOptional<T> isIterable(Matcher<? 
extends Iterable> matcher) { return new IsOptional<>((Matcher) matcher); } private final boolean someExpected; private final Optional<T> expected; private final Optional<Matcher<T>> matcher; private IsOptional(boolean someExpected) { this.someExpected = someExpected; this.expected = Optional.empty(); this.matcher = Optional.empty(); } private IsOptional(T value) { this.someExpected = true; this.expected = Optional.of(value); this.matcher = Optional.empty(); } private IsOptional(Matcher<T> matcher) { this.someExpected = true; this.expected = Optional.empty(); this.matcher = Optional.of(matcher); } @Override public void describeTo(Description description) { if (!someExpected) { description.appendText("<Absent>"); } else if (expected.isPresent()) { description.appendValue(expected); } else if (matcher.isPresent()) { description.appendText("a present value matching "); matcher.get().describeTo(description); } else { description.appendText("<Present>"); } } @Override public boolean matchesSafely(Optional<? extends T> item) { if (!someExpected) { return !item.isPresent(); } else if (expected.isPresent()) { return item.isPresent() && Objects.equals(item.get(), expected.get()); } else if (matcher.isPresent()) { return item.isPresent() && matcher.get().matches(item.get()); } else { return item.isPresent(); } } }
Java
-- Start of IDN Tables -- IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_BASE_TABLE]') AND TYPE IN (N'U')) CREATE TABLE IDN_BASE_TABLE ( PRODUCT_NAME VARCHAR(20), PRIMARY KEY (PRODUCT_NAME) ); INSERT INTO IDN_BASE_TABLE values ('WSO2 Identity Server'); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH_CONSUMER_APPS]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH_CONSUMER_APPS ( ID INTEGER IDENTITY, CONSUMER_KEY VARCHAR(255), CONSUMER_SECRET VARCHAR(2048), USERNAME VARCHAR(255), TENANT_ID INTEGER DEFAULT 0, USER_DOMAIN VARCHAR(50), APP_NAME VARCHAR(255), OAUTH_VERSION VARCHAR(128), CALLBACK_URL VARCHAR(2048), GRANT_TYPES VARCHAR(1024), PKCE_MANDATORY CHAR(1) DEFAULT '0', PKCE_SUPPORT_PLAIN CHAR(1) DEFAULT '0', APP_STATE VARCHAR (25) DEFAULT 'ACTIVE', USER_ACCESS_TOKEN_EXPIRE_TIME BIGINT DEFAULT 3600, APP_ACCESS_TOKEN_EXPIRE_TIME BIGINT DEFAULT 3600, REFRESH_TOKEN_EXPIRE_TIME BIGINT DEFAULT 84600, ID_TOKEN_EXPIRE_TIME BIGINT DEFAULT 3600, CONSTRAINT CONSUMER_KEY_CONSTRAINT UNIQUE (CONSUMER_KEY), PRIMARY KEY (ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_SCOPE_VALIDATORS]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_SCOPE_VALIDATORS ( APP_ID INTEGER NOT NULL, SCOPE_VALIDATOR VARCHAR (128) NOT NULL, PRIMARY KEY (APP_ID,SCOPE_VALIDATOR), FOREIGN KEY (APP_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH1A_REQUEST_TOKEN]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH1A_REQUEST_TOKEN ( REQUEST_TOKEN VARCHAR(512), REQUEST_TOKEN_SECRET VARCHAR(512), CONSUMER_KEY_ID INTEGER, CALLBACK_URL VARCHAR(2048), SCOPE VARCHAR(2048), AUTHORIZED VARCHAR(128), OAUTH_VERIFIER VARCHAR(512), AUTHZ_USER VARCHAR(512), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (REQUEST_TOKEN), FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE ); IF NOT 
EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH1A_ACCESS_TOKEN]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH1A_ACCESS_TOKEN ( ACCESS_TOKEN VARCHAR(512), ACCESS_TOKEN_SECRET VARCHAR(512), CONSUMER_KEY_ID INTEGER, SCOPE VARCHAR(2048), AUTHZ_USER VARCHAR(512), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (ACCESS_TOKEN), FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_ACCESS_TOKEN]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_ACCESS_TOKEN ( TOKEN_ID VARCHAR (255), ACCESS_TOKEN VARCHAR(2048), REFRESH_TOKEN VARCHAR(2048), CONSUMER_KEY_ID INTEGER, AUTHZ_USER VARCHAR (100), TENANT_ID INTEGER, USER_DOMAIN VARCHAR(50), USER_TYPE VARCHAR (25), GRANT_TYPE VARCHAR (50), TIME_CREATED DATETIME, REFRESH_TOKEN_TIME_CREATED DATETIME, VALIDITY_PERIOD BIGINT, REFRESH_TOKEN_VALIDITY_PERIOD BIGINT, TOKEN_SCOPE_HASH VARCHAR(32), TOKEN_STATE VARCHAR(25) DEFAULT 'ACTIVE', TOKEN_STATE_ID VARCHAR (128) DEFAULT 'NONE', SUBJECT_IDENTIFIER VARCHAR(255), ACCESS_TOKEN_HASH VARCHAR(512), REFRESH_TOKEN_HASH VARCHAR(512), IDP_ID INTEGER DEFAULT -1 NOT NULL, TOKEN_BINDING_REF VARCHAR (32) DEFAULT 'NONE', PRIMARY KEY (TOKEN_ID), FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE, CONSTRAINT CON_APP_KEY UNIQUE (CONSUMER_KEY_ID,AUTHZ_USER,TENANT_ID,USER_DOMAIN,USER_TYPE,TOKEN_SCOPE_HASH, TOKEN_STATE,TOKEN_STATE_ID,IDP_ID,TOKEN_BINDING_REF) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_TOKEN_BINDING]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_TOKEN_BINDING ( TOKEN_ID VARCHAR (255), TOKEN_BINDING_TYPE VARCHAR (32), TOKEN_BINDING_REF VARCHAR (32), TOKEN_BINDING_VALUE VARCHAR (1024), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (TOKEN_ID), FOREIGN KEY (TOKEN_ID) REFERENCES IDN_OAUTH2_ACCESS_TOKEN(TOKEN_ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS 
WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_ACCESS_TOKEN_AUDIT]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_ACCESS_TOKEN_AUDIT ( TOKEN_ID VARCHAR (255), ACCESS_TOKEN VARCHAR(2048), REFRESH_TOKEN VARCHAR(2048), CONSUMER_KEY_ID INTEGER, AUTHZ_USER VARCHAR (100), TENANT_ID INTEGER, USER_DOMAIN VARCHAR(50), USER_TYPE VARCHAR (25), GRANT_TYPE VARCHAR (50), TIME_CREATED DATETIME, REFRESH_TOKEN_TIME_CREATED DATETIME, VALIDITY_PERIOD BIGINT, REFRESH_TOKEN_VALIDITY_PERIOD BIGINT, TOKEN_SCOPE_HASH VARCHAR(32), TOKEN_STATE VARCHAR(25), TOKEN_STATE_ID VARCHAR (128) , SUBJECT_IDENTIFIER VARCHAR(255), ACCESS_TOKEN_HASH VARCHAR(512), REFRESH_TOKEN_HASH VARCHAR(512), INVALIDATED_TIME DATETIME, IDP_ID INTEGER DEFAULT -1 NOT NULL ); IF EXISTS (SELECT NAME FROM SYSINDEXES WHERE NAME = 'IDX_AT_CK_AU') DROP INDEX IDN_OAUTH2_ACCESS_TOKEN.IDX_AT_CK_AU IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_AUTHORIZATION_CODE]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_AUTHORIZATION_CODE ( CODE_ID VARCHAR (255), AUTHORIZATION_CODE VARCHAR(2048), CONSUMER_KEY_ID INTEGER, CALLBACK_URL VARCHAR(2048), SCOPE VARCHAR(2048), AUTHZ_USER VARCHAR (100), TENANT_ID INTEGER, USER_DOMAIN VARCHAR(50), TIME_CREATED DATETIME, VALIDITY_PERIOD BIGINT, STATE VARCHAR (25) DEFAULT 'ACTIVE', TOKEN_ID VARCHAR(255), SUBJECT_IDENTIFIER VARCHAR(255), PKCE_CODE_CHALLENGE VARCHAR (255), PKCE_CODE_CHALLENGE_METHOD VARCHAR(128), AUTHORIZATION_CODE_HASH VARCHAR(512), IDP_ID INTEGER DEFAULT -1 NOT NULL, PRIMARY KEY (CODE_ID), FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_AUTHZ_CODE_SCOPE]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_AUTHZ_CODE_SCOPE ( CODE_ID VARCHAR (255), SCOPE VARCHAR (60), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (CODE_ID, SCOPE), FOREIGN KEY (CODE_ID) REFERENCES IDN_OAUTH2_AUTHORIZATION_CODE(CODE_ID) ON DELETE CASCADE ); IF 
NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_DEVICE_FLOW]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_DEVICE_FLOW ( CODE_ID VARCHAR(255), DEVICE_CODE VARCHAR(255), USER_CODE VARCHAR(25), CONSUMER_KEY_ID INTEGER, LAST_POLL_TIME DATETIME NOT NULL, EXPIRY_TIME DATETIME NOT NULL, TIME_CREATED DATETIME NOT NULL, POLL_TIME BIGINT, STATUS VARCHAR (25) DEFAULT 'PENDING', AUTHZ_USER VARCHAR (100), TENANT_ID INTEGER, USER_DOMAIN VARCHAR(50), IDP_ID INTEGER, PRIMARY KEY (DEVICE_CODE), UNIQUE (CODE_ID), UNIQUE (USER_CODE), FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_DEVICE_FLOW_SCOPES]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_DEVICE_FLOW_SCOPES ( ID INTEGER NOT NULL IDENTITY, SCOPE_ID VARCHAR(255), SCOPE VARCHAR(255), PRIMARY KEY (ID), FOREIGN KEY (SCOPE_ID) REFERENCES IDN_OAUTH2_DEVICE_FLOW(CODE_ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_ACCESS_TOKEN_SCOPE]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_ACCESS_TOKEN_SCOPE ( TOKEN_ID VARCHAR (255), TOKEN_SCOPE VARCHAR (60), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (TOKEN_ID, TOKEN_SCOPE), FOREIGN KEY (TOKEN_ID) REFERENCES IDN_OAUTH2_ACCESS_TOKEN(TOKEN_ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_SCOPE]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_SCOPE ( SCOPE_ID INTEGER IDENTITY, NAME VARCHAR(255) NOT NULL, DISPLAY_NAME VARCHAR(255) NOT NULL, DESCRIPTION VARCHAR(512), TENANT_ID INTEGER NOT NULL DEFAULT -1, SCOPE_TYPE VARCHAR(255) NOT NULL, PRIMARY KEY (SCOPE_ID), UNIQUE (NAME, TENANT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_SCOPE_BINDING]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_SCOPE_BINDING ( SCOPE_ID INTEGER NOT NULL, SCOPE_BINDING VARCHAR(255) 
NOT NULL, BINDING_TYPE VARCHAR(255) NOT NULL, FOREIGN KEY (SCOPE_ID) REFERENCES IDN_OAUTH2_SCOPE(SCOPE_ID) ON DELETE CASCADE, UNIQUE (SCOPE_ID, SCOPE_BINDING, BINDING_TYPE) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_RESOURCE_SCOPE]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_RESOURCE_SCOPE ( RESOURCE_PATH VARCHAR(255) NOT NULL, SCOPE_ID INTEGER NOT NULL, TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (RESOURCE_PATH), FOREIGN KEY (SCOPE_ID) REFERENCES IDN_OAUTH2_SCOPE (SCOPE_ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_SCIM_GROUP]') AND TYPE IN (N'U')) CREATE TABLE IDN_SCIM_GROUP ( ID INTEGER IDENTITY, TENANT_ID INTEGER NOT NULL, ROLE_NAME VARCHAR(255) NOT NULL, ATTR_NAME VARCHAR(1024) NOT NULL, ATTR_VALUE VARCHAR(1024), PRIMARY KEY (ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OPENID_REMEMBER_ME]') AND TYPE IN (N'U')) CREATE TABLE IDN_OPENID_REMEMBER_ME ( USER_NAME VARCHAR(255) NOT NULL, TENANT_ID INTEGER DEFAULT 0, COOKIE_VALUE VARCHAR(1024), CREATED_TIME DATETIME, PRIMARY KEY (USER_NAME, TENANT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OPENID_USER_RPS]') AND TYPE IN (N'U')) CREATE TABLE IDN_OPENID_USER_RPS ( USER_NAME VARCHAR(255) NOT NULL, TENANT_ID INTEGER DEFAULT 0, RP_URL VARCHAR(255) NOT NULL, TRUSTED_ALWAYS VARCHAR(128) DEFAULT 'FALSE', LAST_VISIT DATE NOT NULL, VISIT_COUNT INTEGER DEFAULT 0, DEFAULT_PROFILE_NAME VARCHAR(255) DEFAULT 'DEFAULT', PRIMARY KEY (USER_NAME, TENANT_ID, RP_URL) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OPENID_ASSOCIATIONS]') AND TYPE IN (N'U')) CREATE TABLE IDN_OPENID_ASSOCIATIONS ( HANDLE VARCHAR(255) NOT NULL, ASSOC_TYPE VARCHAR(255) NOT NULL, EXPIRE_IN DATETIME NOT NULL, MAC_KEY VARCHAR(255) NOT NULL, ASSOC_STORE VARCHAR(128) DEFAULT 'SHARED', TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY 
(HANDLE) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_STS_STORE]') AND TYPE IN (N'U')) CREATE TABLE IDN_STS_STORE ( ID INTEGER IDENTITY, TOKEN_ID VARCHAR(255) NOT NULL, TOKEN_CONTENT VARBINARY(MAX) NOT NULL, CREATE_DATE DATETIME NOT NULL, EXPIRE_DATE DATETIME NOT NULL, STATE INTEGER DEFAULT 0, PRIMARY KEY (ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_IDENTITY_USER_DATA]') AND TYPE IN (N'U')) CREATE TABLE IDN_IDENTITY_USER_DATA ( TENANT_ID INTEGER DEFAULT -1234, USER_NAME VARCHAR(255) NOT NULL, DATA_KEY VARCHAR(255) NOT NULL, DATA_VALUE VARCHAR(2048), PRIMARY KEY (TENANT_ID, USER_NAME, DATA_KEY) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_IDENTITY_META_DATA]') AND TYPE IN (N'U')) CREATE TABLE IDN_IDENTITY_META_DATA ( USER_NAME VARCHAR(255) NOT NULL, TENANT_ID INTEGER DEFAULT -1234, METADATA_TYPE VARCHAR(255) NOT NULL, METADATA VARCHAR(255) NOT NULL, VALID VARCHAR(255) NOT NULL, PRIMARY KEY (TENANT_ID, USER_NAME, METADATA_TYPE,METADATA) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_THRIFT_SESSION]') AND TYPE IN (N'U')) CREATE TABLE IDN_THRIFT_SESSION ( SESSION_ID VARCHAR(255) NOT NULL, USER_NAME VARCHAR(255) NOT NULL, CREATED_TIME VARCHAR(255) NOT NULL, LAST_MODIFIED_TIME VARCHAR(255) NOT NULL, TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (SESSION_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_SESSION_STORE]') AND TYPE IN (N'U')) CREATE TABLE IDN_AUTH_SESSION_STORE ( SESSION_ID VARCHAR (100) NOT NULL, SESSION_TYPE VARCHAR(100) NOT NULL, OPERATION VARCHAR(10) NOT NULL, SESSION_OBJECT VARBINARY(MAX), TIME_CREATED BIGINT, TENANT_ID INTEGER DEFAULT -1, EXPIRY_TIME BIGINT, PRIMARY KEY (SESSION_ID, SESSION_TYPE, TIME_CREATED, OPERATION) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_SESSION_APP_INFO]') AND TYPE IN (N'U')) 
CREATE TABLE IDN_AUTH_SESSION_APP_INFO ( SESSION_ID VARCHAR (100) NOT NULL, SUBJECT VARCHAR (100) NOT NULL, APP_ID INTEGER NOT NULL, INBOUND_AUTH_TYPE VARCHAR (255) NOT NULL, PRIMARY KEY (SESSION_ID, SUBJECT, APP_ID, INBOUND_AUTH_TYPE) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_SESSION_META_DATA]') AND TYPE IN (N'U')) CREATE TABLE IDN_AUTH_SESSION_META_DATA ( SESSION_ID VARCHAR (100) NOT NULL, PROPERTY_TYPE VARCHAR (100) NOT NULL, VALUE VARCHAR (255) NOT NULL, PRIMARY KEY (SESSION_ID, PROPERTY_TYPE, VALUE) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_TEMP_SESSION_STORE]') AND TYPE IN (N'U')) CREATE TABLE IDN_AUTH_TEMP_SESSION_STORE ( SESSION_ID VARCHAR (100) NOT NULL, SESSION_TYPE VARCHAR(100) NOT NULL, OPERATION VARCHAR(10) NOT NULL, SESSION_OBJECT VARBINARY(MAX), TIME_CREATED BIGINT, TENANT_ID INTEGER DEFAULT -1, EXPIRY_TIME BIGINT, PRIMARY KEY (SESSION_ID, SESSION_TYPE, TIME_CREATED, OPERATION) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_USER]') AND TYPE IN (N'U')) CREATE TABLE IDN_AUTH_USER ( USER_ID VARCHAR(255) NOT NULL, USER_NAME VARCHAR(255) NOT NULL, TENANT_ID INTEGER NOT NULL, DOMAIN_NAME VARCHAR(255) NOT NULL, IDP_ID INTEGER NOT NULL, PRIMARY KEY (USER_ID), CONSTRAINT USER_STORE_CONSTRAINT UNIQUE (USER_NAME, TENANT_ID, DOMAIN_NAME, IDP_ID)); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_USER_SESSION_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE IDN_AUTH_USER_SESSION_MAPPING ( USER_ID VARCHAR(255) NOT NULL, SESSION_ID VARCHAR(255) NOT NULL, CONSTRAINT USER_SESSION_STORE_CONSTRAINT UNIQUE (USER_ID, SESSION_ID)); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_APP]') AND TYPE IN (N'U')) CREATE TABLE SP_APP ( ID INTEGER NOT NULL IDENTITY, TENANT_ID INTEGER NOT NULL, APP_NAME VARCHAR (255) NOT NULL , USER_STORE VARCHAR (255) NOT NULL, USERNAME VARCHAR 
-- NOTE(review): concatenated T-SQL (MSSQL) DDL dump; each CREATE TABLE is guarded
-- by IF NOT EXISTS(... SYS.OBJECTS ...) so the script can be re-run safely.
-- This span: tail of SP_APP, then SP_METADATA, SP_INBOUND_AUTH and SP_AUTH_STEP.
(255) NOT NULL , DESCRIPTION VARCHAR (1024), ROLE_CLAIM VARCHAR (512), AUTH_TYPE VARCHAR (255) NOT NULL, PROVISIONING_USERSTORE_DOMAIN VARCHAR (512), IS_LOCAL_CLAIM_DIALECT CHAR(1) DEFAULT '1', IS_SEND_LOCAL_SUBJECT_ID CHAR(1) DEFAULT '0', IS_SEND_AUTH_LIST_OF_IDPS CHAR(1) DEFAULT '0', IS_USE_TENANT_DOMAIN_SUBJECT CHAR(1) DEFAULT '1', IS_USE_USER_DOMAIN_SUBJECT CHAR(1) DEFAULT '1', ENABLE_AUTHORIZATION CHAR(1) DEFAULT '0', SUBJECT_CLAIM_URI VARCHAR (512), IS_SAAS_APP CHAR(1) DEFAULT '0', IS_DUMB_MODE CHAR(1) DEFAULT '0', UUID CHAR(36), IMAGE_URL VARCHAR(1024), ACCESS_URL VARCHAR(1024), IS_DISCOVERABLE CHAR(1) DEFAULT '0', PRIMARY KEY (ID), CONSTRAINT APPLICATION_NAME_CONSTRAINT UNIQUE(APP_NAME, TENANT_ID), CONSTRAINT APPLICATION_UUID_CONSTRAINT UNIQUE(UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_METADATA]') AND TYPE IN (N'U')) CREATE TABLE SP_METADATA ( ID INTEGER IDENTITY, SP_ID INTEGER, NAME VARCHAR(255) NOT NULL, VALUE VARCHAR(255) NOT NULL, DISPLAY_NAME VARCHAR(255), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (ID), CONSTRAINT SP_METADATA_CONSTRAINT UNIQUE (SP_ID, NAME), FOREIGN KEY (SP_ID) REFERENCES SP_APP(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_INBOUND_AUTH]') AND TYPE IN (N'U')) CREATE TABLE SP_INBOUND_AUTH ( ID INTEGER NOT NULL IDENTITY, TENANT_ID INTEGER NOT NULL, INBOUND_AUTH_KEY VARCHAR (255), INBOUND_AUTH_TYPE VARCHAR (255) NOT NULL, INBOUND_CONFIG_TYPE VARCHAR (255) NOT NULL, PROP_NAME VARCHAR (255), PROP_VALUE VARCHAR (1024) , APP_ID INTEGER NOT NULL, PRIMARY KEY (ID), CONSTRAINT APPLICATION_ID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_AUTH_STEP]') AND TYPE IN (N'U')) CREATE TABLE SP_AUTH_STEP ( ID INTEGER NOT NULL IDENTITY, TENANT_ID INTEGER NOT NULL, STEP_ORDER INTEGER DEFAULT 1, APP_ID INTEGER NOT NULL, IS_SUBJECT_STEP CHAR(1)
-- SP_AUTH_STEP continues; then SP_FEDERATED_IDP, SP_CLAIM_DIALECT, SP_CLAIM_MAPPING
-- and SP_ROLE_MAPPING (per-application IdP/claim/role mapping tables).
DEFAULT '0', IS_ATTRIBUTE_STEP CHAR(1) DEFAULT '0', PRIMARY KEY (ID), CONSTRAINT APPLICATION_ID_CONSTRAINT_STEP FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_FEDERATED_IDP]') AND TYPE IN (N'U')) CREATE TABLE SP_FEDERATED_IDP ( ID INTEGER NOT NULL, TENANT_ID INTEGER NOT NULL, AUTHENTICATOR_ID INTEGER NOT NULL, PRIMARY KEY (ID, AUTHENTICATOR_ID), CONSTRAINT STEP_ID_CONSTRAINT FOREIGN KEY (ID) REFERENCES SP_AUTH_STEP (ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_CLAIM_DIALECT]') AND TYPE IN (N'U')) CREATE TABLE SP_CLAIM_DIALECT ( ID INTEGER NOT NULL IDENTITY, TENANT_ID INTEGER NOT NULL, SP_DIALECT VARCHAR (512) NOT NULL, APP_ID INTEGER NOT NULL, PRIMARY KEY (ID), CONSTRAINT DIALECTID_APPID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_CLAIM_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE SP_CLAIM_MAPPING ( ID INTEGER NOT NULL IDENTITY, TENANT_ID INTEGER NOT NULL, IDP_CLAIM VARCHAR (512) NOT NULL , SP_CLAIM VARCHAR (512) NOT NULL , APP_ID INTEGER NOT NULL, IS_REQUESTED VARCHAR(128) DEFAULT '0', IS_MANDATORY VARCHAR(128) DEFAULT '0', DEFAULT_VALUE VARCHAR(255), PRIMARY KEY (ID), CONSTRAINT CLAIMID_APPID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_ROLE_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE SP_ROLE_MAPPING ( ID INTEGER NOT NULL IDENTITY, TENANT_ID INTEGER NOT NULL, IDP_ROLE VARCHAR (255) NOT NULL , SP_ROLE VARCHAR (255) NOT NULL , APP_ID INTEGER NOT NULL, PRIMARY KEY (ID), CONSTRAINT ROLEID_APPID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_REQ_PATH_AUTHENTICATOR]') AND TYPE IN
-- SP_REQ_PATH_AUTHENTICATOR, SP_PROVISIONING_CONNECTOR, SP_AUTH_SCRIPT (adaptive
-- auth scripts), SP_TEMPLATE and IDN_AUTH_WAIT_STATUS (long-wait auth status).
(N'U')) CREATE TABLE SP_REQ_PATH_AUTHENTICATOR ( ID INTEGER NOT NULL IDENTITY, TENANT_ID INTEGER NOT NULL, AUTHENTICATOR_NAME VARCHAR (255) NOT NULL , APP_ID INTEGER NOT NULL, PRIMARY KEY (ID), CONSTRAINT REQ_AUTH_APPID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_PROVISIONING_CONNECTOR]') AND TYPE IN (N'U')) CREATE TABLE SP_PROVISIONING_CONNECTOR ( ID INTEGER NOT NULL IDENTITY, TENANT_ID INTEGER NOT NULL, IDP_NAME VARCHAR (255) NOT NULL , CONNECTOR_NAME VARCHAR (255) NOT NULL , APP_ID INTEGER NOT NULL, IS_JIT_ENABLED CHAR(1) NOT NULL DEFAULT '0', BLOCKING CHAR(1) NOT NULL DEFAULT '0', RULE_ENABLED CHAR(1) NOT NULL DEFAULT '0', PRIMARY KEY (ID), CONSTRAINT PRO_CONNECTOR_APPID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE ); IF NOT EXISTS(SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_AUTH_SCRIPT]') AND TYPE IN (N'U')) CREATE TABLE SP_AUTH_SCRIPT ( ID INTEGER IDENTITY NOT NULL, TENANT_ID INTEGER NOT NULL, APP_ID INTEGER NOT NULL, TYPE VARCHAR(255) NOT NULL, CONTENT VARBINARY(MAX) DEFAULT NULL, IS_ENABLED CHAR(1) NOT NULL DEFAULT '0', PRIMARY KEY (ID) ); IF NOT EXISTS(SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_TEMPLATE]') AND TYPE IN (N'U')) CREATE TABLE SP_TEMPLATE ( ID INTEGER NOT NULL IDENTITY, TENANT_ID INTEGER NOT NULL, NAME VARCHAR(255) NOT NULL, DESCRIPTION VARCHAR(1023), CONTENT VARBINARY(MAX) DEFAULT NULL, PRIMARY KEY (ID), CONSTRAINT SP_TEMPLATE_CONSTRAINT UNIQUE (TENANT_ID, NAME) ); IF NOT EXISTS(SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_WAIT_STATUS]') AND TYPE IN (N'U')) CREATE TABLE IDN_AUTH_WAIT_STATUS ( ID INTEGER IDENTITY NOT NULL, TENANT_ID INTEGER NOT NULL, LONG_WAIT_KEY VARCHAR(255) NOT NULL, WAIT_STATUS CHAR(1) NOT NULL DEFAULT '1', TIME_CREATED DATETIME, EXPIRE_TIME DATETIME, PRIMARY KEY (ID), CONSTRAINT IDN_AUTH_WAIT_STATUS_KEY UNIQUE
-- Identity-provider tables: IDP (main registry), IDP_ROLE, IDP_ROLE_MAPPING
-- (IdP role -> local role) and IDP_CLAIM.
(LONG_WAIT_KEY) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP]') AND TYPE IN (N'U')) CREATE TABLE IDP ( ID INTEGER IDENTITY, TENANT_ID INTEGER, NAME VARCHAR(254) NOT NULL, IS_ENABLED CHAR(1) NOT NULL DEFAULT '1', IS_PRIMARY CHAR(1) NOT NULL DEFAULT '0', HOME_REALM_ID VARCHAR(254), IMAGE VARBINARY(MAX), CERTIFICATE VARBINARY(MAX), ALIAS VARCHAR(254), INBOUND_PROV_ENABLED CHAR(1) NOT NULL DEFAULT '0', INBOUND_PROV_USER_STORE_ID VARCHAR(254), USER_CLAIM_URI VARCHAR(254), ROLE_CLAIM_URI VARCHAR(254), DESCRIPTION VARCHAR(1024), DEFAULT_AUTHENTICATOR_NAME VARCHAR(254), DEFAULT_PRO_CONNECTOR_NAME VARCHAR(254), PROVISIONING_ROLE VARCHAR(128), IS_FEDERATION_HUB CHAR(1) NOT NULL DEFAULT '0', IS_LOCAL_CLAIM_DIALECT CHAR(1) NOT NULL DEFAULT '0', PRIMARY KEY (ID), DISPLAY_NAME VARCHAR(255), IMAGE_URL VARCHAR(1024), UUID CHAR(36) NOT NULL, UNIQUE (TENANT_ID, NAME), UNIQUE (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_ROLE]') AND TYPE IN (N'U')) CREATE TABLE IDP_ROLE ( ID INTEGER IDENTITY, IDP_ID INTEGER, TENANT_ID INTEGER, ROLE VARCHAR(254), PRIMARY KEY (ID), UNIQUE (IDP_ID, ROLE), FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_ROLE_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE IDP_ROLE_MAPPING ( ID INTEGER IDENTITY, IDP_ROLE_ID INTEGER, TENANT_ID INTEGER, USER_STORE_ID VARCHAR (253), LOCAL_ROLE VARCHAR(253), PRIMARY KEY (ID), UNIQUE (IDP_ROLE_ID, TENANT_ID, USER_STORE_ID, LOCAL_ROLE), FOREIGN KEY (IDP_ROLE_ID) REFERENCES IDP_ROLE(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_CLAIM]') AND TYPE IN (N'U')) CREATE TABLE IDP_CLAIM ( ID INTEGER IDENTITY, IDP_ID INTEGER, TENANT_ID INTEGER, CLAIM VARCHAR(254), PRIMARY KEY (ID), UNIQUE (IDP_ID, CLAIM), FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM
-- IDP_CLAIM_MAPPING, IDP_AUTHENTICATOR (+properties), IDP_METADATA and the start
-- of IDP_PROVISIONING_CONFIG.
SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_CLAIM_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE IDP_CLAIM_MAPPING ( ID INTEGER IDENTITY, IDP_CLAIM_ID INTEGER, TENANT_ID INTEGER, LOCAL_CLAIM VARCHAR(253), DEFAULT_VALUE VARCHAR(255), IS_REQUESTED VARCHAR(128) DEFAULT '0', PRIMARY KEY (ID), UNIQUE (IDP_CLAIM_ID, TENANT_ID, LOCAL_CLAIM), FOREIGN KEY (IDP_CLAIM_ID) REFERENCES IDP_CLAIM(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_AUTHENTICATOR]') AND TYPE IN (N'U')) CREATE TABLE IDP_AUTHENTICATOR ( ID INTEGER IDENTITY, TENANT_ID INTEGER, IDP_ID INTEGER, NAME VARCHAR(255) NOT NULL, IS_ENABLED CHAR (1) DEFAULT '1', DISPLAY_NAME VARCHAR(255), PRIMARY KEY (ID), UNIQUE (TENANT_ID, IDP_ID, NAME), FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_METADATA]') AND TYPE IN (N'U')) CREATE TABLE IDP_METADATA ( ID INTEGER IDENTITY, IDP_ID INTEGER, NAME VARCHAR(255) NOT NULL, VALUE VARCHAR(255) NOT NULL, DISPLAY_NAME VARCHAR(255), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (ID), CONSTRAINT IDP_METADATA_CONSTRAINT UNIQUE (IDP_ID, NAME), FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_AUTHENTICATOR_PROPERTY]') AND TYPE IN (N'U')) CREATE TABLE IDP_AUTHENTICATOR_PROPERTY ( ID INTEGER IDENTITY, TENANT_ID INTEGER, AUTHENTICATOR_ID INTEGER, PROPERTY_KEY VARCHAR(255) NOT NULL, PROPERTY_VALUE VARCHAR(2047), IS_SECRET CHAR (1) DEFAULT '0', PRIMARY KEY (ID), UNIQUE (TENANT_ID, AUTHENTICATOR_ID, PROPERTY_KEY), FOREIGN KEY (AUTHENTICATOR_ID) REFERENCES IDP_AUTHENTICATOR(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_PROVISIONING_CONFIG]') AND TYPE IN (N'U')) CREATE TABLE IDP_PROVISIONING_CONFIG ( ID INTEGER IDENTITY, TENANT_ID INTEGER, IDP_ID INTEGER, PROVISIONING_CONNECTOR_TYPE
-- Provisioning config continues; IDP_PROV_CONFIG_PROPERTY, IDP_PROVISIONING_ENTITY
-- and IDP_LOCAL_CLAIM follow.
VARCHAR(255) NOT NULL, IS_ENABLED CHAR (1) DEFAULT '0', IS_BLOCKING CHAR (1) DEFAULT '0', IS_RULES_ENABLED CHAR (1) DEFAULT '0', PRIMARY KEY (ID), UNIQUE (TENANT_ID, IDP_ID, PROVISIONING_CONNECTOR_TYPE), FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_PROV_CONFIG_PROPERTY]') AND TYPE IN (N'U')) CREATE TABLE IDP_PROV_CONFIG_PROPERTY ( ID INTEGER IDENTITY, TENANT_ID INTEGER, PROVISIONING_CONFIG_ID INTEGER, PROPERTY_KEY VARCHAR(255) NOT NULL, PROPERTY_VALUE VARCHAR(2048), PROPERTY_BLOB_VALUE VARBINARY(MAX), PROPERTY_TYPE CHAR(32) NOT NULL, IS_SECRET CHAR (1) DEFAULT '0', PRIMARY KEY (ID), UNIQUE (TENANT_ID, PROVISIONING_CONFIG_ID, PROPERTY_KEY), FOREIGN KEY (PROVISIONING_CONFIG_ID) REFERENCES IDP_PROVISIONING_CONFIG(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_PROVISIONING_ENTITY]') AND TYPE IN (N'U')) CREATE TABLE IDP_PROVISIONING_ENTITY ( ID INTEGER IDENTITY, PROVISIONING_CONFIG_ID INTEGER, ENTITY_TYPE VARCHAR(255) NOT NULL, ENTITY_LOCAL_USERSTORE VARCHAR(255) NOT NULL, ENTITY_NAME VARCHAR(255) NOT NULL, ENTITY_VALUE VARCHAR(255), TENANT_ID INTEGER, ENTITY_LOCAL_ID VARCHAR(255), PRIMARY KEY (ID), UNIQUE (ENTITY_TYPE, TENANT_ID, ENTITY_LOCAL_USERSTORE, ENTITY_NAME, PROVISIONING_CONFIG_ID), UNIQUE (PROVISIONING_CONFIG_ID, ENTITY_TYPE, ENTITY_VALUE), FOREIGN KEY (PROVISIONING_CONFIG_ID) REFERENCES IDP_PROVISIONING_CONFIG(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_LOCAL_CLAIM]') AND TYPE IN (N'U')) CREATE TABLE IDP_LOCAL_CLAIM ( ID INTEGER IDENTITY, TENANT_ID INTEGER, IDP_ID INTEGER, CLAIM_URI VARCHAR(255) NOT NULL, DEFAULT_VALUE VARCHAR(255), IS_REQUESTED VARCHAR(128) DEFAULT '0', PRIMARY KEY (ID), UNIQUE (TENANT_ID, IDP_ID, CLAIM_URI), FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID
-- Federated-account association tables (IDN_ASSOCIATED_ID, keyed by IdP user id;
-- IDN_USER_ACCOUNT_ASSOCIATION), FIDO/FIDO2 device stores, and the start of the
-- workflow (WF_*) tables.
= OBJECT_ID(N'[DBO].[IDN_ASSOCIATED_ID]') AND TYPE IN (N'U')) CREATE TABLE IDN_ASSOCIATED_ID ( ID INTEGER IDENTITY, IDP_USER_ID VARCHAR(255) NOT NULL, TENANT_ID INTEGER DEFAULT -1234, IDP_ID INTEGER NOT NULL, DOMAIN_NAME VARCHAR(255) NOT NULL, USER_NAME VARCHAR(255) NOT NULL, ASSOCIATION_ID CHAR(36) NOT NULL, PRIMARY KEY (ID), UNIQUE(IDP_USER_ID, TENANT_ID, IDP_ID), FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_USER_ACCOUNT_ASSOCIATION]') AND TYPE IN (N'U')) CREATE TABLE IDN_USER_ACCOUNT_ASSOCIATION ( ASSOCIATION_KEY VARCHAR(255) NOT NULL, TENANT_ID INTEGER, DOMAIN_NAME VARCHAR(255) NOT NULL, USER_NAME VARCHAR(255) NOT NULL, PRIMARY KEY (TENANT_ID, DOMAIN_NAME, USER_NAME) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[FIDO_DEVICE_STORE]') AND TYPE IN (N'U')) CREATE TABLE FIDO_DEVICE_STORE ( TENANT_ID INTEGER, DOMAIN_NAME VARCHAR(255) NOT NULL, USER_NAME VARCHAR(45) NOT NULL, TIME_REGISTERED DATETIME, KEY_HANDLE VARCHAR(200) NOT NULL, DEVICE_DATA VARCHAR(2048) NOT NULL, PRIMARY KEY (TENANT_ID, DOMAIN_NAME, USER_NAME, KEY_HANDLE) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[FIDO2_DEVICE_STORE]') AND TYPE IN (N'U')) CREATE TABLE FIDO2_DEVICE_STORE ( TENANT_ID INTEGER, DOMAIN_NAME VARCHAR(255) NOT NULL, USER_NAME VARCHAR(45) NOT NULL, TIME_REGISTERED DATETIME, USER_HANDLE VARCHAR(64) NOT NULL, CREDENTIAL_ID VARCHAR(200) NOT NULL, PUBLIC_KEY_COSE VARCHAR(1024) NOT NULL, SIGNATURE_COUNT BIGINT, USER_IDENTITY VARCHAR(512) NOT NULL, DISPLAY_NAME VARCHAR(255), IS_USERNAMELESS_SUPPORTED CHAR(1) DEFAULT '0', PRIMARY KEY (CREDENTIAL_ID, USER_HANDLE) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_REQUEST]') AND TYPE IN (N'U')) CREATE TABLE WF_REQUEST ( UUID VARCHAR (45), CREATED_BY VARCHAR (255), TENANT_ID INTEGER DEFAULT -1, OPERATION_TYPE VARCHAR (50), CREATED_AT DATETIME,
-- WF_REQUEST continues; then WF_BPS_PROFILE, WF_WORKFLOW, WF_WORKFLOW_ASSOCIATION,
-- WF_WORKFLOW_CONFIG_PARAM and the start of WF_REQUEST_ENTITY_RELATIONSHIP.
UPDATED_AT DATETIME, STATUS VARCHAR (30), REQUEST VARBINARY(MAX), PRIMARY KEY (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_BPS_PROFILE]') AND TYPE IN (N'U')) CREATE TABLE WF_BPS_PROFILE ( PROFILE_NAME VARCHAR(45), HOST_URL_MANAGER VARCHAR(255), HOST_URL_WORKER VARCHAR(255), USERNAME VARCHAR(45), PASSWORD VARCHAR(1023), CALLBACK_HOST VARCHAR (45), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (PROFILE_NAME, TENANT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_WORKFLOW]') AND TYPE IN (N'U')) CREATE TABLE WF_WORKFLOW( ID VARCHAR (45), WF_NAME VARCHAR (45), DESCRIPTION VARCHAR (255), TEMPLATE_ID VARCHAR (45), IMPL_ID VARCHAR (45), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_WORKFLOW_ASSOCIATION]') AND TYPE IN (N'U')) CREATE TABLE WF_WORKFLOW_ASSOCIATION( ID INTEGER NOT NULL IDENTITY , ASSOC_NAME VARCHAR (45), EVENT_ID VARCHAR(45), ASSOC_CONDITION VARCHAR (2000), WORKFLOW_ID VARCHAR (45), IS_ENABLED CHAR (1) DEFAULT '1', TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY(ID), FOREIGN KEY (WORKFLOW_ID) REFERENCES WF_WORKFLOW(ID)ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_WORKFLOW_CONFIG_PARAM]') AND TYPE IN (N'U')) CREATE TABLE WF_WORKFLOW_CONFIG_PARAM( WORKFLOW_ID VARCHAR (45), PARAM_NAME VARCHAR (45), PARAM_VALUE VARCHAR (1000), PARAM_QNAME VARCHAR (45), PARAM_HOLDER VARCHAR (45), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (WORKFLOW_ID, PARAM_NAME, PARAM_QNAME, PARAM_HOLDER), FOREIGN KEY (WORKFLOW_ID) REFERENCES WF_WORKFLOW(ID)ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_REQUEST_ENTITY_RELATIONSHIP]') AND TYPE IN (N'U')) CREATE TABLE WF_REQUEST_ENTITY_RELATIONSHIP( REQUEST_ID VARCHAR (45), ENTITY_NAME VARCHAR (255), ENTITY_TYPE VARCHAR (50), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY(REQUEST_ID,
-- Workflow relation tables, recovery data, password history and claim dialects.
-- FIX(review): IDN_PASSWORD_HISTORY_DATA had a trailing comma after its UNIQUE
-- constraint ("...SALT_VALUE,HASH), );"), which is invalid T-SQL and aborts the
-- batch. The stray comma is removed; everything else on this line is unchanged.
ENTITY_NAME, ENTITY_TYPE, TENANT_ID), FOREIGN KEY (REQUEST_ID) REFERENCES WF_REQUEST(UUID)ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_WORKFLOW_REQUEST_RELATION]') AND TYPE IN (N'U')) CREATE TABLE WF_WORKFLOW_REQUEST_RELATION( RELATIONSHIP_ID VARCHAR (45), WORKFLOW_ID VARCHAR (45), REQUEST_ID VARCHAR (45), UPDATED_AT DATETIME, STATUS VARCHAR (30), TENANT_ID INTEGER DEFAULT -1, PRIMARY KEY (RELATIONSHIP_ID), FOREIGN KEY (WORKFLOW_ID) REFERENCES WF_WORKFLOW(ID)ON DELETE CASCADE, FOREIGN KEY (REQUEST_ID) REFERENCES WF_REQUEST(UUID)ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_RECOVERY_DATA]') AND TYPE IN (N'U')) CREATE TABLE IDN_RECOVERY_DATA ( USER_NAME VARCHAR(255) NOT NULL, USER_DOMAIN VARCHAR(127) NOT NULL, TENANT_ID INTEGER DEFAULT -1, CODE VARCHAR(255) NOT NULL, SCENARIO VARCHAR(255) NOT NULL, STEP VARCHAR(127) NOT NULL, TIME_CREATED DATETIME NOT NULL, REMAINING_SETS VARCHAR(2500) DEFAULT NULL, PRIMARY KEY(USER_NAME, USER_DOMAIN, TENANT_ID, SCENARIO,STEP), UNIQUE(CODE) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_PASSWORD_HISTORY_DATA]') AND TYPE IN (N'U')) CREATE TABLE IDN_PASSWORD_HISTORY_DATA ( ID INTEGER NOT NULL IDENTITY , USER_NAME VARCHAR(255) NOT NULL, USER_DOMAIN VARCHAR(127) NOT NULL, TENANT_ID INTEGER DEFAULT -1, SALT_VALUE VARCHAR(255), HASH VARCHAR(255) NOT NULL, TIME_CREATED DATETIME NOT NULL, PRIMARY KEY (ID), UNIQUE (USER_NAME,USER_DOMAIN,TENANT_ID,SALT_VALUE,HASH) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CLAIM_DIALECT]') AND TYPE IN (N'U')) CREATE TABLE IDN_CLAIM_DIALECT ( ID INTEGER NOT NULL IDENTITY, DIALECT_URI VARCHAR (255) NOT NULL, TENANT_ID INTEGER NOT NULL, PRIMARY KEY (ID), CONSTRAINT DIALECT_URI_CONSTRAINT UNIQUE (DIALECT_URI, TENANT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CLAIM]') AND TYPE
-- Claim-management tables: IDN_CLAIM, mapped attributes per user store, claim
-- properties, and external-to-local claim mappings.
IN (N'U')) CREATE TABLE IDN_CLAIM ( ID INTEGER NOT NULL IDENTITY, DIALECT_ID INTEGER NOT NULL, CLAIM_URI VARCHAR (255) NOT NULL, TENANT_ID INTEGER NOT NULL, PRIMARY KEY (ID), FOREIGN KEY (DIALECT_ID) REFERENCES IDN_CLAIM_DIALECT(ID) ON DELETE CASCADE, CONSTRAINT CLAIM_URI_CONSTRAINT UNIQUE (DIALECT_ID, CLAIM_URI, TENANT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CLAIM_MAPPED_ATTRIBUTE]') AND TYPE IN (N'U')) CREATE TABLE IDN_CLAIM_MAPPED_ATTRIBUTE ( ID INTEGER NOT NULL IDENTITY, LOCAL_CLAIM_ID INTEGER, USER_STORE_DOMAIN_NAME VARCHAR (255) NOT NULL, ATTRIBUTE_NAME VARCHAR (255) NOT NULL, TENANT_ID INTEGER NOT NULL, PRIMARY KEY (ID), FOREIGN KEY (LOCAL_CLAIM_ID) REFERENCES IDN_CLAIM(ID) ON DELETE CASCADE, CONSTRAINT USER_STORE_DOMAIN_CONSTRAINT UNIQUE (LOCAL_CLAIM_ID, USER_STORE_DOMAIN_NAME, TENANT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CLAIM_PROPERTY]') AND TYPE IN (N'U')) CREATE TABLE IDN_CLAIM_PROPERTY ( ID INTEGER NOT NULL IDENTITY, LOCAL_CLAIM_ID INTEGER, PROPERTY_NAME VARCHAR (255) NOT NULL, PROPERTY_VALUE VARCHAR (255) NOT NULL, TENANT_ID INTEGER NOT NULL, PRIMARY KEY (ID), FOREIGN KEY (LOCAL_CLAIM_ID) REFERENCES IDN_CLAIM(ID) ON DELETE CASCADE, CONSTRAINT PROPERTY_NAME_CONSTRAINT UNIQUE (LOCAL_CLAIM_ID, PROPERTY_NAME, TENANT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CLAIM_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE IDN_CLAIM_MAPPING ( ID INTEGER NOT NULL IDENTITY, EXT_CLAIM_ID INTEGER NOT NULL, MAPPED_LOCAL_CLAIM_ID INTEGER NOT NULL, TENANT_ID INTEGER NOT NULL, PRIMARY KEY (ID), FOREIGN KEY (EXT_CLAIM_ID) REFERENCES IDN_CLAIM(ID) ON DELETE CASCADE , FOREIGN KEY (MAPPED_LOCAL_CLAIM_ID) REFERENCES IDN_CLAIM(ID) ON DELETE NO ACTION , CONSTRAINT EXT_TO_LOC_MAPPING_CONSTRN UNIQUE (EXT_CLAIM_ID, TENANT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_SAML2_ASSERTION_STORE]') AND TYPE IN
-- SAML2 assertion/artifact stores and OIDC JTI (replay prevention), per-app OIDC
-- properties and the start of OIDC request-object references.
(N'U')) CREATE TABLE IDN_SAML2_ASSERTION_STORE ( ID INTEGER NOT NULL IDENTITY, SAML2_ID VARCHAR(255) , SAML2_ISSUER VARCHAR(255) , SAML2_SUBJECT VARCHAR(255) , SAML2_SESSION_INDEX VARCHAR(255) , SAML2_AUTHN_CONTEXT_CLASS_REF VARCHAR(255) , SAML2_ASSERTION VARCHAR(4096) , ASSERTION VARBINARY(MAX) , PRIMARY KEY (ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_SAML2_ARTIFACT_STORE]') AND TYPE IN (N'U')) CREATE TABLE IDN_SAML2_ARTIFACT_STORE ( ID INTEGER NOT NULL IDENTITY, SOURCE_ID VARCHAR(255) NOT NULL, MESSAGE_HANDLER VARCHAR(255) NOT NULL, AUTHN_REQ_DTO VARBINARY(MAX) NOT NULL, SESSION_ID VARCHAR(255) NOT NULL, INIT_TIMESTAMP DATETIME NOT NULL, EXP_TIMESTAMP DATETIME NOT NULL, ASSERTION_ID VARCHAR(255), PRIMARY KEY (ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_JTI]') AND TYPE IN (N'U')) CREATE TABLE IDN_OIDC_JTI ( JWT_ID VARCHAR(255) NOT NULL, EXP_TIME DATETIME NOT NULL, TIME_CREATED DATETIME NOT NULL, PRIMARY KEY (JWT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_PROPERTY]') AND TYPE IN (N'U')) CREATE TABLE IDN_OIDC_PROPERTY ( ID INTEGER NOT NULL IDENTITY, TENANT_ID INTEGER , CONSUMER_KEY VARCHAR(255) , PROPERTY_KEY VARCHAR(255) NOT NULL , PROPERTY_VALUE VARCHAR(2047) , PRIMARY KEY (ID), FOREIGN KEY (CONSUMER_KEY) REFERENCES IDN_OAUTH_CONSUMER_APPS(CONSUMER_KEY) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_REQ_OBJECT_REFERENCE]') AND TYPE IN (N'U')) CREATE TABLE IDN_OIDC_REQ_OBJECT_REFERENCE ( ID INTEGER NOT NULL IDENTITY, CONSUMER_KEY_ID INTEGER , CODE_ID VARCHAR(255) , TOKEN_ID VARCHAR(255) , SESSION_DATA_KEY VARCHAR(255), PRIMARY KEY (ID), FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE , FOREIGN KEY (TOKEN_ID) REFERENCES IDN_OAUTH2_ACCESS_TOKEN(TOKEN_ID), FOREIGN KEY (CODE_ID) REFERENCES
-- OIDC request-object claims/values, IDN_CERTIFICATE, OIDC scope-claim mapping
-- and the start of IDN_FUNCTION_LIBRARY (adaptive-auth script libraries).
IDN_OAUTH2_AUTHORIZATION_CODE(CODE_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_REQ_OBJECT_CLAIMS]') AND TYPE IN (N'U')) CREATE TABLE IDN_OIDC_REQ_OBJECT_CLAIMS ( ID INTEGER NOT NULL IDENTITY, REQ_OBJECT_ID INTEGER, CLAIM_ATTRIBUTE VARCHAR(255) , ESSENTIAL CHAR(1) NOT NULL DEFAULT '0' , VALUE VARCHAR(255) , IS_USERINFO CHAR(1) NOT NULL DEFAULT '0', PRIMARY KEY (ID), FOREIGN KEY (REQ_OBJECT_ID) REFERENCES IDN_OIDC_REQ_OBJECT_REFERENCE (ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_REQ_OBJ_CLAIM_VALUES]') AND TYPE IN (N'U')) CREATE TABLE IDN_OIDC_REQ_OBJ_CLAIM_VALUES ( ID INTEGER NOT NULL IDENTITY, REQ_OBJECT_CLAIMS_ID INTEGER , CLAIM_VALUES VARCHAR(255) , PRIMARY KEY (ID), FOREIGN KEY (REQ_OBJECT_CLAIMS_ID) REFERENCES IDN_OIDC_REQ_OBJECT_CLAIMS(ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CERTIFICATE]') AND TYPE IN (N'U')) CREATE TABLE IDN_CERTIFICATE ( ID INTEGER IDENTITY, NAME VARCHAR(100), CERTIFICATE_IN_PEM VARBINARY(MAX), TENANT_ID INTEGER DEFAULT 0, PRIMARY KEY(ID), CONSTRAINT CERTIFICATE_UNIQUE_KEY UNIQUE (NAME, TENANT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_SCOPE_CLAIM_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE IDN_OIDC_SCOPE_CLAIM_MAPPING ( ID INTEGER IDENTITY, SCOPE_ID INTEGER NOT NULL, EXTERNAL_CLAIM_ID INTEGER NOT NULL, PRIMARY KEY (ID), FOREIGN KEY (SCOPE_ID) REFERENCES IDN_OAUTH2_SCOPE(SCOPE_ID) ON DELETE CASCADE, FOREIGN KEY (EXTERNAL_CLAIM_ID) REFERENCES IDN_CLAIM(ID) ON DELETE CASCADE, UNIQUE (SCOPE_ID, EXTERNAL_CLAIM_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_FUNCTION_LIBRARY]') AND TYPE IN (N'U')) CREATE TABLE IDN_FUNCTION_LIBRARY ( NAME VARCHAR(255) NOT NULL, DESCRIPTION VARCHAR(1023), TYPE VARCHAR(255) NOT NULL, TENANT_ID INTEGER NOT NULL, DATA VARBINARY(MAX) NOT NULL,
PRIMARY KEY (TENANT_ID,NAME) );
-- FIX(review): the two CIBA tables below were the only CREATE TABLE statements in
-- this script without the IF NOT EXISTS(... SYS.OBJECTS ...) guard used everywhere
-- else, so re-running the script failed on them. Guards added for consistency.
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_CIBA_AUTH_CODE]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_CIBA_AUTH_CODE ( AUTH_CODE_KEY CHAR (36), AUTH_REQ_ID CHAR (36), ISSUED_TIME DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, CONSUMER_KEY VARCHAR(255), LAST_POLLED_TIME DATETIME NOT NULL, POLLING_INTERVAL INTEGER, EXPIRES_IN INTEGER, AUTHENTICATED_USER_NAME VARCHAR(255), USER_STORE_DOMAIN VARCHAR(100), TENANT_ID INTEGER, AUTH_REQ_STATUS VARCHAR (100) DEFAULT 'REQUESTED', IDP_ID INTEGER, UNIQUE(AUTH_REQ_ID), PRIMARY KEY (AUTH_CODE_KEY), FOREIGN KEY (CONSUMER_KEY) REFERENCES IDN_OAUTH_CONSUMER_APPS(CONSUMER_KEY) ON DELETE CASCADE );
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_CIBA_REQUEST_SCOPES]') AND TYPE IN (N'U')) CREATE TABLE IDN_OAUTH2_CIBA_REQUEST_SCOPES ( AUTH_CODE_KEY CHAR (36), SCOPE VARCHAR (255), FOREIGN KEY (AUTH_CODE_KEY) REFERENCES IDN_OAUTH2_CIBA_AUTH_CODE(AUTH_CODE_KEY) ON DELETE CASCADE );
-- NOTE(review): the INSERT into IDN_CONFIG_TYPE further below is also unguarded
-- and will raise a unique-key violation on re-run — confirm intended run-once use.
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_FED_AUTH_SESSION_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE IDN_FED_AUTH_SESSION_MAPPING ( IDP_SESSION_ID VARCHAR(255) NOT NULL, SESSION_ID VARCHAR(255) NOT NULL, IDP_NAME VARCHAR(255) NOT NULL, AUTHENTICATOR_ID VARCHAR(255), PROTOCOL_TYPE VARCHAR(255), TIME_CREATED DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (IDP_SESSION_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CONFIG_TYPE]') AND TYPE IN (N'U')) CREATE TABLE IDN_CONFIG_TYPE ( ID VARCHAR(255) NOT NULL, NAME VARCHAR(255) NOT NULL, DESCRIPTION VARCHAR(1023) NULL, PRIMARY KEY (ID), CONSTRAINT TYPE_NAME_CONSTRAINT UNIQUE (NAME) ); INSERT INTO IDN_CONFIG_TYPE (ID, NAME, DESCRIPTION) VALUES ('9ab0ef95-13e9-4ed5-afaf-d29bed62f7bd', 'IDP_TEMPLATE', 'Template type to uniquely identify IDP templates'), ('3c4ac3d0-5903-4e3d-aaca-38df65b33bfd', 'APPLICATION_TEMPLATE', 'Template type to uniquely identify Application templates'), ('8ec6dbf1-218a-49bf-bc34-0d2db52d151c', 'CORS_CONFIGURATION', 'A resource type to keep the tenant CORS configurations'); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID =
-- Configuration-management store: IDN_CONFIG_RESOURCE with attribute and file
-- child tables (FKs added via ALTER TABLE), then remote-fetch configuration.
OBJECT_ID(N'[DBO].[IDN_CONFIG_RESOURCE]') AND TYPE IN (N'U')) CREATE TABLE IDN_CONFIG_RESOURCE ( ID VARCHAR(255) NOT NULL, TENANT_ID INT NOT NULL, NAME VARCHAR(255) NOT NULL, CREATED_TIME DATETIME NOT NULL, LAST_MODIFIED DATETIME NOT NULL, HAS_FILE BIT DEFAULT 0 NOT NULL, HAS_ATTRIBUTE BIT DEFAULT 0 NOT NULL, TYPE_ID VARCHAR(255) NOT NULL, UNIQUE (NAME, TENANT_ID, TYPE_ID), PRIMARY KEY (ID) ); ALTER TABLE IDN_CONFIG_RESOURCE ADD CONSTRAINT TYPE_ID_FOREIGN_CONSTRAINT FOREIGN KEY (TYPE_ID) REFERENCES IDN_CONFIG_TYPE (ID) ON DELETE CASCADE ON UPDATE CASCADE; IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CONFIG_ATTRIBUTE]') AND TYPE IN (N'U')) CREATE TABLE IDN_CONFIG_ATTRIBUTE ( ID VARCHAR(255) NOT NULL, RESOURCE_ID VARCHAR(255) NOT NULL, ATTR_KEY VARCHAR(255) NOT NULL, ATTR_VALUE VARCHAR(1023) NULL, PRIMARY KEY (ID), UNIQUE (RESOURCE_ID, ATTR_KEY) ); ALTER TABLE IDN_CONFIG_ATTRIBUTE ADD CONSTRAINT RESOURCE_ID_ATTRIBUTE_FOREIGN_CONSTRAINT FOREIGN KEY (RESOURCE_ID) REFERENCES IDN_CONFIG_RESOURCE (ID) ON DELETE CASCADE ON UPDATE CASCADE; IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CONFIG_FILE]') AND TYPE IN (N'U')) CREATE TABLE IDN_CONFIG_FILE ( ID VARCHAR(255) NOT NULL, VALUE VARBINARY(MAX) NULL, NAME VARCHAR(255) NULL, RESOURCE_ID VARCHAR(255) NOT NULL, PRIMARY KEY (ID) ); ALTER TABLE IDN_CONFIG_FILE ADD CONSTRAINT RESOURCE_ID_FILE_FOREIGN_CONSTRAINT FOREIGN KEY (RESOURCE_ID) REFERENCES IDN_CONFIG_RESOURCE (ID) ON DELETE CASCADE ON UPDATE CASCADE; IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_REMOTE_FETCH_CONFIG]') AND TYPE IN (N'U')) CREATE TABLE IDN_REMOTE_FETCH_CONFIG ( ID VARCHAR(255) NOT NULL, TENANT_ID INTEGER NOT NULL, IS_ENABLED CHAR(1) NOT NULL, REPO_MANAGER_TYPE VARCHAR(255) NOT NULL, ACTION_LISTENER_TYPE VARCHAR(255) NOT NULL, CONFIG_DEPLOYER_TYPE VARCHAR(255) NOT NULL, REMOTE_FETCH_NAME VARCHAR(255), REMOTE_RESOURCE_URI VARCHAR(255) NOT NULL,
-- Remote-fetch revisions, per-user functionality lock/property tables, and the
-- start of IDN_CORS_ORIGIN.
ATTRIBUTES_JSON TEXT NOT NULL, PRIMARY KEY (ID), CONSTRAINT UC_REMOTE_RESOURCE_TYPE UNIQUE (TENANT_ID, CONFIG_DEPLOYER_TYPE) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_REMOTE_FETCH_REVISIONS]') AND TYPE IN (N'U')) CREATE TABLE IDN_REMOTE_FETCH_REVISIONS ( ID VARCHAR(255) NOT NULL, CONFIG_ID VARCHAR(255) NOT NULL, FILE_PATH VARCHAR(255) NOT NULL, FILE_HASH VARCHAR(255), DEPLOYED_DATE DATETIME, LAST_SYNC_TIME DATETIME, DEPLOYMENT_STATUS VARCHAR(255), ITEM_NAME VARCHAR(255), DEPLOY_ERR_LOG TEXT, PRIMARY KEY (ID), FOREIGN KEY (CONFIG_ID) REFERENCES IDN_REMOTE_FETCH_CONFIG(ID) ON DELETE CASCADE, CONSTRAINT UC_REVISIONS UNIQUE (CONFIG_ID, ITEM_NAME) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_USER_FUNCTIONALITY_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE IDN_USER_FUNCTIONALITY_MAPPING ( ID VARCHAR(255) NOT NULL, USER_ID VARCHAR(255) NOT NULL, TENANT_ID INTEGER NOT NULL, FUNCTIONALITY_ID VARCHAR(255) NOT NULL, IS_FUNCTIONALITY_LOCKED BIT NOT NULL, FUNCTIONALITY_UNLOCK_TIME BIGINT NOT NULL, FUNCTIONALITY_LOCK_REASON VARCHAR(1023), FUNCTIONALITY_LOCK_REASON_CODE VARCHAR(255), PRIMARY KEY (ID), CONSTRAINT IDN_USER_FUNCTIONALITY_MAPPING_CONSTRAINT UNIQUE (TENANT_ID, USER_ID, FUNCTIONALITY_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_USER_FUNCTIONALITY_PROPERTY]') AND TYPE IN (N'U')) CREATE TABLE IDN_USER_FUNCTIONALITY_PROPERTY ( ID VARCHAR(255) NOT NULL, USER_ID VARCHAR(255) NOT NULL, TENANT_ID INTEGER NOT NULL, FUNCTIONALITY_ID VARCHAR(255) NOT NULL, PROPERTY_NAME VARCHAR(255), PROPERTY_VALUE VARCHAR(255), PRIMARY KEY (ID), CONSTRAINT IDN_USER_FUNCTIONALITY_PROPERTY_CONSTRAINT UNIQUE (USER_ID, TENANT_ID, FUNCTIONALITY_ID, PROPERTY_NAME) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CORS_ORIGIN]') AND TYPE IN (N'U')) CREATE TABLE IDN_CORS_ORIGIN ( ID INT NOT NULL IDENTITY, TENANT_ID INT NOT NULL, ORIGIN
-- IDN_CORS_ORIGIN tail, IDN_CORS_ASSOCIATION, then the index-creation section
-- (OAuth2 access-token and authorization-code lookup indexes).
VARCHAR(2048) NOT NULL, UUID CHAR(36) NOT NULL, PRIMARY KEY (ID), UNIQUE (TENANT_ID, ORIGIN), UNIQUE (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CORS_ASSOCIATION]') AND TYPE IN (N'U')) CREATE TABLE IDN_CORS_ASSOCIATION ( IDN_CORS_ORIGIN_ID INT NOT NULL, SP_APP_ID INT NOT NULL, PRIMARY KEY (IDN_CORS_ORIGIN_ID, SP_APP_ID), FOREIGN KEY (IDN_CORS_ORIGIN_ID) REFERENCES IDN_CORS_ORIGIN (ID) ON DELETE CASCADE, FOREIGN KEY (SP_APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE ); -- --------------------------- INDEX CREATION ----------------------------- -- IDN_OAUTH2_ACCESS_TOKEN -- CREATE INDEX IDX_TC ON IDN_OAUTH2_ACCESS_TOKEN(TIME_CREATED); CREATE INDEX IDX_ATH ON IDN_OAUTH2_ACCESS_TOKEN(ACCESS_TOKEN_HASH); CREATE INDEX IDX_AT_CK_AU ON IDN_OAUTH2_ACCESS_TOKEN(CONSUMER_KEY_ID, AUTHZ_USER, TOKEN_STATE, USER_TYPE); CREATE INDEX IDX_AT_TI_UD ON IDN_OAUTH2_ACCESS_TOKEN(AUTHZ_USER, TENANT_ID, TOKEN_STATE, USER_DOMAIN); CREATE INDEX IDX_AT_AU_TID_UD_TS_CKID ON IDN_OAUTH2_ACCESS_TOKEN(AUTHZ_USER, TENANT_ID, USER_DOMAIN, TOKEN_STATE, CONSUMER_KEY_ID); CREATE INDEX IDX_AT_AT ON IDN_OAUTH2_ACCESS_TOKEN (AUTHZ_USER) INCLUDE (ACCESS_TOKEN); CREATE INDEX IDX_AT_AU_CKID_TS_UT ON IDN_OAUTH2_ACCESS_TOKEN(AUTHZ_USER, CONSUMER_KEY_ID, TOKEN_STATE, USER_TYPE); CREATE INDEX IDX_AT_RTH ON IDN_OAUTH2_ACCESS_TOKEN(REFRESH_TOKEN_HASH); CREATE INDEX IDX_AT_RT ON IDN_OAUTH2_ACCESS_TOKEN (AUTHZ_USER) INCLUDE (REFRESH_TOKEN); CREATE INDEX IDX_AT_CKID_AU_TID_UD_TSH_TS ON IDN_OAUTH2_ACCESS_TOKEN(CONSUMER_KEY_ID, AUTHZ_USER, TENANT_ID, USER_DOMAIN, TOKEN_SCOPE_HASH, TOKEN_STATE); -- IDN_OAUTH2_AUTHORIZATION_CODE -- CREATE INDEX IDX_AUTHORIZATION_CODE_HASH ON IDN_OAUTH2_AUTHORIZATION_CODE (AUTHORIZATION_CODE_HASH, CONSUMER_KEY_ID); CREATE INDEX IDX_AUTHORIZATION_CODE_AU_TI ON IDN_OAUTH2_AUTHORIZATION_CODE (AUTHZ_USER, TENANT_ID, USER_DOMAIN, STATE); CREATE INDEX IDX_AC_CKID ON IDN_OAUTH2_AUTHORIZATION_CODE(CONSUMER_KEY_ID); CREATE INDEX IDX_AC_TID ON
-- More lookup indexes: SCIM groups, session stores, OIDC scope/claim mappings,
-- OAuth2 scopes, SP/IdP apps and FIDO2 device store.
IDN_OAUTH2_AUTHORIZATION_CODE(TOKEN_ID); CREATE INDEX IDX_AC_AC_CKID ON IDN_OAUTH2_AUTHORIZATION_CODE (AUTHZ_USER) INCLUDE (AUTHORIZATION_CODE, CONSUMER_KEY_ID); -- IDN_SCIM_GROUP -- CREATE INDEX IDX_IDN_SCIM_GROUP_TI_RN ON IDN_SCIM_GROUP (TENANT_ID, ROLE_NAME); CREATE INDEX IDX_IDN_SCIM_GROUP_TI_RN_AN ON IDN_SCIM_GROUP (TENANT_ID, ROLE_NAME, ATTR_NAME); -- IDN_AUTH_SESSION_STORE -- CREATE INDEX IDX_IDN_AUTH_SESSION_TIME ON IDN_AUTH_SESSION_STORE (TIME_CREATED); -- IDN_AUTH_TEMP_SESSION_STORE -- CREATE INDEX IDX_IDN_AUTH_TMP_SESSION_TIME ON IDN_AUTH_TEMP_SESSION_STORE (TIME_CREATED); -- IDN_OIDC_SCOPE_CLAIM_MAPPING -- CREATE INDEX IDX_AT_SI_ECI ON IDN_OIDC_SCOPE_CLAIM_MAPPING(SCOPE_ID, EXTERNAL_CLAIM_ID); -- IDN_OAUTH2_SCOPE -- CREATE INDEX IDX_SC_TID ON IDN_OAUTH2_SCOPE(TENANT_ID); -- IDN_OAUTH2_SCOPE_BINDING -- CREATE INDEX IDX_SB_SCPID ON IDN_OAUTH2_SCOPE_BINDING(SCOPE_ID); -- IDN_OIDC_REQ_OBJECT_REFERENCE -- CREATE INDEX IDX_OROR_TID ON IDN_OIDC_REQ_OBJECT_REFERENCE(TOKEN_ID); -- IDN_OAUTH2_ACCESS_TOKEN_SCOPE -- CREATE INDEX IDX_ATS_TID ON IDN_OAUTH2_ACCESS_TOKEN_SCOPE(TOKEN_ID); -- SP_TEMPLATE -- CREATE INDEX IDX_SP_TEMPLATE ON SP_TEMPLATE (TENANT_ID, NAME); -- IDN_AUTH_USER -- CREATE INDEX IDX_AUTH_USER_UN_TID_DN ON IDN_AUTH_USER (USER_NAME, TENANT_ID, DOMAIN_NAME); CREATE INDEX IDX_AUTH_USER_DN_TOD ON IDN_AUTH_USER (DOMAIN_NAME, TENANT_ID); -- IDN_AUTH_USER_SESSION_MAPPING -- CREATE INDEX IDX_USER_ID ON IDN_AUTH_USER_SESSION_MAPPING (USER_ID); CREATE INDEX IDX_SESSION_ID ON IDN_AUTH_USER_SESSION_MAPPING (SESSION_ID); -- IDN_OAUTH_CONSUMER_APPS -- CREATE INDEX IDX_OCA_UM_TID_UD_APN ON IDN_OAUTH_CONSUMER_APPS(USERNAME,TENANT_ID,USER_DOMAIN, APP_NAME); -- IDX_SPI_APP -- CREATE INDEX IDX_SPI_APP ON SP_INBOUND_AUTH(APP_ID); -- IDN_OIDC_PROPERTY -- CREATE INDEX IDX_IOP_TID_CK ON IDN_OIDC_PROPERTY(TENANT_ID,CONSUMER_KEY); -- IDN_FIDO2_PROPERTY -- CREATE INDEX IDX_FIDO2_STR ON FIDO2_DEVICE_STORE(USER_NAME, TENANT_ID, DOMAIN_NAME, CREDENTIAL_ID, USER_HANDLE); --
-- Remaining indexes (associations, token binding, federated sessions, remote
-- fetch, CORS) and the first consent-management (CM_*) tables.
IDN_ASSOCIATED_ID -- CREATE INDEX IDX_AI_DN_UN_AI ON IDN_ASSOCIATED_ID(DOMAIN_NAME, USER_NAME, ASSOCIATION_ID); -- IDN_OAUTH2_TOKEN_BINDING -- CREATE INDEX IDX_IDN_AUTH_BIND ON IDN_OAUTH2_TOKEN_BINDING (TOKEN_BINDING_REF); -- IDN_FED_AUTH_SESSION_MAPPING -- CREATE INDEX IDX_FEDERATED_AUTH_SESSION_ID ON IDN_FED_AUTH_SESSION_MAPPING (SESSION_ID); -- IDN_REMOTE_FETCH_REVISIONS -- CREATE INDEX IDX_REMOTE_FETCH_REVISION_CONFIG_ID ON IDN_REMOTE_FETCH_REVISIONS (CONFIG_ID); -- IDN_CORS_ASSOCIATION -- CREATE INDEX IDX_CORS_SP_APP_ID ON IDN_CORS_ASSOCIATION (SP_APP_ID); -- IDN_CORS_ASSOCIATION -- CREATE INDEX IDX_CORS_ORIGIN_ID ON IDN_CORS_ASSOCIATION (IDN_CORS_ORIGIN_ID); -- Start of CONSENT-MGT Tables -- IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_PII_CATEGORY]') AND TYPE IN (N'U')) CREATE TABLE CM_PII_CATEGORY ( ID INTEGER NOT NULL IDENTITY, NAME VARCHAR(255) NOT NULL, DESCRIPTION VARCHAR(1023), DISPLAY_NAME VARCHAR(255), IS_SENSITIVE INTEGER NOT NULL, TENANT_ID INTEGER DEFAULT '-1234', CONSTRAINT CM_PII_CATEGORY_CNT UNIQUE (NAME, TENANT_ID), PRIMARY KEY (ID) ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_RECEIPT]') AND TYPE IN (N'U')) CREATE TABLE CM_RECEIPT ( CONSENT_RECEIPT_ID VARCHAR(255) NOT NULL, VERSION VARCHAR(255) NOT NULL, JURISDICTION VARCHAR(255) NOT NULL, CONSENT_TIMESTAMP DATETIME NOT NULL, COLLECTION_METHOD VARCHAR(255) NOT NULL, LANGUAGE VARCHAR(255) NOT NULL, PII_PRINCIPAL_ID VARCHAR(255) NOT NULL, PRINCIPAL_TENANT_ID INTEGER DEFAULT '-1234', POLICY_URL VARCHAR(255) NOT NULL, STATE VARCHAR(255) NOT NULL, PII_CONTROLLER VARCHAR(2048) NOT NULL, PRIMARY KEY (CONSENT_RECEIPT_ID) ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_PURPOSE]') AND TYPE IN (N'U')) CREATE TABLE CM_PURPOSE ( ID INTEGER NOT NULL IDENTITY, NAME VARCHAR(255) NOT NULL, DESCRIPTION VARCHAR(1023), PURPOSE_GROUP VARCHAR(255) NOT NULL, GROUP_TYPE VARCHAR(255) NOT NULL, TENANT_ID
-- Consent-management (CM_*) tables: purpose categories, consent-receipt/SP associations and
-- SP-purpose/PII-category associations; every CREATE TABLE is guarded by a SYS.OBJECTS existence
-- check so the script is re-runnable.
-- NOTE(review): TENANT_ID defaults use the quoted literal '-1234' and rely on implicit
-- string-to-INTEGER conversion; elsewhere in this script unquoted -1234 is used — confirm intended.
INTEGER DEFAULT '-1234', CONSTRAINT CM_PURPOSE_CNT UNIQUE (NAME, TENANT_ID, PURPOSE_GROUP, GROUP_TYPE), PRIMARY KEY (ID) ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_PURPOSE_CATEGORY]') AND TYPE IN (N'U')) CREATE TABLE CM_PURPOSE_CATEGORY ( ID INTEGER NOT NULL IDENTITY, NAME VARCHAR(255) NOT NULL, DESCRIPTION VARCHAR(1023), TENANT_ID INTEGER DEFAULT '-1234', CONSTRAINT CM_PURPOSE_CATEGORY_CNT UNIQUE (NAME, TENANT_ID), PRIMARY KEY (ID) ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_RECEIPT_SP_ASSOC]') AND TYPE IN (N'U')) CREATE TABLE CM_RECEIPT_SP_ASSOC ( ID INTEGER NOT NULL IDENTITY, CONSENT_RECEIPT_ID VARCHAR(255) NOT NULL, SP_NAME VARCHAR(255) NOT NULL, SP_DISPLAY_NAME VARCHAR(255), SP_DESCRIPTION VARCHAR(255), SP_TENANT_ID INTEGER DEFAULT '-1234', CONSTRAINT CM_RECEIPT_SP_ASSOC_CNT UNIQUE (CONSENT_RECEIPT_ID, SP_NAME, SP_TENANT_ID), FOREIGN KEY (CONSENT_RECEIPT_ID) REFERENCES CM_RECEIPT (CONSENT_RECEIPT_ID), PRIMARY KEY (ID) ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_SP_PURPOSE_ASSOC]') AND TYPE IN (N'U')) CREATE TABLE CM_SP_PURPOSE_ASSOC ( ID INTEGER NOT NULL IDENTITY, RECEIPT_SP_ASSOC INTEGER NOT NULL, PURPOSE_ID INTEGER NOT NULL, CONSENT_TYPE VARCHAR(255) NOT NULL, IS_PRIMARY_PURPOSE INTEGER NOT NULL, TERMINATION VARCHAR(255) NOT NULL, THIRD_PARTY_DISCLOSURE INTEGER NOT NULL, THIRD_PARTY_NAME VARCHAR(255), CONSTRAINT CM_SP_PURPOSE_ASSOC_CNT UNIQUE (RECEIPT_SP_ASSOC, PURPOSE_ID), FOREIGN KEY (RECEIPT_SP_ASSOC) REFERENCES CM_RECEIPT_SP_ASSOC (ID), FOREIGN KEY (PURPOSE_ID) REFERENCES CM_PURPOSE (ID), PRIMARY KEY (ID) ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_SP_PURPOSE_PURPOSE_CAT_ASSC]') AND TYPE IN (N'U')) CREATE TABLE CM_SP_PURPOSE_PURPOSE_CAT_ASSC ( SP_PURPOSE_ASSOC_ID INTEGER NOT NULL, PURPOSE_CATEGORY_ID INTEGER NOT NULL, CONSTRAINT CM_SP_PURPOSE_PURPOSE_CAT_ASSC_CNT UNIQUE (SP_PURPOSE_ASSOC_ID,
PURPOSE_CATEGORY_ID), FOREIGN KEY (SP_PURPOSE_ASSOC_ID) REFERENCES CM_SP_PURPOSE_ASSOC (ID), FOREIGN KEY (PURPOSE_CATEGORY_ID) REFERENCES CM_PURPOSE_CATEGORY (ID) ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_PURPOSE_PII_CAT_ASSOC]') AND TYPE IN (N'U')) CREATE TABLE CM_PURPOSE_PII_CAT_ASSOC ( PURPOSE_ID INTEGER NOT NULL, CM_PII_CATEGORY_ID INTEGER NOT NULL, IS_MANDATORY INTEGER NOT NULL, CONSTRAINT CM_PURPOSE_PII_CAT_ASSOC_CNT UNIQUE (PURPOSE_ID, CM_PII_CATEGORY_ID) ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_SP_PURPOSE_PII_CAT_ASSOC]') AND TYPE IN (N'U')) CREATE TABLE CM_SP_PURPOSE_PII_CAT_ASSOC ( SP_PURPOSE_ASSOC_ID INTEGER NOT NULL, PII_CATEGORY_ID INTEGER NOT NULL, VALIDITY VARCHAR(1023), CONSTRAINT CM_SP_PURPOSE_PII_CAT_ASSOC_CNT UNIQUE (SP_PURPOSE_ASSOC_ID, PII_CATEGORY_ID), FOREIGN KEY (PII_CATEGORY_ID) REFERENCES CM_PII_CATEGORY (ID), FOREIGN KEY (SP_PURPOSE_ASSOC_ID) REFERENCES CM_SP_PURPOSE_ASSOC (ID) ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_CONSENT_RECEIPT_PROPERTY]') AND TYPE IN (N'U')) CREATE TABLE CM_CONSENT_RECEIPT_PROPERTY ( CONSENT_RECEIPT_ID VARCHAR(255) NOT NULL, NAME VARCHAR(255) NOT NULL, VALUE VARCHAR(1023) NOT NULL, CONSTRAINT CM_CONSENT_RECEIPT_PROPERTY_CNT UNIQUE (CONSENT_RECEIPT_ID, NAME), FOREIGN KEY (CONSENT_RECEIPT_ID) REFERENCES CM_RECEIPT (CONSENT_RECEIPT_ID) ); INSERT INTO CM_PURPOSE (NAME, DESCRIPTION, PURPOSE_GROUP, GROUP_TYPE, TENANT_ID) VALUES ('DEFAULT', 'For core functionalities of the product', 'DEFAULT', 'SP', '-1234'); INSERT INTO CM_PURPOSE_CATEGORY (NAME, DESCRIPTION, TENANT_ID) VALUES ('DEFAULT','For core functionalities of the product', '-1234'); -- End of CONSENT-MGT Tables -- -- UMA tables -- IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_RESOURCE]') AND TYPE IN (N'U')) CREATE TABLE IDN_UMA_RESOURCE ( ID INTEGER IDENTITY NOT NULL, RESOURCE_ID VARCHAR(255),
-- UMA (User-Managed Access) tables: resources, resource metadata/scopes and permission tickets,
-- each with lookup indexes; child tables cascade-delete with their parent resource/ticket.
-- The tail of this span begins the API-Manager section (AM_SUBSCRIBER, AM_APPLICATION,
-- AM_MONETIZATION_USAGE).
RESOURCE_NAME VARCHAR(255), TIME_CREATED DATETIME NOT NULL, RESOURCE_OWNER_NAME VARCHAR(255), CLIENT_ID VARCHAR(255), TENANT_ID INTEGER DEFAULT -1234, USER_DOMAIN VARCHAR(50), PRIMARY KEY (ID) ); CREATE INDEX IDX_RID ON IDN_UMA_RESOURCE (RESOURCE_ID); CREATE INDEX IDX_USER ON IDN_UMA_RESOURCE (RESOURCE_OWNER_NAME, USER_DOMAIN); CREATE INDEX IDX_USER_RID ON IDN_UMA_RESOURCE (RESOURCE_ID, RESOURCE_OWNER_NAME, USER_DOMAIN, CLIENT_ID); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_RESOURCE_META_DATA]') AND TYPE IN (N'U')) CREATE TABLE IDN_UMA_RESOURCE_META_DATA ( ID INTEGER IDENTITY NOT NULL, RESOURCE_IDENTITY INTEGER NOT NULL, PROPERTY_KEY VARCHAR(40), PROPERTY_VALUE VARCHAR(255), PRIMARY KEY (ID), FOREIGN KEY (RESOURCE_IDENTITY) REFERENCES IDN_UMA_RESOURCE (ID) ON DELETE CASCADE ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_RESOURCE_SCOPE]') AND TYPE IN (N'U')) CREATE TABLE IDN_UMA_RESOURCE_SCOPE ( ID INTEGER IDENTITY NOT NULL, RESOURCE_IDENTITY INTEGER NOT NULL, SCOPE_NAME VARCHAR(255), PRIMARY KEY (ID), FOREIGN KEY (RESOURCE_IDENTITY) REFERENCES IDN_UMA_RESOURCE (ID) ON DELETE CASCADE ); CREATE INDEX IDX_RS ON IDN_UMA_RESOURCE_SCOPE (SCOPE_NAME); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_PERMISSION_TICKET]') AND TYPE IN (N'U')) CREATE TABLE IDN_UMA_PERMISSION_TICKET ( ID INTEGER IDENTITY NOT NULL, PT VARCHAR(255) NOT NULL, TIME_CREATED DATETIME NOT NULL, EXPIRY_TIME DATETIME NOT NULL, TICKET_STATE VARCHAR(25) DEFAULT 'ACTIVE', TENANT_ID INTEGER DEFAULT -1234, TOKEN_ID VARCHAR(255), PRIMARY KEY (ID) ); CREATE INDEX IDX_PT ON IDN_UMA_PERMISSION_TICKET (PT); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_PT_RESOURCE]') AND TYPE IN (N'U')) CREATE TABLE IDN_UMA_PT_RESOURCE ( ID INTEGER IDENTITY NOT NULL, PT_RESOURCE_ID INTEGER NOT NULL, PT_ID INTEGER NOT NULL, PRIMARY KEY (ID), FOREIGN KEY (PT_ID)
REFERENCES IDN_UMA_PERMISSION_TICKET (ID) ON DELETE CASCADE, FOREIGN KEY (PT_RESOURCE_ID) REFERENCES IDN_UMA_RESOURCE (ID) ); IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_PT_RESOURCE_SCOPE]') AND TYPE IN (N'U')) CREATE TABLE IDN_UMA_PT_RESOURCE_SCOPE ( ID INTEGER IDENTITY NOT NULL, PT_RESOURCE_ID INTEGER NOT NULL, PT_SCOPE_ID INTEGER NOT NULL, PRIMARY KEY (ID), FOREIGN KEY (PT_RESOURCE_ID) REFERENCES IDN_UMA_PT_RESOURCE (ID) ON DELETE CASCADE, FOREIGN KEY (PT_SCOPE_ID) REFERENCES IDN_UMA_RESOURCE_SCOPE (ID) ); -- Start of API-Mgt Tables -- IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SUBSCRIBER]') AND TYPE IN (N'U')) CREATE TABLE AM_SUBSCRIBER ( SUBSCRIBER_ID INTEGER IDENTITY(1,1), USER_ID VARCHAR(50) NOT NULL, TENANT_ID INTEGER NOT NULL, EMAIL_ADDRESS VARCHAR(256) NULL, DATE_SUBSCRIBED DATETIME NOT NULL, CREATED_BY VARCHAR(100), CREATED_TIME DATETIME, UPDATED_BY VARCHAR(100), UPDATED_TIME DATETIME, PRIMARY KEY (SUBSCRIBER_ID), UNIQUE (TENANT_ID,USER_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION]') AND TYPE IN (N'U')) CREATE TABLE AM_APPLICATION ( APPLICATION_ID INTEGER IDENTITY(1,1), NAME VARCHAR(100) COLLATE Latin1_General_CS_AS, SUBSCRIBER_ID INTEGER, APPLICATION_TIER VARCHAR(50) DEFAULT 'Unlimited', CALLBACK_URL VARCHAR(512), DESCRIPTION VARCHAR(512), APPLICATION_STATUS VARCHAR(50) DEFAULT 'APPROVED', GROUP_ID VARCHAR(100), CREATED_BY VARCHAR(100), CREATED_TIME DATETIME, UPDATED_BY VARCHAR(100), UPDATED_TIME DATETIME, UUID VARCHAR(256), TOKEN_TYPE VARCHAR(10), FOREIGN KEY(SUBSCRIBER_ID) REFERENCES AM_SUBSCRIBER(SUBSCRIBER_ID) ON UPDATE CASCADE, PRIMARY KEY(APPLICATION_ID), UNIQUE (NAME,SUBSCRIBER_ID), UNIQUE (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_MONETIZATION_USAGE]') AND TYPE IN (N'U')) CREATE TABLE AM_MONETIZATION_USAGE ( ID VARCHAR(100) NOT NULL, STATE VARCHAR(50) NOT NULL,
-- Core API-Manager tables: AM_API (the API registry), GraphQL complexity settings,
-- URL-pattern and resource-scope mappings, the security-audit UUID mapping, API-product
-- mappings, subscriptions and subscription key mappings.
STATUS VARCHAR(50) NOT NULL, STARTED_TIME VARCHAR(50) NOT NULL, PUBLISHED_TIME VARCHAR(50) NOT NULL, PRIMARY KEY(ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API]') AND TYPE IN (N'U')) CREATE TABLE AM_API ( API_ID INTEGER IDENTITY(1,1), API_UUID VARCHAR(256), API_PROVIDER VARCHAR(200), API_NAME VARCHAR(200), API_VERSION VARCHAR(30), CONTEXT VARCHAR(256), CONTEXT_TEMPLATE VARCHAR(256), API_TIER VARCHAR(256), API_TYPE VARCHAR(10), CREATED_BY VARCHAR(100), CREATED_TIME DATETIME, UPDATED_BY VARCHAR(100), UPDATED_TIME DATETIME, STATUS VARCHAR(30), PRIMARY KEY(API_ID), UNIQUE (API_PROVIDER,API_NAME,API_VERSION), UNIQUE (API_UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GRAPHQL_COMPLEXITY]') AND TYPE IN (N'U')) CREATE TABLE AM_GRAPHQL_COMPLEXITY ( UUID VARCHAR(256), API_ID INTEGER NOT NULL, TYPE VARCHAR(256), FIELD VARCHAR(256), COMPLEXITY_VALUE INTEGER, REVISION_UUID VARCHAR(255), FOREIGN KEY (API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE ON DELETE CASCADE, PRIMARY KEY(UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_URL_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE AM_API_URL_MAPPING ( URL_MAPPING_ID INTEGER IDENTITY(1,1), API_ID INTEGER NOT NULL, HTTP_METHOD VARCHAR(20) NULL, AUTH_SCHEME VARCHAR(50) NULL, URL_PATTERN VARCHAR(512) NULL, THROTTLING_TIER varchar(512) DEFAULT NULL, MEDIATION_SCRIPT VARBINARY(MAX), REVISION_UUID VARCHAR(255), PRIMARY KEY (URL_MAPPING_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_RESOURCE_SCOPE_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE AM_API_RESOURCE_SCOPE_MAPPING ( SCOPE_NAME VARCHAR(255) NOT NULL, URL_MAPPING_ID INTEGER NOT NULL, TENANT_ID INTEGER NOT NULL, FOREIGN KEY (URL_MAPPING_ID) REFERENCES AM_API_URL_MAPPING(URL_MAPPING_ID) ON DELETE CASCADE, PRIMARY KEY(SCOPE_NAME, URL_MAPPING_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID =
OBJECT_ID(N'[DBO].[AM_SECURITY_AUDIT_UUID_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE AM_SECURITY_AUDIT_UUID_MAPPING ( API_ID INTEGER NOT NULL, AUDIT_UUID VARCHAR(255) NOT NULL, FOREIGN KEY (API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE ON DELETE NO ACTION, PRIMARY KEY (API_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_PRODUCT_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE AM_API_PRODUCT_MAPPING ( API_PRODUCT_MAPPING_ID INTEGER IDENTITY(1,1), API_ID INTEGER, URL_MAPPING_ID INTEGER, REVISION_UUID VARCHAR(255), FOREIGN KEY (API_ID) REFERENCES AM_API(API_ID) ON DELETE CASCADE, FOREIGN KEY (URL_MAPPING_ID) REFERENCES AM_API_URL_MAPPING(URL_MAPPING_ID) ON DELETE CASCADE, PRIMARY KEY(API_PRODUCT_MAPPING_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SUBSCRIPTION]') AND TYPE IN (N'U')) CREATE TABLE AM_SUBSCRIPTION ( SUBSCRIPTION_ID INTEGER IDENTITY(1,1), TIER_ID VARCHAR(50), TIER_ID_PENDING VARCHAR(50), API_ID INTEGER, LAST_ACCESSED DATETIME NULL, APPLICATION_ID INTEGER, SUB_STATUS VARCHAR(50), SUBS_CREATE_STATE VARCHAR(50) DEFAULT 'SUBSCRIBE', CREATED_BY VARCHAR(100), CREATED_TIME DATETIME, UPDATED_BY VARCHAR(100), UPDATED_TIME DATETIME, UUID VARCHAR(256), FOREIGN KEY(APPLICATION_ID) REFERENCES AM_APPLICATION(APPLICATION_ID) ON UPDATE CASCADE, FOREIGN KEY(API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE, PRIMARY KEY (SUBSCRIPTION_ID), UNIQUE (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SUBSCRIPTION_KEY_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE AM_SUBSCRIPTION_KEY_MAPPING ( SUBSCRIPTION_ID INTEGER, ACCESS_TOKEN VARCHAR(512), KEY_TYPE VARCHAR(512) NOT NULL, FOREIGN KEY(SUBSCRIPTION_ID) REFERENCES AM_SUBSCRIPTION(SUBSCRIPTION_ID) ON UPDATE CASCADE, PRIMARY KEY(SUBSCRIPTION_ID,ACCESS_TOKEN) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION_KEY_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE
-- Application key mappings (per key-manager/key-type), application registration workflow
-- records, API lifecycle events, key-domain mappings, API comments (self-referencing
-- PARENT_COMMENT_ID for threads), ratings, tier permissions, external stores and workflows.
AM_APPLICATION_KEY_MAPPING ( UUID VARCHAR(100), APPLICATION_ID INTEGER, CONSUMER_KEY VARCHAR(512), KEY_TYPE VARCHAR(512) NOT NULL, STATE VARCHAR(30) NOT NULL, CREATE_MODE VARCHAR(30) DEFAULT 'CREATED', KEY_MANAGER VARCHAR(100), APP_INFO VARBINARY(MAX) DEFAULT NULL, FOREIGN KEY(APPLICATION_ID) REFERENCES AM_APPLICATION(APPLICATION_ID) ON UPDATE CASCADE, PRIMARY KEY(APPLICATION_ID,KEY_TYPE,KEY_MANAGER) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION_REGISTRATION]') AND TYPE IN (N'U')) CREATE TABLE AM_APPLICATION_REGISTRATION ( REG_ID INTEGER IDENTITY(1,1), SUBSCRIBER_ID INTEGER, WF_REF VARCHAR(255) NOT NULL, APP_ID INTEGER, TOKEN_TYPE VARCHAR(30), TOKEN_SCOPE VARCHAR(1500) DEFAULT 'default', INPUTS VARCHAR(1000), ALLOWED_DOMAINS VARCHAR(256), VALIDITY_PERIOD BIGINT, KEY_MANAGER VARCHAR(255) NOT NULL, UNIQUE (SUBSCRIBER_ID,APP_ID,TOKEN_TYPE,KEY_MANAGER), FOREIGN KEY(SUBSCRIBER_ID) REFERENCES AM_SUBSCRIBER(SUBSCRIBER_ID) ON DELETE NO ACTION, FOREIGN KEY(APP_ID) REFERENCES AM_APPLICATION(APPLICATION_ID) ON UPDATE CASCADE ON DELETE NO ACTION, PRIMARY KEY (REG_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_LC_EVENT]') AND TYPE IN (N'U')) CREATE TABLE AM_API_LC_EVENT ( EVENT_ID INTEGER IDENTITY(1,1), API_ID INTEGER NOT NULL, PREVIOUS_STATE VARCHAR(50), NEW_STATE VARCHAR(50) NOT NULL, USER_ID VARCHAR(50) NOT NULL, TENANT_ID INTEGER NOT NULL, EVENT_DATE DATETIME NOT NULL, FOREIGN KEY(API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE, PRIMARY KEY (EVENT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APP_KEY_DOMAIN_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE AM_APP_KEY_DOMAIN_MAPPING ( CONSUMER_KEY VARCHAR(512), AUTHZ_DOMAIN VARCHAR(255) DEFAULT 'ALL', PRIMARY KEY (CONSUMER_KEY,AUTHZ_DOMAIN) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_COMMENTS]') AND TYPE IN (N'U')) CREATE TABLE AM_API_COMMENTS (
COMMENT_ID VARCHAR(255) NOT NULL, COMMENT_TEXT VARCHAR(512), CREATED_BY VARCHAR(255), CREATED_TIME DATETIME NOT NULL, UPDATED_TIME DATETIME DEFAULT NULL, API_ID INTEGER, PARENT_COMMENT_ID VARCHAR(255) DEFAULT NULL, ENTRY_POINT VARCHAR(20), CATEGORY VARCHAR(20) DEFAULT 'general', FOREIGN KEY(API_ID) REFERENCES AM_API(API_ID) ON DELETE CASCADE, FOREIGN KEY(PARENT_COMMENT_ID) REFERENCES AM_API_COMMENTS(COMMENT_ID) ON DELETE CASCADE, PRIMARY KEY (COMMENT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_RATINGS]') AND TYPE IN (N'U')) CREATE TABLE AM_API_RATINGS ( RATING_ID VARCHAR(255) NOT NULL, API_ID INTEGER, RATING INTEGER, SUBSCRIBER_ID INTEGER, FOREIGN KEY(API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE, FOREIGN KEY(SUBSCRIBER_ID) REFERENCES AM_SUBSCRIBER(SUBSCRIBER_ID) ON UPDATE CASCADE, PRIMARY KEY (RATING_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_TIER_PERMISSIONS]') AND TYPE IN (N'U')) CREATE TABLE AM_TIER_PERMISSIONS ( TIER_PERMISSIONS_ID INTEGER IDENTITY(1,1), TIER VARCHAR(50) NOT NULL, PERMISSIONS_TYPE VARCHAR(50) NOT NULL, ROLES VARCHAR(512) NOT NULL, TENANT_ID INTEGER NOT NULL, PRIMARY KEY(TIER_PERMISSIONS_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_EXTERNAL_STORES]') AND TYPE IN (N'U')) CREATE TABLE AM_EXTERNAL_STORES ( APISTORE_ID INTEGER IDENTITY(1,1), API_ID INTEGER, STORE_ID VARCHAR(255) NOT NULL, STORE_DISPLAY_NAME VARCHAR(255) NOT NULL, STORE_ENDPOINT VARCHAR(255) NOT NULL, STORE_TYPE VARCHAR(255) NOT NULL, LAST_UPDATED_TIME DATETIME, FOREIGN KEY(API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE, PRIMARY KEY (APISTORE_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_WORKFLOWS]') AND TYPE IN (N'U')) CREATE TABLE AM_WORKFLOWS( WF_ID INTEGER IDENTITY(1,1), WF_REFERENCE VARCHAR(255) NOT NULL, WF_TYPE VARCHAR(255) NOT NULL, WF_STATUS VARCHAR(255) NOT NULL, WF_CREATED_TIME DATETIME
-- Workflow table tail, shared scopes, default API versions, alert type/email tables and their
-- seed INSERTs.
-- NOTE(review): the AM_APPLICATION_REGISTRATION DDL in this span repeats, verbatim, the statement
-- already issued earlier in this script; its IF NOT EXISTS guard makes the second occurrence a
-- no-op, but the duplicate could be removed upstream.
DEFAULT GETDATE(), WF_UPDATED_TIME DATETIME DEFAULT GETDATE(), WF_STATUS_DESC VARCHAR(1000), TENANT_ID INTEGER, TENANT_DOMAIN VARCHAR(255), WF_EXTERNAL_REFERENCE VARCHAR(255) NOT NULL UNIQUE, WF_METADATA VARBINARY(MAX) DEFAULT NULL, WF_PROPERTIES VARBINARY(MAX) DEFAULT NULL, PRIMARY KEY (WF_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION_REGISTRATION]') AND TYPE IN (N'U')) CREATE TABLE AM_APPLICATION_REGISTRATION ( REG_ID INTEGER IDENTITY(1,1), SUBSCRIBER_ID INTEGER, WF_REF VARCHAR(255) NOT NULL, APP_ID INTEGER, TOKEN_TYPE VARCHAR(30), TOKEN_SCOPE VARCHAR(1500) DEFAULT 'default', INPUTS VARCHAR(1000), ALLOWED_DOMAINS VARCHAR(256), VALIDITY_PERIOD BIGINT, KEY_MANAGER VARCHAR(255) NOT NULL, UNIQUE (SUBSCRIBER_ID,APP_ID,TOKEN_TYPE,KEY_MANAGER), FOREIGN KEY(SUBSCRIBER_ID) REFERENCES AM_SUBSCRIBER(SUBSCRIBER_ID) ON DELETE NO ACTION, FOREIGN KEY(APP_ID) REFERENCES AM_APPLICATION(APPLICATION_ID) ON UPDATE CASCADE ON DELETE NO ACTION, PRIMARY KEY (REG_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SHARED_SCOPE]') AND TYPE IN (N'U')) CREATE TABLE AM_SHARED_SCOPE ( NAME VARCHAR(255), UUID VARCHAR (256), TENANT_ID INTEGER, PRIMARY KEY (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_DEFAULT_VERSION]') AND TYPE IN (N'U')) CREATE TABLE AM_API_DEFAULT_VERSION ( DEFAULT_VERSION_ID INTEGER NOT NULL IDENTITY, API_NAME VARCHAR(256) NOT NULL , API_PROVIDER VARCHAR(256) NOT NULL , DEFAULT_API_VERSION VARCHAR(30) , PUBLISHED_DEFAULT_API_VERSION VARCHAR(30) , PRIMARY KEY (DEFAULT_VERSION_ID) ); CREATE INDEX IDX_SUB_APP_ID ON AM_SUBSCRIPTION (APPLICATION_ID, SUBSCRIPTION_ID); CREATE TABLE AM_ALERT_TYPES ( ALERT_TYPE_ID INTEGER NOT NULL IDENTITY, ALERT_TYPE_NAME VARCHAR(255) NOT NULL , STAKE_HOLDER VARCHAR(10) NOT NULL, PRIMARY KEY (ALERT_TYPE_ID) ); CREATE TABLE AM_ALERT_TYPES_VALUES ( ALERT_TYPE_ID INTEGER, USER_NAME VARCHAR(255) NOT NULL ,
STAKE_HOLDER VARCHAR(100) NOT NULL , PRIMARY KEY (ALERT_TYPE_ID,USER_NAME,STAKE_HOLDER), CONSTRAINT AM_ALERT_TYPES_VALUES_CONST UNIQUE (ALERT_TYPE_ID,USER_NAME,STAKE_HOLDER) ); CREATE TABLE AM_ALERT_EMAILLIST ( EMAIL_LIST_ID INTEGER NOT NULL IDENTITY, USER_NAME VARCHAR(255) NOT NULL , STAKE_HOLDER VARCHAR(100) NOT NULL , CONSTRAINT AM_ALERT_EMAILLIST_CONST UNIQUE (EMAIL_LIST_ID,USER_NAME,STAKE_HOLDER), PRIMARY KEY (EMAIL_LIST_ID) ); CREATE TABLE AM_ALERT_EMAILLIST_DETAILS ( EMAIL_LIST_ID INTEGER, EMAIL VARCHAR(255), PRIMARY KEY (EMAIL_LIST_ID,EMAIL), CONSTRAINT AM_ALERT_EMAILLIST_DETAILS_CONST UNIQUE (EMAIL_LIST_ID,EMAIL) ); INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('AbnormalResponseTime', 'publisher'); INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('AbnormalBackendTime', 'publisher'); INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('AbnormalRequestsPerMin', 'subscriber'); INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('AbnormalRequestPattern', 'subscriber'); INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('UnusualIPAccess', 'subscriber'); INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('FrequentTierLimitHitting', 'subscriber'); INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('ApiHealthMonitor', 'publisher'); -- AM Throttling tables -- IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_POLICY_SUBSCRIPTION]') AND TYPE IN (N'U')) CREATE TABLE AM_POLICY_SUBSCRIPTION ( POLICY_ID INTEGER IDENTITY(1,1), NAME VARCHAR(512) NOT NULL, DISPLAY_NAME VARCHAR(512) NULL DEFAULT NULL, TENANT_ID INTEGER NOT NULL, DESCRIPTION VARCHAR(1024) NULL DEFAULT NULL, QUOTA_TYPE VARCHAR(25) NOT NULL, QUOTA INTEGER NOT NULL, QUOTA_UNIT VARCHAR(10) NULL, UNIT_TIME INTEGER NOT NULL, TIME_UNIT VARCHAR(25) NOT NULL, RATE_LIMIT_COUNT INTEGER NULL DEFAULT NULL, RATE_LIMIT_TIME_UNIT VARCHAR(25) NULL DEFAULT NULL, IS_DEPLOYED BIT NOT
-- Throttling policy tables: subscription/application/hard-throttling policies, API-level
-- throttle policies with condition groups, and the per-group query-parameter, header,
-- JWT-claim and IP conditions; condition rows cascade with their condition group.
NULL DEFAULT 0, CUSTOM_ATTRIBUTES VARBINARY(MAX) DEFAULT NULL, STOP_ON_QUOTA_REACH BIT NOT NULL DEFAULT 0, BILLING_PLAN VARCHAR(20) NOT NULL, UUID VARCHAR(256), MONETIZATION_PLAN VARCHAR(25) NULL DEFAULT NULL, FIXED_RATE VARCHAR(15) NULL DEFAULT NULL, BILLING_CYCLE VARCHAR(15) NULL DEFAULT NULL, PRICE_PER_REQUEST VARCHAR(15) NULL DEFAULT NULL, CURRENCY VARCHAR(15) NULL DEFAULT NULL, MAX_COMPLEXITY INTEGER NOT NULL DEFAULT 0, MAX_DEPTH INTEGER NOT NULL DEFAULT 0, PRIMARY KEY (POLICY_ID), UNIQUE (NAME, TENANT_ID), UNIQUE (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_POLICY_APPLICATION]') AND TYPE IN (N'U')) CREATE TABLE AM_POLICY_APPLICATION ( POLICY_ID INTEGER IDENTITY(1,1), NAME VARCHAR(512) NOT NULL, DISPLAY_NAME VARCHAR(512) NULL DEFAULT NULL, TENANT_ID INTEGER NOT NULL, DESCRIPTION VARCHAR(1024) NULL DEFAULT NULL, QUOTA_TYPE VARCHAR(25) NOT NULL, QUOTA INTEGER NOT NULL, QUOTA_UNIT VARCHAR(10) NULL DEFAULT NULL, UNIT_TIME INTEGER NOT NULL, TIME_UNIT VARCHAR(25) NOT NULL, IS_DEPLOYED BIT NOT NULL DEFAULT 0, CUSTOM_ATTRIBUTES VARBINARY(MAX) DEFAULT NULL, UUID VARCHAR(256), PRIMARY KEY (POLICY_ID), UNIQUE (NAME, TENANT_ID), UNIQUE (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_POLICY_HARD_THROTTLING]') AND TYPE IN (N'U')) CREATE TABLE AM_POLICY_HARD_THROTTLING ( POLICY_ID INTEGER IDENTITY(1,1), NAME VARCHAR(512) NOT NULL, TENANT_ID INTEGER NOT NULL, DESCRIPTION VARCHAR(1024) NULL DEFAULT NULL, QUOTA_TYPE VARCHAR(25) NOT NULL, QUOTA INTEGER NOT NULL, QUOTA_UNIT VARCHAR(10) NULL DEFAULT NULL, UNIT_TIME INTEGER NOT NULL, TIME_UNIT VARCHAR(25) NOT NULL, IS_DEPLOYED BIT NOT NULL DEFAULT 0, PRIMARY KEY (POLICY_ID), UNIQUE (NAME, TENANT_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_THROTTLE_POLICY]') AND TYPE IN (N'U')) CREATE TABLE AM_API_THROTTLE_POLICY ( POLICY_ID INTEGER IDENTITY(1,1), NAME VARCHAR(512) NOT NULL, DISPLAY_NAME
VARCHAR(512) NULL DEFAULT NULL, TENANT_ID INTEGER NOT NULL, DESCRIPTION VARCHAR (1024), DEFAULT_QUOTA_TYPE VARCHAR(25) NOT NULL, DEFAULT_QUOTA INTEGER NOT NULL, DEFAULT_QUOTA_UNIT VARCHAR(10) NULL, DEFAULT_UNIT_TIME INTEGER NOT NULL, DEFAULT_TIME_UNIT VARCHAR(25) NOT NULL, APPLICABLE_LEVEL VARCHAR(25) NOT NULL, IS_DEPLOYED BIT NOT NULL DEFAULT 0, UUID VARCHAR(256), PRIMARY KEY (POLICY_ID), UNIQUE (NAME, TENANT_ID), UNIQUE (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_CONDITION_GROUP]') AND TYPE IN (N'U')) CREATE TABLE AM_CONDITION_GROUP ( CONDITION_GROUP_ID INTEGER IDENTITY(1,1), POLICY_ID INTEGER NOT NULL, QUOTA_TYPE VARCHAR(25), QUOTA INTEGER NOT NULL, QUOTA_UNIT VARCHAR(10) NULL DEFAULT NULL, UNIT_TIME INTEGER NOT NULL, TIME_UNIT VARCHAR(25) NOT NULL, DESCRIPTION VARCHAR (1024) NULL DEFAULT NULL, PRIMARY KEY (CONDITION_GROUP_ID), FOREIGN KEY (POLICY_ID) REFERENCES AM_API_THROTTLE_POLICY(POLICY_ID) ON DELETE CASCADE ON UPDATE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_QUERY_PARAMETER_CONDITION]') AND TYPE IN (N'U')) CREATE TABLE AM_QUERY_PARAMETER_CONDITION ( QUERY_PARAMETER_ID INTEGER IDENTITY(1,1), CONDITION_GROUP_ID INTEGER NOT NULL, PARAMETER_NAME VARCHAR(255) DEFAULT NULL, PARAMETER_VALUE VARCHAR(255) DEFAULT NULL, IS_PARAM_MAPPING BIT DEFAULT 1, PRIMARY KEY (QUERY_PARAMETER_ID), FOREIGN KEY (CONDITION_GROUP_ID) REFERENCES AM_CONDITION_GROUP(CONDITION_GROUP_ID) ON DELETE CASCADE ON UPDATE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_HEADER_FIELD_CONDITION]') AND TYPE IN (N'U')) CREATE TABLE AM_HEADER_FIELD_CONDITION ( HEADER_FIELD_ID INTEGER IDENTITY(1,1), CONDITION_GROUP_ID INTEGER NOT NULL, HEADER_FIELD_NAME VARCHAR(255) DEFAULT NULL, HEADER_FIELD_VALUE VARCHAR(255) DEFAULT NULL, IS_HEADER_FIELD_MAPPING BIT DEFAULT 1, PRIMARY KEY (HEADER_FIELD_ID), FOREIGN KEY (CONDITION_GROUP_ID) REFERENCES
AM_CONDITION_GROUP(CONDITION_GROUP_ID) ON DELETE CASCADE ON UPDATE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_JWT_CLAIM_CONDITION]') AND TYPE IN (N'U')) CREATE TABLE AM_JWT_CLAIM_CONDITION ( JWT_CLAIM_ID INTEGER IDENTITY(1,1), CONDITION_GROUP_ID INTEGER NOT NULL, CLAIM_URI VARCHAR(512) DEFAULT NULL, CLAIM_ATTRIB VARCHAR(1024) DEFAULT NULL, IS_CLAIM_MAPPING BIT DEFAULT 1, PRIMARY KEY (JWT_CLAIM_ID), FOREIGN KEY (CONDITION_GROUP_ID) REFERENCES AM_CONDITION_GROUP(CONDITION_GROUP_ID) ON DELETE CASCADE ON UPDATE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_IP_CONDITION]') AND TYPE IN (N'U')) CREATE TABLE AM_IP_CONDITION ( AM_IP_CONDITION_ID INTEGER IDENTITY(1,1), STARTING_IP VARCHAR(45) NULL, ENDING_IP VARCHAR(45) NULL, SPECIFIC_IP VARCHAR(45) NULL, WITHIN_IP_RANGE BIT DEFAULT 1, CONDITION_GROUP_ID INT NULL, PRIMARY KEY (AM_IP_CONDITION_ID), FOREIGN KEY (CONDITION_GROUP_ID) REFERENCES AM_CONDITION_GROUP (CONDITION_GROUP_ID) ON DELETE CASCADE ON UPDATE CASCADE); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_POLICY_GLOBAL]') AND TYPE IN (N'U')) CREATE TABLE AM_POLICY_GLOBAL ( POLICY_ID INTEGER IDENTITY(1,1), NAME VARCHAR(512) NOT NULL, KEY_TEMPLATE VARCHAR(512) NOT NULL, TENANT_ID INTEGER NOT NULL, DESCRIPTION VARCHAR(1024) NULL DEFAULT NULL, SIDDHI_QUERY VARBINARY(MAX) DEFAULT NULL, IS_DEPLOYED BIT NOT NULL DEFAULT 0, UUID VARCHAR(256), PRIMARY KEY (POLICY_ID), UNIQUE (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_THROTTLE_TIER_PERMISSIONS]') AND TYPE IN (N'U')) CREATE TABLE AM_THROTTLE_TIER_PERMISSIONS ( THROTTLE_TIER_PERMISSIONS_ID INTEGER IDENTITY(1,1), TIER VARCHAR(50) NULL, PERMISSIONS_TYPE VARCHAR(50) NULL, ROLES VARCHAR(512) NULL, TENANT_ID INTEGER NULL, PRIMARY KEY (THROTTLE_TIER_PERMISSIONS_ID)); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID =
-- Block conditions, certificate metadata, client certificates (composite PK includes
-- REMOVED + REVISION_UUID so historical rows can coexist), application group mappings,
-- usage uploads, lifecycle publish events, application attributes, labels, system apps,
-- bot-detection notification subscribers, revoked JWTs, API categories, users,
-- key managers and gateway published-API details.
OBJECT_ID(N'[DBO].[AM_BLOCK_CONDITIONS]') AND TYPE IN (N'U')) CREATE TABLE AM_BLOCK_CONDITIONS ( CONDITION_ID INTEGER IDENTITY(1,1), TYPE varchar(45) DEFAULT NULL, VALUE varchar(512) DEFAULT NULL, ENABLED varchar(45) DEFAULT NULL, DOMAIN varchar(45) DEFAULT NULL, UUID VARCHAR(256), PRIMARY KEY (CONDITION_ID), UNIQUE (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_CERTIFICATE_METADATA]') AND TYPE IN (N'U')) CREATE TABLE AM_CERTIFICATE_METADATA ( TENANT_ID INTEGER NOT NULL, ALIAS VARCHAR(255) NOT NULL, END_POINT VARCHAR(255) NOT NULL, CERTIFICATE VARBINARY(MAX) DEFAULT NULL, CONSTRAINT PK_ALIAS PRIMARY KEY (ALIAS) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_CLIENT_CERTIFICATE]') AND TYPE IN (N'U')) CREATE TABLE AM_API_CLIENT_CERTIFICATE ( TENANT_ID INTEGER NOT NULL, ALIAS VARCHAR(45) NOT NULL, API_ID INTEGER NOT NULL, CERTIFICATE VARBINARY(MAX) NOT NULL, REMOVED BIT NOT NULL DEFAULT 0, TIER_NAME VARCHAR(512), REVISION_UUID VARCHAR(255) NOT NULL DEFAULT 'Current API', PRIMARY KEY (ALIAS, TENANT_ID, REMOVED, REVISION_UUID), FOREIGN KEY (API_ID) REFERENCES AM_API(API_ID) ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION_GROUP_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE AM_APPLICATION_GROUP_MAPPING ( APPLICATION_ID INTEGER NOT NULL, GROUP_ID VARCHAR(512), TENANT VARCHAR(255), PRIMARY KEY (APPLICATION_ID,GROUP_ID,TENANT), FOREIGN KEY (APPLICATION_ID) REFERENCES AM_APPLICATION(APPLICATION_ID) ON DELETE CASCADE ON UPDATE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_USAGE_UPLOADED_FILES]') AND TYPE IN (N'U')) CREATE TABLE AM_USAGE_UPLOADED_FILES ( TENANT_DOMAIN VARCHAR(255) NOT NULL, FILE_NAME VARCHAR(255) NOT NULL, FILE_TIMESTAMP DATETIME DEFAULT GETDATE(), FILE_PROCESSED INTEGER DEFAULT 0, FILE_CONTENT VARBINARY(MAX) DEFAULT NULL, PRIMARY KEY (TENANT_DOMAIN, FILE_NAME,
FILE_TIMESTAMP) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_LC_PUBLISH_EVENTS]') AND TYPE IN (N'U')) CREATE TABLE AM_API_LC_PUBLISH_EVENTS ( ID INTEGER NOT NULL IDENTITY, TENANT_DOMAIN VARCHAR(255) NOT NULL, API_ID VARCHAR(500) NOT NULL, EVENT_TIME DATETIME DEFAULT GETDATE(), PRIMARY KEY (ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION_ATTRIBUTES]') AND TYPE IN (N'U')) CREATE TABLE AM_APPLICATION_ATTRIBUTES ( APPLICATION_ID INTEGER NOT NULL, NAME VARCHAR(255) NOT NULL, VALUE VARCHAR(1024) NOT NULL, TENANT_ID INTEGER NOT NULL, PRIMARY KEY (APPLICATION_ID,NAME), FOREIGN KEY (APPLICATION_ID) REFERENCES AM_APPLICATION (APPLICATION_ID) ON DELETE CASCADE ON UPDATE CASCADE ) ; IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_LABELS]') AND TYPE IN (N'U')) CREATE TABLE AM_LABELS ( LABEL_ID VARCHAR(50), NAME VARCHAR(255), DESCRIPTION VARCHAR(1024), TENANT_DOMAIN VARCHAR(255), UNIQUE (NAME,TENANT_DOMAIN), PRIMARY KEY (LABEL_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_LABEL_URLS]') AND TYPE IN (N'U')) CREATE TABLE AM_LABEL_URLS ( LABEL_ID VARCHAR(50), ACCESS_URL VARCHAR(255), PRIMARY KEY (LABEL_ID,ACCESS_URL), FOREIGN KEY (LABEL_ID) REFERENCES AM_LABELS(LABEL_ID) ON UPDATE CASCADE ON DELETE CASCADE ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'AM_SYSTEM_APPS') AND TYPE IN (N'U')) CREATE TABLE AM_SYSTEM_APPS ( ID INTEGER IDENTITY, NAME VARCHAR(50) NOT NULL, CONSUMER_KEY VARCHAR(512) NOT NULL, CONSUMER_SECRET VARCHAR(512) NOT NULL, TENANT_DOMAIN VARCHAR(255) DEFAULT 'carbon.super', CREATED_TIME DATETIME2(6) DEFAULT CURRENT_TIMESTAMP, UNIQUE (CONSUMER_KEY), PRIMARY KEY (ID) ); -- BotDATA Email table-- IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_NOTIFICATION_SUBSCRIBER]') AND TYPE IN (N'U')) CREATE TABLE AM_NOTIFICATION_SUBSCRIBER ( UUID
VARCHAR(255), CATEGORY VARCHAR(255), NOTIFICATION_METHOD VARCHAR(255), SUBSCRIBER_ADDRESS VARCHAR(255) NOT NULL, PRIMARY KEY(UUID, SUBSCRIBER_ADDRESS) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_REVOKED_JWT]') AND TYPE IN (N'U')) CREATE TABLE AM_REVOKED_JWT ( UUID VARCHAR(255) NOT NULL, SIGNATURE VARCHAR(2048) NOT NULL, EXPIRY_TIMESTAMP BIGINT NOT NULL, TENANT_ID INTEGER DEFAULT -1, TOKEN_TYPE VARCHAR(15) DEFAULT 'DEFAULT', TIME_CREATED DATETIME DEFAULT GETDATE(), PRIMARY KEY (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_CATEGORIES]') AND TYPE IN (N'U')) CREATE TABLE AM_API_CATEGORIES ( UUID VARCHAR(50), NAME VARCHAR(255), DESCRIPTION VARCHAR(1024), TENANT_ID INTEGER DEFAULT -1, UNIQUE (NAME,TENANT_ID), PRIMARY KEY (UUID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_USER]') AND TYPE IN (N'U')) CREATE TABLE AM_USER ( USER_ID VARCHAR(255) NOT NULL, USER_NAME VARCHAR(255) NOT NULL, PRIMARY KEY(USER_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_KEY_MANAGER]') AND TYPE IN (N'U')) CREATE TABLE AM_KEY_MANAGER ( UUID VARCHAR(50) NOT NULL, NAME VARCHAR(100) NULL, DISPLAY_NAME VARCHAR(100) NULL, DESCRIPTION VARCHAR(256) NULL, TYPE VARCHAR(45) NULL, CONFIGURATION VARBINARY(MAX) NULL, ENABLED BIT DEFAULT 1, TENANT_DOMAIN VARCHAR(100) NULL, PRIMARY KEY (UUID), UNIQUE (NAME,TENANT_DOMAIN) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GW_PUBLISHED_API_DETAILS]') AND TYPE IN (N'U')) CREATE TABLE AM_GW_PUBLISHED_API_DETAILS ( API_ID varchar(255) NOT NULL, TENANT_DOMAIN varchar(255), API_PROVIDER varchar(255), API_NAME varchar(255), API_VERSION varchar(255), API_TYPE varchar(50), PRIMARY KEY (API_ID) ); -- AM_GW_PUBLISHED_API_DETAILS & AM_GW_API_ARTIFACTS are independent tables for Artifact synchronizer feature which -- -- should not have any referential integrity constraints
-- with other tables in AM database --
-- Gateway artifact/deployment tables, API revisions, gateway environments, virtual hosts
-- and the start of the service catalog. Each CREATE is guarded by a SYS.OBJECTS check.
-- FIX(review): removed the four MySQL-only ")ENGINE INNODB" table options, which are
-- invalid Transact-SQL and abort the batch on SQL Server, and corrected the existence
-- guard for AM_REVISION, which previously tested AM_SCOPE_BINDING and therefore would
-- never create AM_REVISION once AM_SCOPE_BINDING existed.
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GW_API_ARTIFACTS]') AND TYPE IN (N'U'))
CREATE TABLE AM_GW_API_ARTIFACTS (
    API_ID varchar(255) NOT NULL,
    REVISION_ID varchar(255) NOT NULL,
    ARTIFACT VARBINARY(MAX),
    TIME_STAMP DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (REVISION_ID, API_ID),
    FOREIGN KEY (API_ID) REFERENCES AM_GW_PUBLISHED_API_DETAILS(API_ID) ON UPDATE CASCADE ON DELETE NO ACTION
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GW_API_DEPLOYMENTS]') AND TYPE IN (N'U'))
CREATE TABLE AM_GW_API_DEPLOYMENTS (
    API_ID VARCHAR(255) NOT NULL,
    REVISION_ID VARCHAR(255) NOT NULL,
    LABEL VARCHAR(255) NOT NULL,
    PRIMARY KEY (REVISION_ID, API_ID,LABEL),
    FOREIGN KEY (API_ID) REFERENCES AM_GW_PUBLISHED_API_DETAILS(API_ID) ON UPDATE CASCADE ON DELETE NO ACTION
) ;
-- Tenant Themes Table --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_TENANT_THEMES]') AND TYPE IN (N'U'))
CREATE TABLE AM_TENANT_THEMES (
    TENANT_ID INTEGER NOT NULL,
    THEME VARBINARY(MAX) NOT NULL,
    PRIMARY KEY (TENANT_ID)
);
-- End of API-MGT Tables --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SCOPE]') AND TYPE IN (N'U'))
CREATE TABLE AM_SCOPE (
    SCOPE_ID INTEGER IDENTITY,
    NAME VARCHAR(255) NOT NULL,
    DISPLAY_NAME VARCHAR(255) NOT NULL,
    DESCRIPTION VARCHAR(512),
    TENANT_ID INTEGER NOT NULL DEFAULT -1,
    SCOPE_TYPE VARCHAR(255) NOT NULL,
    PRIMARY KEY (SCOPE_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SCOPE_BINDING]') AND TYPE IN (N'U'))
CREATE TABLE AM_SCOPE_BINDING (
    SCOPE_ID INTEGER NOT NULL,
    SCOPE_BINDING VARCHAR(255) NOT NULL,
    BINDING_TYPE VARCHAR(255) NOT NULL,
    FOREIGN KEY (SCOPE_ID) REFERENCES AM_SCOPE(SCOPE_ID) ON DELETE CASCADE
);
-- FIX(review): this guard previously checked AM_SCOPE_BINDING (copy/paste error)
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_REVISION]') AND TYPE IN (N'U'))
CREATE TABLE AM_REVISION (
    ID INTEGER NOT NULL,
    API_UUID VARCHAR(256) NOT NULL,
    REVISION_UUID VARCHAR(255) NOT NULL,
    DESCRIPTION VARCHAR(255),
    CREATED_TIME DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CREATED_BY VARCHAR(255),
    PRIMARY KEY (ID, API_UUID),
    UNIQUE(REVISION_UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_DEPLOYMENT_REVISION_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_DEPLOYMENT_REVISION_MAPPING (
    NAME VARCHAR(255) NOT NULL,
    VHOST VARCHAR(255) NULL,
    REVISION_UUID VARCHAR(255) NOT NULL,
    DISPLAY_ON_DEVPORTAL BIT DEFAULT 0,
    DEPLOYED_TIME DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (NAME, REVISION_UUID),
    FOREIGN KEY (REVISION_UUID) REFERENCES AM_REVISION(REVISION_UUID) ON UPDATE CASCADE ON DELETE CASCADE
);
-- Gateway Environments Table --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GATEWAY_ENVIRONMENT]') AND TYPE IN (N'U'))
CREATE TABLE AM_GATEWAY_ENVIRONMENT (
    ID INTEGER IDENTITY,
    UUID VARCHAR(45) NOT NULL,
    NAME VARCHAR(255) NOT NULL,
    TENANT_DOMAIN VARCHAR(255),
    DISPLAY_NAME VARCHAR(255) NULL,
    DESCRIPTION VARCHAR(1023) NULL,
    UNIQUE (NAME, TENANT_DOMAIN),
    UNIQUE (UUID),
    PRIMARY KEY (ID)
);
-- Virtual Hosts Table --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GW_VHOST]') AND TYPE IN (N'U'))
CREATE TABLE AM_GW_VHOST (
    GATEWAY_ENV_ID INTEGER,
    HOST VARCHAR(255) NOT NULL,
    HTTP_CONTEXT VARCHAR(255) NULL,
    HTTP_PORT VARCHAR(5) NOT NULL,
    HTTPS_PORT VARCHAR(5) NOT NULL,
    WS_PORT VARCHAR(5) NOT NULL,
    WSS_PORT VARCHAR(5) NOT NULL,
    FOREIGN KEY (GATEWAY_ENV_ID) REFERENCES AM_GATEWAY_ENVIRONMENT(ID) ON UPDATE CASCADE ON DELETE CASCADE,
    PRIMARY KEY (GATEWAY_ENV_ID, HOST)
);
-- Service Catalog Tables --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SERVICE_CATALOG]') AND TYPE IN (N'U'))
CREATE TABLE AM_SERVICE_CATALOG (
    UUID VARCHAR(36) NOT NULL,
    SERVICE_KEY VARCHAR(100) NOT NULL,
    MD5
VARCHAR(100) NOT NULL, SERVICE_NAME VARCHAR(255) NOT NULL, DISPLAY_NAME VARCHAR(255) NOT NULL, SERVICE_VERSION VARCHAR(30) NOT NULL, SERVICE_URL VARCHAR(2048) NOT NULL, TENANT_ID INTEGER NOT NULL, DEFINITION_TYPE VARCHAR(20), DEFINITION_URL VARCHAR(2048), DESCRIPTION VARCHAR(1024), SECURITY_TYPE VARCHAR(50), MUTUAL_SSL_ENABLED BIT DEFAULT 0, CREATED_TIME DATETIME NULL, LAST_UPDATED_TIME DATETIME NULL, CREATED_BY VARCHAR(255), UPDATED_BY VARCHAR(255), SERVICE_DEFINITION VARBINARY(MAX) NOT NULL, METADATA VARBINARY(MAX) NOT NULL, PRIMARY KEY (UUID), CONSTRAINT SERVICE_KEY_TENANT UNIQUE(SERVICE_KEY, TENANT_ID), CONSTRAINT SERVICE_NAME_VERSION_TENANT UNIQUE (SERVICE_NAME, SERVICE_VERSION, TENANT_ID) ); -- Webhooks -- IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_WEBHOOKS_SUBSCRIPTION]') AND TYPE IN (N'U')) CREATE TABLE AM_WEBHOOKS_SUBSCRIPTION ( WH_SUBSCRIPTION_ID INTEGER IDENTITY, API_UUID VARCHAR(255) NOT NULL, APPLICATION_ID VARCHAR(20) NOT NULL, TENANT_DOMAIN VARCHAR(255) NOT NULL, HUB_CALLBACK_URL VARCHAR(1024) NOT NULL, HUB_TOPIC VARCHAR(255) NOT NULL, HUB_SECRET VARCHAR(2048), HUB_LEASE_SECONDS INTEGER, UPDATED_AT TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, EXPIRY_AT BIGINT, DELIVERED_AT TIMESTAMP NULL, DELIVERY_STATE INTEGER, PRIMARY KEY (WH_SUBSCRIPTION_ID) ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_WEBHOOKS_UNSUBSCRIPTION]') AND TYPE IN (N'U')) CREATE TABLE AM_WEBHOOKS_UNSUBSCRIPTION ( API_UUID VARCHAR(255) NOT NULL, APPLICATION_ID VARCHAR(20) NOT NULL, TENANT_DOMAIN VARCHAR(255) NOT NULL, HUB_CALLBACK_URL VARCHAR(1024) NOT NULL, HUB_TOPIC VARCHAR(255) NOT NULL, HUB_SECRET VARCHAR(2048), HUB_LEASE_SECONDS INTEGER, ADDED_AT TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ); IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_SERVICE_MAPPING]') AND TYPE IN (N'U')) CREATE TABLE AM_API_SERVICE_MAPPING ( API_ID INTEGER NOT NULL, SERVICE_KEY VARCHAR(256) NOT 
NULL, MD5 VARCHAR(100) NOT NULL, TENANT_ID INTEGER NOT NULL, PRIMARY KEY (API_ID, SERVICE_KEY), FOREIGN KEY (API_ID) REFERENCES AM_API(API_ID) ON DELETE CASCADE ); --Performance indexes start-- create index IDX_ITS_LMT on IDN_THRIFT_SESSION (LAST_MODIFIED_TIME); create index IDX_IOAT_UT on IDN_OAUTH2_ACCESS_TOKEN (USER_TYPE); create index IDX_AAI_CTX on AM_API (CONTEXT); create index IDX_AAKM_CK on AM_APPLICATION_KEY_MAPPING (CONSUMER_KEY); create index IDX_AAUM_AI on AM_API_URL_MAPPING (API_ID); create index IDX_AAPM_AI on AM_API_PRODUCT_MAPPING (API_ID); create index IDX_AAUM_TT on AM_API_URL_MAPPING (THROTTLING_TIER); create index IDX_AATP_DQT on AM_API_THROTTLE_POLICY (DEFAULT_QUOTA_TYPE); create index IDX_ACG_QT on AM_CONDITION_GROUP (QUOTA_TYPE); create index IDX_APS_QT on AM_POLICY_SUBSCRIPTION (QUOTA_TYPE); create index IDX_AS_AITIAI on AM_SUBSCRIPTION (API_ID,TIER_ID,APPLICATION_ID); create index IDX_APA_QT on AM_POLICY_APPLICATION (QUOTA_TYPE); create index IDX_AA_AT_CB on AM_APPLICATION (APPLICATION_TIER,CREATED_BY); -- Performance indexes end--
Java
<?php
namespace Deliveryboy\V1\Rest\Orderproducts;

/**
 * Plain value object for a single order-product row.
 *
 * Exposes every column as a public property and converts to/from a plain
 * associative array via getArrayCopy()/exchangeArray().
 */
class OrderproductsEntity
{
    public $order_product_id;
    public $order_id;
    public $order_product_name;
    public $order_item_id;
    public $order_offer_id;
    public $order_type;
    public $package_id;
    public $unit_price;
    public $total_price;
    public $quantity;

    /**
     * Canonical list of field names, in the order they are exported.
     */
    private static $fields = array(
        'order_product_id',
        'order_id',
        'order_product_name',
        'order_item_id',
        'order_offer_id',
        'order_type',
        'package_id',
        'unit_price',
        'total_price',
        'quantity',
    );

    /**
     * Returns all fields as an associative array keyed by field name.
     *
     * @return array
     */
    public function getArrayCopy()
    {
        $copy = array();
        foreach (self::$fields as $field) {
            $copy[$field] = $this->$field;
        }
        return $copy;
    }

    /**
     * Populates every field from the given associative array.
     * All keys listed in self::$fields are expected to be present.
     *
     * @param array $array source values keyed by field name
     */
    public function exchangeArray(array $array)
    {
        foreach (self::$fields as $field) {
            $this->$field = $array[$field];
        }
    }
}
Java
/* * Copyright (C) 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.googlecode.android_scripting.language; import com.googlecode.android_scripting.rpc.ParameterDescriptor; /** * Represents the BeanShell programming language. * * @author igor.v.karp@gmail.com (Igor Karp) */ public class BeanShellLanguage extends Language { @Override protected String getImportStatement() { // FIXME(igor.v.karp): this is interpreter specific return "source(\"/sdcard/com.googlecode.bshforandroid/extras/bsh/android.bsh\");\n"; } @Override protected String getRpcReceiverDeclaration(String rpcReceiver) { return rpcReceiver + " = Android();\n"; } @Override protected String getMethodCallText(String receiver, String method, ParameterDescriptor[] parameters) { StringBuilder result = new StringBuilder().append(getApplyReceiverText(receiver)).append(getApplyOperatorText()) .append(method); if (parameters.length > 0) { result.append(getLeftParametersText()); } else { result.append(getQuote()); } String separator = ""; for (ParameterDescriptor parameter : parameters) { result.append(separator).append(getValueText(parameter)); separator = getParameterSeparator(); } result.append(getRightParametersText()); return result.toString(); } @Override protected String getApplyOperatorText() { return ".call(\""; } @Override protected String getLeftParametersText() { return "\", "; } @Override protected String getRightParametersText() { return ")"; } }
Java
/* * Copyright [2017] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netpet.spools.book.insidethejavavirtualmachine.chapter18; /** * @Desc javap -verbose / javap -c Hello.class 查看字节码文件 * Created by woncz on 2017/8/18. */ public class Hello { }
Java
<?php
/*******************************************************************************
 * Copyright 2009-2014 Amazon Services. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 *
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at: http://aws.amazon.com/apache2.0
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *******************************************************************************
 * PHP Version 5
 * @category Amazon
 * @package  Marketplace Web Service Sellers
 * @version  2011-07-01
 * Library Version: 2014-10-20
 * Generated: Fri Oct 17 18:34:06 GMT 2014
 */

/**
 * Mock implementation of the Sellers service: every operation is answered from
 * a canned XML response file bundled under the Mock/ directory, ignoring the
 * request contents entirely.
 */
class MarketplaceWebServiceSellers_Mock implements MarketplaceWebServiceSellers_Interface
{
    // Public API ------------------------------------------------------------//

    /**
     * Get Service Status
     * Returns the service status of a particular MWS API section. The operation
     * takes no input.
     * All API sections within the API are required to implement this operation.
     *
     * @param mixed $request array of parameters for MarketplaceWebServiceSellers_Model_GetServiceStatus request or MarketplaceWebServiceSellers_Model_GetServiceStatus object itself
     * @see MarketplaceWebServiceSellers_Model_GetServiceStatus
     * @return MarketplaceWebServiceSellers_Model_GetServiceStatusResponse
     *
     * @throws MarketplaceWebServiceSellers_Exception
     */
    public function getServiceStatus($request)
    {
        $xml = $this->_invoke('GetServiceStatus');
        return MarketplaceWebServiceSellers_Model_GetServiceStatusResponse::fromXML($xml);
    }

    /**
     * List Marketplace Participations
     * Returns a list of marketplaces that the seller submitting the request can sell in,
     * and a list of participations that include seller-specific information in that marketplace.
     *
     * @param mixed $request array of parameters for MarketplaceWebServiceSellers_Model_ListMarketplaceParticipations request or MarketplaceWebServiceSellers_Model_ListMarketplaceParticipations object itself
     * @see MarketplaceWebServiceSellers_Model_ListMarketplaceParticipations
     * @return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsResponse
     *
     * @throws MarketplaceWebServiceSellers_Exception
     */
    public function listMarketplaceParticipations($request)
    {
        $xml = $this->_invoke('ListMarketplaceParticipations');
        return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsResponse::fromXML($xml);
    }

    /**
     * List Marketplace Participations By Next Token
     * Returns the next page of marketplaces and participations using the NextToken value
     * that was returned by your previous request to either ListMarketplaceParticipations or
     * ListMarketplaceParticipationsByNextToken.
     *
     * @param mixed $request array of parameters for MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextToken request or MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextToken object itself
     * @see MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextToken
     * @return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextTokenResponse
     *
     * @throws MarketplaceWebServiceSellers_Exception
     */
    public function listMarketplaceParticipationsByNextToken($request)
    {
        $xml = $this->_invoke('ListMarketplaceParticipationsByNextToken');
        return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextTokenResponse::fromXML($xml);
    }

    // Private API ------------------------------------------------------------//

    /**
     * Loads the canned XML response for the given action from the Mock/ folder
     * (searching the include path, as the original generated code did).
     *
     * @param string $actionName operation name, e.g. 'GetServiceStatus'
     * @return string raw XML response body
     */
    private function _invoke($actionName)
    {
        $responseFile = dirname(__FILE__) . '/Mock/' . $actionName . 'Response.xml';
        return file_get_contents($responseFile, /** search include path */ true);
    }
}
Java
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace Trifolia.DB
{
    /// <summary>
    /// Conformance verbs (RFC 2119-style requirement levels) with explicit
    /// numeric values; <c>UNKNOWN</c> (999) is the sentinel for an
    /// unrecognized level.
    /// </summary>
    public enum Conformance
    {
        SHALL = 1,
        SHALL_NOT = 2,
        SHOULD = 3,
        SHOULD_NOT = 4,
        MAY = 5,
        MAY_NOT = 6,
        UNKNOWN = 999
    }
}
Java
package com.ctrip.xpipe.redis.checker.alert.manager; import com.ctrip.xpipe.redis.checker.alert.ALERT_TYPE; import com.ctrip.xpipe.redis.checker.alert.AlertChannel; import com.ctrip.xpipe.redis.checker.alert.AlertConfig; import com.ctrip.xpipe.redis.checker.alert.AlertEntity; import com.ctrip.xpipe.redis.checker.alert.message.AlertEntityHolderManager; import com.ctrip.xpipe.redis.checker.alert.policy.channel.ChannelSelector; import com.ctrip.xpipe.redis.checker.alert.policy.channel.DefaultChannelSelector; import com.ctrip.xpipe.redis.checker.alert.policy.receiver.*; import com.ctrip.xpipe.redis.checker.alert.policy.timing.RecoveryTimeSlotControl; import com.ctrip.xpipe.redis.checker.alert.policy.timing.TimeSlotControl; import com.ctrip.xpipe.redis.checker.config.CheckerDbConfig; import com.ctrip.xpipe.redis.core.meta.MetaCache; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import javax.annotation.PostConstruct; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.LongSupplier; /** * @author chen.zhu * <p> * Oct 18, 2017 */ @Component public class AlertPolicyManager { @Autowired private AlertConfig alertConfig; @Autowired private CheckerDbConfig checkerDbConfig; @Autowired private MetaCache metaCache; private EmailReceiver emailReceiver; private GroupEmailReceiver groupEmailReceiver; private ChannelSelector channelSelector; private TimeSlotControl recoveryTimeController; @PostConstruct public void initPolicies() { emailReceiver = new DefaultEmailReceiver(alertConfig, checkerDbConfig, metaCache); groupEmailReceiver = new DefaultGroupEmailReceiver(alertConfig, checkerDbConfig, metaCache); channelSelector = new DefaultChannelSelector(); if(recoveryTimeController == null) { recoveryTimeController = new RecoveryTimeSlotControl(alertConfig); } } public List<AlertChannel> queryChannels(AlertEntity alert) { return 
channelSelector.alertChannels(alert); } public long queryRecoverMilli(AlertEntity alert) { return recoveryTimeController.durationMilli(alert); } public long querySuspendMilli(AlertEntity alert) { return TimeUnit.MINUTES.toMillis(alertConfig.getAlertSystemSuspendMinute()); } public EmailReceiverModel queryEmailReceivers(AlertEntity alert) { return emailReceiver.receivers(alert); } public void markCheckInterval(ALERT_TYPE alertType, LongSupplier checkInterval) { if(recoveryTimeController == null) { recoveryTimeController = new RecoveryTimeSlotControl(alertConfig); } recoveryTimeController.mark(alertType, checkInterval); } public Map<EmailReceiverModel, Map<ALERT_TYPE, Set<AlertEntity>>> queryGroupedEmailReceivers( AlertEntityHolderManager alerts) { return groupEmailReceiver.getGroupedEmailReceiver(alerts); } }
Java
package com.coolweather.android.util;

import okhttp3.OkHttpClient;
import okhttp3.Request;

/**
 * Thin helper around OkHttp for firing asynchronous GET requests.
 *
 * Created by fengj on 2017/1/27.
 */
public class HttpUtil {

    // One shared client for the whole app: each OkHttpClient owns its own
    // connection pool and dispatcher threads, so building a new client per
    // request (as the previous version did) wastes resources and defeats
    // connection reuse. OkHttpClient is thread-safe and meant to be shared.
    private static final OkHttpClient CLIENT = new OkHttpClient();

    // Utility class — static methods only, no instances.
    private HttpUtil() {
    }

    /**
     * Sends an asynchronous GET request to {@code address}; success or failure
     * is delivered to {@code callback} on an OkHttp worker thread.
     *
     * @param address  absolute URL to request
     * @param callback invoked with the response or the failure
     */
    public static void sendOkHttpRequest(String address, okhttp3.Callback callback) {
        Request request = new Request.Builder().url(address).build();
        CLIENT.newCall(request).enqueue(callback);
    }
}
Java
# Copyright 2018 Flight Lab authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for network related helpers."""

import socket


def get_ip():
  """Get primary IP (the one with a default route) of local machine.

  Works on both Linux and Windows and needs no working internet connection:
  connecting a UDP socket sends no packets, it only makes the OS pick the
  outgoing interface, whose address we then read back.

  Returns:
    str: the primary IPv4 address, or '127.0.0.1' if it cannot be determined.
  """
  s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
  try:
    # Destination doesn't even have to be reachable (UDP connect is local).
    s.connect(('10.255.255.255', 1))
    return s.getsockname()[0]
  except OSError:
    # Socket operations raise OSError; the bare `except:` used previously
    # also swallowed KeyboardInterrupt/SystemExit, which we must not catch.
    return '127.0.0.1'
  finally:
    s.close()
Java
/*
 * Copyright 2013
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.openntf.domino.iterators;

import java.util.Iterator;

import org.openntf.domino.Base;
import org.openntf.domino.Database;
import org.openntf.domino.DocumentCollection;
import org.openntf.domino.Session;
import org.openntf.domino.View;
import org.openntf.domino.ViewEntryCollection;
import org.openntf.domino.utils.DominoUtils;
import org.openntf.domino.utils.Factory;

/**
 * Base class for iterators over a Domino collection. Holds the backing
 * collection plus lazily-resolved {@link Session} and {@link Database}
 * references (the session/database fields are transient; on demand they are
 * re-resolved from the stored server name and file path).
 *
 * @param <T>
 *            the element type produced by the iterator
 */
public abstract class AbstractDominoIterator<T> implements Iterator<T> {

	/** Server name of the backing database; used to re-resolve it after serialization. */
	private String serverName_;

	/** File path of the backing database; used to re-resolve it after serialization. */
	private String filePath_;

	/** The collection being iterated (a DocumentCollection or ViewEntryCollection). */
	private Base<?> collection_;

	/** Cached session; transient — lazily re-acquired via {@link Factory}. */
	private transient Session session_;

	/** Cached database; transient — lazily re-opened from server name + file path. */
	private transient Database database_;

	/**
	 * Instantiates a new abstract domino iterator over the given collection;
	 * also captures session/database context from it (see {@link #setCollection(Base)}).
	 *
	 * @param collection
	 *            the collection to iterate
	 */
	protected AbstractDominoIterator(final Base<?> collection) {
		setCollection(collection);
	}

	/**
	 * Gets the session, lazily obtaining it from the {@link Factory} on first use.
	 *
	 * @return the session, or {@code null} if it could not be acquired
	 */
	protected Session getSession() {
		if (session_ == null) {
			try {
				session_ = Factory.getSession();
			} catch (Throwable e) {
				// Errors are routed through DominoUtils; callers get null instead of an exception.
				DominoUtils.handleException(e);
				return null;
			}
		}
		return session_;
	}

	/**
	 * Gets the database, lazily re-opening it from the stored server name and
	 * file path via the current session.
	 *
	 * @return the database, or {@code null} if it could not be opened
	 */
	protected Database getDatabase() {
		if (database_ == null) {
			Session session = getSession();
			try {
				database_ = session.getDatabase(getServerName(), getFilePath());
			} catch (Throwable e) {
				DominoUtils.handleException(e);
				return null;
			}
		}
		return database_;
	}

	/**
	 * Gets the file path of the backing database.
	 *
	 * @return the file path
	 */
	protected String getFilePath() {
		return filePath_;
	}

	/**
	 * Gets the server name of the backing database.
	 *
	 * @return the server name
	 */
	protected String getServerName() {
		return serverName_;
	}

	/**
	 * Sets the database by recording its file path and server name (the
	 * Database object itself is not stored here — it is re-resolved lazily).
	 *
	 * @param database
	 *            the new database
	 */
	protected void setDatabase(final Database database) {
		if (database != null) {
			try {
				setFilePath(database.getFilePath());
				setServerName(database.getServer());
			} catch (Throwable e) {
				DominoUtils.handleException(e);
			}
		}
	}

	/**
	 * Sets the file path.
	 *
	 * @param filePath
	 *            the new file path
	 */
	protected void setFilePath(final String filePath) {
		filePath_ = filePath;
	}

	/**
	 * Sets the server name.
	 *
	 * @param serverName
	 *            the new server name
	 */
	protected void setServerName(final String serverName) {
		serverName_ = serverName;
	}

	/**
	 * Gets the collection being iterated.
	 *
	 * @return the collection
	 */
	public Base<?> getCollection() {
		return collection_;
	}

	/**
	 * Sets the collection and derives the session/database context from it:
	 * for a DocumentCollection via its parent database, for a
	 * ViewEntryCollection via its parent view's database.
	 *
	 * @param collection
	 *            the new collection
	 */
	public void setCollection(final Base<?> collection) {
		if (collection != null) {
			if (collection instanceof DocumentCollection) {
				org.openntf.domino.Database parent = ((org.openntf.domino.DocumentCollection) collection).getParent();
				session_ = Factory.fromLotus(parent.getParent(), Session.SCHEMA, null); // FIXME NTF - this is suboptimal,
				database_ = Factory.fromLotus(parent, Database.SCHEMA, session_); // but we still need to
				// sort out the parent/child pattern
			} else if (collection instanceof ViewEntryCollection) {
				View vw = ((ViewEntryCollection) collection).getParent();
				database_ = vw.getParent();
				session_ = Factory.getSession(database_);
			}
			if (database_ != null) {
				// Record server name / file path so the database can be re-resolved later.
				setDatabase(database_);
			}
		}
		collection_ = collection;
	}
}
Java
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <title>Class triagens\ArangoDb\Vertex | ArangoDB-PHP API Documentation</title> <link rel="stylesheet" href="resources/bootstrap.min.css?973e37a8502921d56bc02bb55321f45b072b6f71"> <link rel="stylesheet" href="resources/style.css?49f43d3208c5d7e33fa16d36107a345bf11cc00d"> </head> <body> <nav id="navigation" class="navbar navbar-default navbar-fixed-top"> <div class="container-fluid"> <div class="navbar-header"> <a href="index.html" class="navbar-brand">ArangoDB-PHP API Documentation</a> </div> <div class="collapse navbar-collapse"> <form id="search" class="navbar-form navbar-left" role="search"> <input type="hidden" name="cx" value=""> <input type="hidden" name="ie" value="UTF-8"> <div class="form-group"> <input type="text" name="q" class="search-query form-control" placeholder="Search"> </div> </form> <ul class="nav navbar-nav"> <li> <a href="namespace-triagens.ArangoDb.html" title="Summary of triagens\ArangoDb"><span>Namespace</span></a> </li> <li class="active"> <span>Class</span> </li> <li class="divider-vertical"></li> <li> <a href="annotation-group-deprecated.html" title="List of elements with deprecated annotation"> <span>Deprecated</span> </a> </li> </ul> </div> </div> </nav> <div id="left"> <div id="menu"> <div id="groups"> <h3>Namespaces</h3> <ul> <li class="active"> <a href="namespace-triagens.html"> triagens<span></span> </a> <ul> <li class="active"> <a href="namespace-triagens.ArangoDb.html"> ArangoDb </a> </li> </ul></li> </ul> </div> <div id="elements"> <h3>Classes</h3> <ul> <li><a href="class-triagens.ArangoDb.AdminHandler.html">AdminHandler</a></li> <li><a href="class-triagens.ArangoDb.AqlUserFunction.html">AqlUserFunction</a></li> <li><a href="class-triagens.ArangoDb.Autoloader.html">Autoloader</a></li> <li><a href="class-triagens.ArangoDb.Batch.html">Batch</a></li> <li><a href="class-triagens.ArangoDb.BatchPart.html">BatchPart</a></li> <li><a 
href="class-triagens.ArangoDb.BindVars.html">BindVars</a></li> <li><a href="class-triagens.ArangoDb.Collection.html">Collection</a></li> <li><a href="class-triagens.ArangoDb.CollectionHandler.html">CollectionHandler</a></li> <li><a href="class-triagens.ArangoDb.Connection.html">Connection</a></li> <li><a href="class-triagens.ArangoDb.ConnectionOptions.html">ConnectionOptions</a></li> <li><a href="class-triagens.ArangoDb.Cursor.html">Cursor</a></li> <li><a href="class-triagens.ArangoDb.Database.html">Database</a></li> <li><a href="class-triagens.ArangoDb.DefaultValues.html">DefaultValues</a></li> <li><a href="class-triagens.ArangoDb.Document.html">Document</a></li> <li><a href="class-triagens.ArangoDb.DocumentHandler.html">DocumentHandler</a></li> <li><a href="class-triagens.ArangoDb.Edge.html">Edge</a></li> <li><a href="class-triagens.ArangoDb.EdgeDefinition.html">EdgeDefinition</a></li> <li><a href="class-triagens.ArangoDb.EdgeHandler.html">EdgeHandler</a></li> <li><a href="class-triagens.ArangoDb.Endpoint.html">Endpoint</a></li> <li><a href="class-triagens.ArangoDb.Export.html">Export</a></li> <li><a href="class-triagens.ArangoDb.ExportCursor.html">ExportCursor</a></li> <li><a href="class-triagens.ArangoDb.Graph.html">Graph</a></li> <li><a href="class-triagens.ArangoDb.GraphHandler.html">GraphHandler</a></li> <li><a href="class-triagens.ArangoDb.Handler.html">Handler</a></li> <li><a href="class-triagens.ArangoDb.HttpHelper.html">HttpHelper</a></li> <li><a href="class-triagens.ArangoDb.HttpResponse.html">HttpResponse</a></li> <li><a href="class-triagens.ArangoDb.QueryCacheHandler.html">QueryCacheHandler</a></li> <li><a href="class-triagens.ArangoDb.QueryHandler.html">QueryHandler</a></li> <li><a href="class-triagens.ArangoDb.Statement.html">Statement</a></li> <li><a href="class-triagens.ArangoDb.TraceRequest.html">TraceRequest</a></li> <li><a href="class-triagens.ArangoDb.TraceResponse.html">TraceResponse</a></li> <li><a 
href="class-triagens.ArangoDb.Transaction.html">Transaction</a></li> <li><a href="class-triagens.ArangoDb.Traversal.html">Traversal</a></li> <li><a href="class-triagens.ArangoDb.UpdatePolicy.html">UpdatePolicy</a></li> <li><a href="class-triagens.ArangoDb.UrlHelper.html">UrlHelper</a></li> <li><a href="class-triagens.ArangoDb.Urls.html">Urls</a></li> <li><a href="class-triagens.ArangoDb.User.html">User</a></li> <li><a href="class-triagens.ArangoDb.UserHandler.html">UserHandler</a></li> <li><a href="class-triagens.ArangoDb.ValueValidator.html">ValueValidator</a></li> <li class="active"><a href="class-triagens.ArangoDb.Vertex.html">Vertex</a></li> <li><a href="class-triagens.ArangoDb.VertexHandler.html">VertexHandler</a></li> </ul> <h3>Exceptions</h3> <ul> <li><a href="class-triagens.ArangoDb.ClientException.html">ClientException</a></li> <li><a href="class-triagens.ArangoDb.ConnectException.html">ConnectException</a></li> <li><a href="class-triagens.ArangoDb.Exception.html">Exception</a></li> <li><a href="class-triagens.ArangoDb.ServerException.html">ServerException</a></li> </ul> </div> </div> </div> <div id="splitter"></div> <div id="right"> <div id="rightInner"> <div id="content" class="class"> <h1>Class Vertex</h1> <div class="description"> <p>Value object representing a single vertex document</p> </div> <dl class="tree well"> <dd style="padding-left:0px"> <a href="class-triagens.ArangoDb.Document.html"><span>triagens\ArangoDb\Document</span></a> </dd> <dd style="padding-left:30px"> <img src="resources/inherit.png" alt="Extended by"> <b><span>triagens\ArangoDb\Vertex</span></b> </dd> </dl> <div class="alert alert-info"> <b>Namespace:</b> <a href="namespace-triagens.html">triagens</a>\<a href="namespace-triagens.ArangoDb.html">ArangoDb</a><br> <b>Package:</b> triagens\ArangoDb<br> <b>Since:</b> 1.2<br> <b>Located at</b> <a href="source-class-triagens.ArangoDb.Vertex.html#15-25" title="Go to source code">Vertex.php</a> <br> </div> <div class="panel panel-default"> 
<div class="panel-heading"><h2>Methods summary</h2></div> </div> <div class="panel panel-default"> <div class="panel-heading"><h3>Methods inherited from <a href="class-triagens.ArangoDb.Document.html#methods">triagens\ArangoDb\Document</a></h3></div> <p class="elementList"> <code><a href="class-triagens.ArangoDb.Document.html#___clone">__clone()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#___construct">__construct()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#___get">__get()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#___isset">__isset()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#___set">__set()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#___toString">__toString()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#___unset">__unset()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_createFromArray">createFromArray()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_filterHiddenAttributes">filterHiddenAttributes()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_get">get()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getAll">getAll()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getAllAsObject">getAllAsObject()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getAllForInsertUpdate">getAllForInsertUpdate()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getChanged">getChanged()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getCollectionId">getCollectionId()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getHandle">getHandle()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getHiddenAttributes">getHiddenAttributes()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getId">getId()</a></code>, <code><a 
href="class-triagens.ArangoDb.Document.html#_getInternalId">getInternalId()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getInternalKey">getInternalKey()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getIsNew">getIsNew()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getKey">getKey()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_getRevision">getRevision()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_isIgnoreHiddenAttributes">isIgnoreHiddenAttributes()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_set">set()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_setChanged">setChanged()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_setHiddenAttributes">setHiddenAttributes()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_setIgnoreHiddenAttributes">setIgnoreHiddenAttributes()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_setInternalId">setInternalId()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_setInternalKey">setInternalKey()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_setIsNew">setIsNew()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_setRevision">setRevision()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_toJson">toJson()</a></code>, <code><a href="class-triagens.ArangoDb.Document.html#_toSerialized">toSerialized()</a></code> </p> </div> <div class="panel panel-default"> <div class="panel-heading"><h2>Constants summary</h2></div> </div> <div class="panel panel-default"> <div class="panel-heading"><h3>Constants inherited from <a href="class-triagens.ArangoDb.Document.html#constants">triagens\ArangoDb\Document</a></h3></div> <p class="elementList"> <code><a href="class-triagens.ArangoDb.Document.html#ENTRY_HIDDENATTRIBUTES"><b>ENTRY_HIDDENATTRIBUTES</b></a></code>, <code><a 
href="class-triagens.ArangoDb.Document.html#ENTRY_ID"><b>ENTRY_ID</b></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#ENTRY_IGNOREHIDDENATTRIBUTES"><b>ENTRY_IGNOREHIDDENATTRIBUTES</b></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#ENTRY_ISNEW"><b>ENTRY_ISNEW</b></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#ENTRY_KEY"><b>ENTRY_KEY</b></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#ENTRY_REV"><b>ENTRY_REV</b></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#OPTION_KEEPNULL"><b>OPTION_KEEPNULL</b></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#OPTION_POLICY"><b>OPTION_POLICY</b></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#OPTION_WAIT_FOR_SYNC"><b>OPTION_WAIT_FOR_SYNC</b></a></code> </p> </div> <div class="panel panel-default"> <div class="panel-heading"><h2>Properties summary</h2></div> </div> <div class="panel panel-default"> <div class="panel-heading"><h3>Properties inherited from <a href="class-triagens.ArangoDb.Document.html#properties">triagens\ArangoDb\Document</a></h3></div> <p class="elementList"> <code><a href="class-triagens.ArangoDb.Document.html#$_changed"><var>$_changed</var></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#$_doValidate"><var>$_doValidate</var></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#$_hiddenAttributes"><var>$_hiddenAttributes</var></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#$_id"><var>$_id</var></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#$_ignoreHiddenAttributes"><var>$_ignoreHiddenAttributes</var></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#$_isNew"><var>$_isNew</var></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#$_key"><var>$_key</var></a></code>, <code><a href="class-triagens.ArangoDb.Document.html#$_rev"><var>$_rev</var></a></code>, <code><a 
href="class-triagens.ArangoDb.Document.html#$_values"><var>$_values</var></a></code> </p> </div> </div> </div> <div id="footer"> ArangoDB-PHP API Documentation API documentation generated by <a href="http://apigen.org">ApiGen</a> </div> </div> <script src="resources/combined.js"></script> <script src="elementlist.js"></script> </body> </html>
Java
'use strict';

var chai = require('chai');
var promised = require('chai-as-promised');

// Make chai's expect (with promise support) available to all specs.
chai.use(promised);
global.expect = chai.expect;

exports.config = {
  // The timeout for each script run on the browser. This should be longer
  // than the maximum time your application needs to stabilize between tasks.
  allScriptsTimeout: 15000,

  // Capabilities to be passed to the webdriver instance.
  capabilities: {
    'browserName': 'chrome',
    'loggingPrefs': {
      'browser': 'ALL'
    }
  },

  // ----- What tests to run -----
  //
  // Spec patterns are relative to the location of the spec file. They may
  // include glob patterns.
  specs: [
    'admin/specs/admin-user-spec.js',
    'admin/specs/users-spec.js',
    'admin/specs/groups-spec.js',
    'admin/specs/system-spec.js',
    'admin/specs/authorizations-spec.js',
    'cockpit/specs/dashboard-spec.js',
    'cockpit/specs/process-definition-spec.js',
    'cockpit/specs/decision-definition-spec.js',
    'cockpit/specs/process-instance-spec.js',
    'cockpit/specs/process-definition-filter-spec.js',
    'cockpit/specs/variable-spec.js',
    'cockpit/specs/suspension-spec.js',
    'tasklist/specs/filter-basic-spec.js',
    'tasklist/specs/filter-permissions-spec.js',
    'tasklist/specs/filter-criteria-spec.js',
    'tasklist/specs/filter-vg-spec.js',
    // NOTE(review): the two filenames below look garbled
    // ('process-stariables', 'task-claiminart') — confirm they match actual
    // files in the specs directory before relying on them.
    'tasklist/specs/process-stariables-spec.js',
    'tasklist/specs/task-claiminart-spec.js',
    'tasklist/specs/tasklist-sorting-spec.js',
    'tasklist/specs/tasklist-search-spec.js',
    'tasklist/specs/task-detail-view-spec.js',
    'tasklist/specs/task-dates-spec.js'
  ],

  // A base URL for your application under test. Calls to protractor.get()
  // with relative paths will be prepended with this.
  baseUrl: 'http://localhost:8080',

  // ----- The test framework -----
  //
  // Jasmine is fully supported as a test and assertion framework.
  // Mocha has limited beta support. You will need to include your own
  // assertion framework if working with mocha.
  framework: 'mocha',

  // ----- Options to be passed to mocha -----
  //
  // See the full list at https://mochajs.org/#command-line-usage
  mochaOpts: {
    timeout: 15000,
    colors: false,
    reporter: 'xunit-file',
    slow: 3000
  }
};
Java
# AUTOGENERATED FILE FROM balenalib/zc702-zynq7-ubuntu:cosmic-build

# Node.js and Yarn versions installed by this image.
ENV NODE_VERSION 12.20.1
ENV YARN_VERSION 1.22.4

# Import the Node.js release-signing key (tried against several keyservers),
# then download, checksum-verify and install the armv7l Node.js tarball and a
# signature-verified Yarn release.
RUN for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	# sha256sum -c expects "<hash>  <filename>" (two spaces) on stdin
	&& echo "7283ced5d7c0cc036a35bc2e64b23e7d4b348848170567880edabcf5279f4f8a  node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*

# Default command: explain that no CMD was configured.
# FIX: removed the stray leading apostrophe that left an unbalanced quote in
# the echoed message ("'No CMD ... docs").
CMD ["echo","No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Smoke-test the installed Node.js stack using balena's pinned test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
	&& echo "Running test-stack@node" \
	&& chmod +x test-stack@node.sh \
	&& bash test-stack@node.sh \
	&& rm -rf test-stack@node.sh

# Record image metadata shown by the `balena-info` helper.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu cosmic \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v12.20.1, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

# Install a one-shot /bin/sh shim that prints the image info on first use,
# then restores the real shell.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
Java
bluebook
========

A simple encrypted notepad application using Java Swing and GNU Crypto.

### Build

The repo contains a NetBeans project (last tested version: 7.1.2). GUI portions of the code are auto-generated by NetBeans' GUI Swing builder, Matisse. To build, open the project in NetBeans and click _Run > Build Project_. To build on the command line, type `ant jar` in the project's root directory. In either case, the runnable _bluebook.jar_ file and its associated _lib_ directory will be created in the project's _dist_ subdirectory.

### Run

To run, type `java -jar bluebook.jar`. AES-256 encrypted data are stored in the file _bluebook.data_, which will be created in the current directory if it does not already exist.

* In plaintext mode, click the notepad/pencil icon to enable editing. The text area's background color will change from grey to blue when something has been modified.
* To encrypt and save after editing, enter your password in the password field, click the lock button, then confirm your password when prompted. (NB: Since the password is never stored, bluebook will encrypt using any password you type twice!)
* To decrypt, enter your password and click the magnifying-glass button.

### License

The original contents of this repository are released under the [Apache 2.0](http://www.apache.org/licenses/LICENSE-2.0) license. See the LICENSE file for details. The GNU Crypto library is distributed under the terms of the GPL (specifically, GPLv2 3(c) / GPLv3 6(c), allowing for distribution of object code); see the README*, LICENSE and COPYING files in the gnu-crypto-2.0.1-bin subdirectory for details and for instructions on obtaining source code.

### Thanks

Thanks to the [Tango Desktop Project](http://tango.freedesktop.org/Tango_Desktop_Project) for public-domain icons and to the [GNU Crypto](http://www.gnu.org/software/gnu-crypto) team.
Java
/* * Copyright (c) 2015 IRCCloud, Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.irccloud.android.fragment; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.os.Bundle; import android.text.SpannableStringBuilder; import android.text.Spanned; import android.text.method.ScrollingMovementMethod; import android.text.style.TabStopSpan; import android.view.LayoutInflater; import android.view.View; import android.widget.TextView; import androidx.appcompat.app.AlertDialog; import androidx.fragment.app.DialogFragment; import com.irccloud.android.R; import com.irccloud.android.activity.MainActivity; public class TextListFragment extends DialogFragment { private TextView textView; private String title = null; private String text = null; public boolean dismissed = false; public String type; @Override public Dialog onCreateDialog(Bundle savedInstanceState) { Context ctx = getActivity(); if(ctx == null) return null; LayoutInflater inflater = (LayoutInflater) ctx.getSystemService(Context.LAYOUT_INFLATER_SERVICE); View v = inflater.inflate(R.layout.dialog_textlist, null); textView = v.findViewById(R.id.textView); textView.setHorizontallyScrolling(true); textView.setMovementMethod(new ScrollingMovementMethod()); if (savedInstanceState != null && savedInstanceState.containsKey("text")) { text = savedInstanceState.getString("text"); } if(text != null) { setText(text); } Dialog d = new AlertDialog.Builder(ctx) 
.setView(v) .setTitle(title) .setNegativeButton("Close", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); } }) .create(); return d; } @Override public void onDismiss(DialogInterface dialog) { super.onDismiss(dialog); dismissed = true; if(getActivity() != null && ((MainActivity)getActivity()).help_fragment == this) ((MainActivity)getActivity()).help_fragment = null; } @Override public void onCancel(DialogInterface dialog) { super.onCancel(dialog); dismissed = true; if(getActivity() != null && ((MainActivity)getActivity()).help_fragment == this) ((MainActivity)getActivity()).help_fragment = null; } @Override public void onSaveInstanceState(Bundle state) { state.putString("text", text); } public void refresh() { Bundle args = getArguments(); if(args.containsKey("title")) { title = args.getString("title"); if(getDialog() != null) getDialog().setTitle(title); } if(args.containsKey("text")) { text = args.getString("text"); if(textView != null) setText(text); } } private void setText(String text) { SpannableStringBuilder sb = new SpannableStringBuilder(text); for (int i = 0; i < 100; i++) sb.setSpan(new TabStopSpan.Standard(i * 300), 0, sb.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); textView.setText(sb, TextView.BufferType.SPANNABLE); } @Override public void setArguments(Bundle args) { super.setArguments(args); refresh(); } @Override public void onPause() { super.onPause(); } }
Java
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package log

import (
	"fmt"
	"strings"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"

	"github.com/adfin/statster/metrics/core"
)

// TestSimpleWrite checks that batchToString renders every part of a DataBatch
// — pod name, labels, plain metric values, labeled metric values and the
// batch timestamp — somewhere in its textual output.
func TestSimpleWrite(t *testing.T) {
	now := time.Now()
	batch := core.DataBatch{
		Timestamp:  now,
		MetricSets: make(map[string]*core.MetricSet),
	}
	// One metric set with a plain gauge metric and one labeled metric.
	batch.MetricSets["pod1"] = &core.MetricSet{
		Labels: map[string]string{"bzium": "hocuspocus"},
		MetricValues: map[string]core.MetricValue{
			"m1": {
				ValueType:  core.ValueInt64,
				MetricType: core.MetricGauge,
				IntValue:   31415,
			},
		},
		LabeledMetrics: []core.LabeledMetric{
			{
				Name: "lm",
				MetricValue: core.MetricValue{
					MetricType: core.MetricGauge,
					ValueType:  core.ValueInt64,
					IntValue:   279,
				},
				Labels: map[string]string{
					"disk": "hard",
				},
			},
		},
	}
	log := batchToString(&batch)
	assert.True(t, strings.Contains(log, "31415"))
	assert.True(t, strings.Contains(log, "m1"))
	assert.True(t, strings.Contains(log, "bzium"))
	assert.True(t, strings.Contains(log, "hocuspocus"))
	assert.True(t, strings.Contains(log, "pod1"))
	assert.True(t, strings.Contains(log, "279"))
	assert.True(t, strings.Contains(log, "disk"))
	assert.True(t, strings.Contains(log, "hard"))
	// NOTE(review): assumes batchToString prints the timestamp with
	// time.Time's default String() formatting — confirm if this flakes.
	assert.True(t, strings.Contains(log, fmt.Sprintf("%s", now)))
}

// TestSortedOutput checks that batchToString emits pods, their labels and
// their metrics in sorted order: each expected token must appear strictly
// after the previous one in the rendered output.
func TestSortedOutput(t *testing.T) {
	const (
		label1  = "abcLabel"
		label2  = "xyzLabel"
		pod1    = "pod1"
		pod2    = "pod2"
		metric1 = "metricA"
		metric2 = "metricB"
	)
	metricVal := core.MetricValue{
		ValueType:  core.ValueInt64,
		MetricType: core.MetricGauge,
		IntValue:   31415,
	}
	// metricSet builds a set whose labels/metrics are deliberately inserted
	// in non-sorted order, so ordering in the output must come from sorting.
	metricSet := func(pod string) *core.MetricSet {
		return &core.MetricSet{
			Labels: map[string]string{label2 + pod: pod, label1 + pod: pod},
			MetricValues: map[string]core.MetricValue{
				metric2 + pod: metricVal,
				metric1 + pod: metricVal,
			},
			LabeledMetrics: []core.LabeledMetric{},
		}
	}
	now := time.Now()
	batch := core.DataBatch{
		Timestamp: now,
		MetricSets: map[string]*core.MetricSet{
			pod2: metricSet(pod2),
			pod1: metricSet(pod1),
		},
	}
	log := batchToString(&batch)
	// Expected order of appearance in the output.
	sorted := []string{
		pod1,
		label1 + pod1,
		label2 + pod1,
		metric1 + pod1,
		metric2 + pod1,
		pod2,
		label1 + pod2,
		label2 + pod2,
		metric1 + pod2,
		metric2 + pod2,
	}
	var (
		previous      string
		previousIndex int
	)
	for _, metric := range sorted {
		metricIndex := strings.Index(log, metric)
		assert.NotEqual(t, -1, metricIndex, "%q not found", metric)
		if previous != "" {
			assert.True(t, previousIndex < metricIndex, "%q should be before %q", previous, metric)
		}
		previous = metric
		previousIndex = metricIndex
	}
}
Java
import functools
import warnings
from collections import Mapping, Sequence
from numbers import Number

import numpy as np
import pandas as pd

from . import ops
from . import utils
from . import common
from . import groupby
from . import indexing
from . import alignment
from . import formatting
from .. import conventions
from .alignment import align, partial_align
from .coordinates import DatasetCoordinates, Indexes
from .common import ImplementsDatasetReduce, BaseDataObject
from .utils import (Frozen, SortedKeysDict, ChainMap, maybe_wrap_array)
from .variable import as_variable, Variable, Coordinate, broadcast_variables
from .pycompat import (iteritems, itervalues, basestring, OrderedDict,
                       dask_array_type)
from .combine import concat

# list of attributes of pd.DatetimeIndex that are ndarrays of time info
_DATETIMEINDEX_COMPONENTS = ['year', 'month', 'day', 'hour', 'minute',
                             'second', 'microsecond', 'nanosecond', 'date',
                             'time', 'dayofyear', 'weekofyear', 'dayofweek',
                             'quarter']


def _get_virtual_variable(variables, key):
    """Get a virtual variable (e.g., 'time.year') from a dict of
    xray.Variable objects (if possible).

    ``key`` must look like ``'<ref>.<component>'``; anything else raises
    KeyError.  Returns ``(ref_name, var_name, Variable)``.
    """
    if not isinstance(key, basestring):
        raise KeyError(key)
    split_key = key.split('.', 1)
    if len(split_key) != 2:
        raise KeyError(key)
    ref_name, var_name = split_key
    ref_var = variables[ref_name]
    # the referenced variable must be 1-d (an index) or scalar (a timestamp)
    if ref_var.ndim == 1:
        date = ref_var.to_index()
    elif ref_var.ndim == 0:
        date = pd.Timestamp(ref_var.values)
    else:
        raise KeyError(key)
    if var_name == 'season':
        # TODO: move 'season' into pandas itself
        seasons = np.array(['DJF', 'MAM', 'JJA', 'SON'])
        month = date.month
        data = seasons[(month // 3) % 4]
    else:
        # any other component is looked up directly on the datetime object
        data = getattr(date, var_name)
    return ref_name, var_name, Variable(ref_var.dims, data)


def _as_dataset_variable(name, var):
    """Prepare a variable for adding it to a Dataset.

    Coerces ``var`` to a Variable; if its name matches one of its own
    dimensions it is additionally converted to an index Coordinate.
    """
    try:
        var = as_variable(var, key=name)
    except TypeError:
        raise TypeError('Dataset variables must be an array or a tuple of '
                        'the form (dims, data[, attrs, encoding])')
    if name in var.dims:
        # convert it into an Index coordinate
        if var.ndim != 1:
            raise ValueError('an index variable must be defined with '
                             '1-dimensional data')
        var = var.to_coord()
    return var


def _align_variables(variables, join='outer'):
    """Align all DataArrays in the provided dict, leaving other values alone.

    Only entries exposing an ``indexes`` attribute (DataArrays) participate
    in the alignment; everything else is passed through unchanged.
    """
    alignable = [k for k, v in variables.items() if hasattr(v, 'indexes')]
    aligned = align(*[variables[a] for a in alignable],
                    join=join, copy=False)
    new_variables = OrderedDict(variables)
    new_variables.update(zip(alignable, aligned))
    return new_variables


# NOTE(review): `old_variables={}` is a mutable default; it appears to be
# read-only here (only consulted through the ChainMap), but confirm before
# relying on it.
def _expand_variables(raw_variables, old_variables={}, compat='identical'):
    """Expand a dictionary of variables.

    Returns a dictionary of Variable objects suitable for inserting into a
    Dataset._variables dictionary. This includes converting tuples (dims,
    data) into Variable objects, converting coordinate variables into
    Coordinate objects and expanding DataArray objects into Variables plus
    coordinates.

    Raises ValueError if any conflicting values are found, between any of
    the new or old variables.
    """
    new_variables = OrderedDict()
    new_coord_names = set()
    # lookups fall through new_variables to old_variables; writes go to new
    variables = ChainMap(new_variables, old_variables)

    def maybe_promote_or_replace(name, var):
        # Broadcast-compatible conflict: keep whichever form carries the
        # dimension `name`, or broadcast the existing variable to the union
        # of both shapes.
        existing_var = variables[name]
        if name not in existing_var.dims:
            if name in var.dims:
                variables[name] = var
            else:
                common_dims = OrderedDict(zip(existing_var.dims,
                                              existing_var.shape))
                common_dims.update(zip(var.dims, var.shape))
                variables[name] = existing_var.expand_dims(common_dims)
                new_coord_names.update(var.dims)

    def add_variable(name, var):
        var = _as_dataset_variable(name, var)
        if name not in variables:
            variables[name] = var
            new_coord_names.update(variables[name].dims)
        else:
            # duplicate name: must satisfy the requested compat check
            if not getattr(variables[name], compat)(var):
                raise ValueError('conflicting value for variable %s:\n'
                                 'first value: %r\nsecond value: %r'
                                 % (name, variables[name], var))
            if compat == 'broadcast_equals':
                maybe_promote_or_replace(name, var)

    for name, var in iteritems(raw_variables):
        if hasattr(var, 'coords'):
            # it's a DataArray: pull in its coordinates as well
            new_coord_names.update(var.coords)
            for dim, coord in iteritems(var.coords):
                if dim != name:
                    add_variable(dim, coord.variable)
            var = var.variable
        add_variable(name, var)
    return new_variables, new_coord_names


def _calculate_dims(variables):
    """Calculate the dimensions corresponding to a set of variables.

    Returns dictionary mapping from dimension names to sizes. Raises
    ValueError if any of the dimension sizes conflict.
    """
    dims = {}
    # remembers which variable first established each dimension's size,
    # purely for better error messages
    last_used = {}
    scalar_vars = set(k for k, v in iteritems(variables) if not v.dims)
    for k, var in iteritems(variables):
        for dim, size in zip(var.dims, var.shape):
            if dim in scalar_vars:
                raise ValueError('dimension %s already exists as a scalar '
                                 'variable' % dim)
            if dim not in dims:
                dims[dim] = size
                last_used[dim] = k
            elif dims[dim] != size:
                raise ValueError('conflicting sizes for dimension %r: '
                                 'length %s on %r and length %s on %r'
                                 % (dim, size, k, dims[dim], last_used[dim]))
    return dims


def _merge_expand(aligned_self, other, overwrite_vars, compat):
    # Expand `other` against this dataset's variables, skipping the compat
    # check for names listed in overwrite_vars (they are replaced outright).
    possible_conflicts = dict((k, v) for k, v
                              in aligned_self._variables.items()
                              if k not in overwrite_vars)
    new_vars, new_coord_names = _expand_variables(other, possible_conflicts,
                                                  compat)
    replace_vars = aligned_self._variables.copy()
    replace_vars.update(new_vars)
    return replace_vars, new_vars, new_coord_names


def _merge_dataset(self, other, overwrite_vars, compat, join):
    # Merge another Dataset: align the two, then expand/merge the variables
    # and carry over the other's coordinate names.
    aligned_self, other = partial_align(self, other, join=join, copy=False)
    replace_vars, new_vars, new_coord_names = _merge_expand(
        aligned_self, other._variables, overwrite_vars, compat)
    new_coord_names.update(other._coord_names)
    return replace_vars, new_vars, new_coord_names


def _merge_dict(self, other, overwrite_vars, compat, join):
    # Merge a plain dict of variables: align its DataArray entries with this
    # dataset (excluding names being overwritten), then expand/merge.
    other = _align_variables(other, join='outer')
    alignable = [k for k, v in other.items() if hasattr(v, 'indexes')]
    aligned = partial_align(self, *[other[a] for a in alignable],
                            join=join, copy=False, exclude=overwrite_vars)
    aligned_self = aligned[0]
    other = OrderedDict(other)
    other.update(zip(alignable, aligned[1:]))
    return _merge_expand(aligned_self, other, overwrite_vars, compat)


def _assert_empty(args, msg='%s'):
    # Raise ValueError(msg % args) unless `args` is empty/falsy.
    if args:
        raise ValueError(msg % args)


def as_dataset(obj):
    """Cast the given object to a Dataset.

    Handles DataArrays, Datasets and dictionaries of variables. A new
    Dataset object is only created in the last case.
    """
    obj = getattr(obj, '_dataset', obj)
    if not isinstance(obj, Dataset):
        obj = Dataset(obj)
    return obj


class Variables(Mapping):
    """Read-only mapping view of a Dataset's data variables (everything in
    ``_variables`` that is not a coordinate)."""

    def __init__(self, dataset):
        self._dataset = dataset

    def __iter__(self):
        return (key for key in self._dataset._variables
                if key not in self._dataset._coord_names)

    def __len__(self):
        # NOTE(review): assumes every coord name is also in _variables;
        # otherwise this count would disagree with __iter__.
        return len(self._dataset._variables) - len(self._dataset._coord_names)

    def __contains__(self, key):
        return (key in self._dataset._variables
                and key not in self._dataset._coord_names)

    def __getitem__(self, key):
        if key not in self._dataset._coord_names:
            return self._dataset[key]
        else:
            raise KeyError(key)

    def __repr__(self):
        return formatting.vars_repr(self)


class _LocIndexer(object):
    """Helper backing ``Dataset.loc``; only dict keys are supported."""

    def __init__(self, dataset):
        self.dataset = dataset

    def __getitem__(self, key):
        if not utils.is_dict_like(key):
            raise TypeError('can only lookup dictionaries from Dataset.loc')
        # delegate label-based selection to Dataset.sel
        return self.dataset.sel(**key)


class Dataset(Mapping, ImplementsDatasetReduce, BaseDataObject):
    """A multi-dimensional, in memory, array database.

    A dataset resembles an in-memory representation of a NetCDF file, and
    consists of variables, coordinates and attributes which together form a
    self describing dataset.

    Dataset implements the mapping interface with keys given by variable
    names and values given by DataArray objects for each variable name.

    One dimensional variables with name equal to their dimension are index
    coordinates used for label based indexing.
    """
    # class properties defined for the benefit of __setstate__, which
    # otherwise runs into trouble because we overrode __getattr__
    _attrs = None
    _variables = Frozen({})

    groupby_cls = groupby.DatasetGroupBy

    def __init__(self, variables=None, coords=None, attrs=None,
                 compat='broadcast_equals'):
        """To load data from a file or file-like object, use the
        `open_dataset` function.
Parameters ---------- variables : dict-like, optional A mapping from variable names to :py:class:`~xray.DataArray` objects, :py:class:`~xray.Variable` objects or tuples of the form ``(dims, data[, attrs])`` which can be used as arguments to create a new ``Variable``. Each dimension must have the same length in all variables in which it appears. coords : dict-like, optional Another mapping in the same form as the `variables` argument, except the each item is saved on the dataset as a "coordinate". These variables have an associated meaning: they describe constant/fixed/independent quantities, unlike the varying/measured/dependent quantities that belong in `variables`. Coordinates values may be given by 1-dimensional arrays or scalars, in which case `dims` do not need to be supplied: 1D arrays will be assumed to give index values along the dimension with the same name. attrs : dict-like, optional Global attributes to save on this dataset. compat : {'broadcast_equals', 'equals', 'identical'}, optional String indicating how to compare variables of the same name for potential conflicts: - 'broadcast_equals': all values must be equal when variables are broadcast against each other to ensure common dimensions. - 'equals': all values and dimensions must be the same. - 'identical': all values, dimensions and attributes must be the same. """ self._variables = OrderedDict() self._coord_names = set() self._dims = {} self._attrs = None self._file_obj = None if variables is None: variables = {} if coords is None: coords = set() if variables or coords: self._set_init_vars_and_dims(variables, coords, compat) if attrs is not None: self.attrs = attrs def _add_missing_coords_inplace(self): """Add missing coordinates to self._variables """ for dim, size in iteritems(self.dims): if dim not in self._variables: # This is equivalent to np.arange(size), but # waits to create the array until its actually accessed. 
data = indexing.LazyIntegerRange(size) coord = Coordinate(dim, data) self._variables[dim] = coord def _update_vars_and_coords(self, new_variables, new_coord_names={}, needs_copy=True, check_coord_names=True): """Add a dictionary of new variables to this dataset. Raises a ValueError if any dimensions have conflicting lengths in the new dataset. Otherwise will update this dataset's _variables and _dims attributes in-place. Set `needs_copy=False` only if this dataset is brand-new and hence can be thrown away if this method fails. """ # default to creating another copy of variables so can unroll if we end # up with inconsistent dimensions variables = self._variables.copy() if needs_copy else self._variables if check_coord_names: _assert_empty([k for k in self.data_vars if k in new_coord_names], 'coordinates with these names already exist as ' 'variables: %s') variables.update(new_variables) dims = _calculate_dims(variables) # all checks are complete: it's safe to update self._variables = variables self._dims = dims self._add_missing_coords_inplace() self._coord_names.update(new_coord_names) def _set_init_vars_and_dims(self, vars, coords, compat): """Set the initial value of Dataset variables and dimensions """ _assert_empty([k for k in vars if k in coords], 'redundant variables and coordinates: %s') variables = ChainMap(vars, coords) aligned = _align_variables(variables) new_variables, new_coord_names = _expand_variables(aligned, compat=compat) new_coord_names.update(coords) self._update_vars_and_coords(new_variables, new_coord_names, needs_copy=False, check_coord_names=False) @classmethod def load_store(cls, store, decoder=None): """Create a new dataset from the contents of a backends.*DataStore object """ variables, attributes = store.load() if decoder: variables, attributes = decoder(variables, attributes) obj = cls(variables, attrs=attributes) obj._file_obj = store return obj def close(self): """Close any files linked to this dataset """ if self._file_obj is not 
None: self._file_obj.close() self._file_obj = None def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): self.close() def __getstate__(self): """Always load data in-memory before pickling""" self.load() # self.__dict__ is the default pickle object, we don't need to # implement our own __setstate__ method to make pickle work state = self.__dict__.copy() # throw away any references to datastores in the pickle state['_file_obj'] = None return state @property def variables(self): """Frozen dictionary of xray.Variable objects constituting this dataset's data """ return Frozen(self._variables) def _attrs_copy(self): return None if self._attrs is None else OrderedDict(self._attrs) @property def attrs(self): """Dictionary of global attributes on this dataset """ if self._attrs is None: self._attrs = OrderedDict() return self._attrs @attrs.setter def attrs(self, value): self._attrs = OrderedDict(value) @property def dims(self): """Mapping from dimension names to lengths. This dictionary cannot be modified directly, but is updated when adding new variables. """ return Frozen(SortedKeysDict(self._dims)) def load(self): """Manually trigger loading of this dataset's data from disk or a remote source into memory and return this dataset. Normally, it should not be necessary to call this method in user code, because all xray functions should either work on deferred data or load data automatically. However, this method can be necessary when working with many file objects on disk. 
""" # access .data to coerce everything to numpy or dask arrays all_data = dict((k, v.data) for k, v in self.variables.items()) lazy_data = dict((k, v) for k, v in all_data.items() if isinstance(v, dask_array_type)) if lazy_data: import dask.array as da # evaluate all the dask arrays simultaneously evaluated_data = da.compute(*lazy_data.values()) evaluated_variables = {} for k, data in zip(lazy_data, evaluated_data): self.variables[k].data = data return self def load_data(self): # pragma: no cover warnings.warn('the Dataset method `load_data` has been deprecated; ' 'use `load` instead', FutureWarning, stacklevel=2) return self.load() @classmethod def _construct_direct(cls, variables, coord_names, dims, attrs, file_obj=None): """Shortcut around __init__ for internal use when we want to skip costly validation """ obj = object.__new__(cls) obj._variables = variables obj._coord_names = coord_names obj._dims = dims obj._attrs = attrs obj._file_obj = file_obj return obj __default_attrs = object() def _replace_vars_and_dims(self, variables, coord_names=None, attrs=__default_attrs, inplace=False): """Fastpath constructor for internal use. Preserves coord names and attributes; dimensions are recalculated from the supplied variables. The arguments are *not* copied when placed on the new dataset. It is up to the caller to ensure that they have the right type and are not used elsewhere. 
Parameters ---------- variables : OrderedDict coord_names : set or None, optional attrs : OrderedDict or None, optional Returns ------- new : Dataset """ dims = _calculate_dims(variables) if inplace: self._dims = dims self._variables = variables if coord_names is not None: self._coord_names = coord_names if attrs is not self.__default_attrs: self._attrs = attrs obj = self else: if coord_names is None: coord_names = self._coord_names.copy() if attrs is self.__default_attrs: attrs = self._attrs_copy() obj = self._construct_direct(variables, coord_names, dims, attrs) return obj def copy(self, deep=False): """Returns a copy of this dataset. If `deep=True`, a deep copy is made of each of the component variables. Otherwise, a shallow copy is made, so each variable in the new dataset is also a variable in the original dataset. """ if deep: variables = OrderedDict((k, v.copy(deep=True)) for k, v in iteritems(self._variables)) else: variables = self._variables.copy() # skip __init__ to avoid costly validation return self._construct_direct(variables, self._coord_names.copy(), self._dims.copy(), self._attrs_copy()) def _copy_listed(self, names, keep_attrs=True): """Create a new Dataset with the listed variables from this dataset and the all relevant coordinates. Skips all validation. 
""" variables = OrderedDict() coord_names = set() for name in names: try: variables[name] = self._variables[name] except KeyError: ref_name, var_name, var = _get_virtual_variable( self._variables, name) variables[var_name] = var if ref_name in self._coord_names: coord_names.add(var_name) needed_dims = set() for v in variables.values(): needed_dims.update(v._dims) for k in self._coord_names: if set(self._variables[k]._dims) <= needed_dims: variables[k] = self._variables[k] coord_names.add(k) dims = dict((k, self._dims[k]) for k in needed_dims) attrs = self.attrs.copy() if keep_attrs else None return self._construct_direct(variables, coord_names, dims, attrs) def __copy__(self): return self.copy(deep=False) def __deepcopy__(self, memo=None): # memo does nothing but is required for compatibility with # copy.deepcopy return self.copy(deep=True) def __contains__(self, key): """The 'in' operator will return true or false depending on whether 'key' is an array in the dataset or not. """ return key in self._variables def __len__(self): return len(self._variables) def __iter__(self): return iter(self._variables) @property def nbytes(self): return sum(v.nbytes for v in self.variables.values()) @property def loc(self): """Attribute for location based indexing. Only supports __getitem__, and only when the key is a dict of the form {dim: labels}. """ return _LocIndexer(self) def __getitem__(self, key): """Access variables or coordinates this dataset as a :py:class:`~xray.DataArray`. Indexing with a list of names will return a new ``Dataset`` object. """ from .dataarray import DataArray if utils.is_dict_like(key): return self.isel(**key) key = np.asarray(key) if key.ndim == 0: return DataArray._new_from_dataset(self, key.item()) else: return self._copy_listed(key) def __setitem__(self, key, value): """Add an array to this dataset. If value is a `DataArray`, call its `select_vars()` method, rename it to `key` and merge the contents of the resulting dataset into this dataset. 
If value is an `Variable` object (or tuple of form ``(dims, data[, attrs])``), add it to this dataset as a new variable. """ if utils.is_dict_like(key): raise NotImplementedError('cannot yet use a dictionary as a key ' 'to set Dataset values') self.update({key: value}) def __delitem__(self, key): """Remove a variable from this dataset. If this variable is a dimension, all variables containing this dimension are also removed. """ def remove(k): del self._variables[k] self._coord_names.discard(k) remove(key) if key in self._dims: del self._dims[key] also_delete = [k for k, v in iteritems(self._variables) if key in v.dims] for key in also_delete: remove(key) # mutable objects should not be hashable __hash__ = None def _all_compat(self, other, compat_str): """Helper function for equals and identical""" # some stores (e.g., scipy) do not seem to preserve order, so don't # require matching order for equality compat = lambda x, y: getattr(x, compat_str)(y) return (self._coord_names == other._coord_names and utils.dict_equiv(self._variables, other._variables, compat=compat)) def broadcast_equals(self, other): """Two Datasets are broadcast equal if they are equal after broadcasting all variables against each other. For example, variables that are scalar in one dataset but non-scalar in the other dataset can still be broadcast equal if the the non-scalar variable is a constant. See Also -------- Dataset.equals Dataset.identical """ try: return self._all_compat(other, 'broadcast_equals') except (TypeError, AttributeError): return False def equals(self, other): """Two Datasets are equal if they have matching variables and coordinates, all of which are equal. Datasets can still be equal (like pandas objects) if they have NaN values in the same locations. This method is necessary because `v1 == v2` for ``Dataset`` does element-wise comparisions (like numpy.ndarrays). 
See Also -------- Dataset.broadcast_equals Dataset.identical """ try: return self._all_compat(other, 'equals') except (TypeError, AttributeError): return False def identical(self, other): """Like equals, but also checks all dataset attributes and the attributes on all variables and coordinates. See Also -------- Dataset.broadcast_equals Dataset.equals """ try: return (utils.dict_equiv(self.attrs, other.attrs) and self._all_compat(other, 'identical')) except (TypeError, AttributeError): return False @property def indexes(self): """OrderedDict of pandas.Index objects used for label based indexing """ return Indexes(self) @property def coords(self): """Dictionary of xray.DataArray objects corresponding to coordinate variables """ return DatasetCoordinates(self) @property def data_vars(self): """Dictionary of xray.DataArray objects corresponding to data variables """ return Variables(self) @property def vars(self): # pragma: no cover warnings.warn('the Dataset property `vars` has been deprecated; ' 'use `data_vars` instead', FutureWarning, stacklevel=2) return self.data_vars def set_coords(self, names, inplace=False): """Given names of one or more variables, set them as coordinates Parameters ---------- names : str or list of str Name(s) of variables in this dataset to convert into coordinates. inplace : bool, optional If True, modify this dataset inplace. Otherwise, create a new object. Returns ------- Dataset """ # TODO: allow inserting new coordinates with this method, like # DataFrame.set_index? # nb. 
check in self._variables, not self.data_vars to insure that the # operation is idempotent if isinstance(names, basestring): names = [names] self._assert_all_in_dataset(names) obj = self if inplace else self.copy() obj._coord_names.update(names) return obj def reset_coords(self, names=None, drop=False, inplace=False): """Given names of coordinates, reset them to become variables Parameters ---------- names : str or list of str, optional Name(s) of non-index coordinates in this dataset to reset into variables. By default, all non-index coordinates are reset. drop : bool, optional If True, remove coordinates instead of converting them into variables. inplace : bool, optional If True, modify this dataset inplace. Otherwise, create a new object. Returns ------- Dataset """ if names is None: names = self._coord_names - set(self.dims) else: if isinstance(names, basestring): names = [names] self._assert_all_in_dataset(names) _assert_empty( set(names) & set(self.dims), 'cannot remove index coordinates with reset_coords: %s') obj = self if inplace else self.copy() obj._coord_names.difference_update(names) if drop: for name in names: del obj._variables[name] return obj def dump_to_store(self, store, encoder=None, sync=True): """Store dataset contents to a backends.*DataStore object.""" variables, attrs = conventions.encode_dataset_coordinates(self) if encoder: variables, attrs = encoder(variables, attrs) store.store(variables, attrs) if sync: store.sync() def to_netcdf(self, path=None, mode='w', format=None, group=None, engine=None): """Write dataset contents to a netCDF file. Parameters ---------- path : str, optional Path to which to save this dataset. If no path is provided, this function returns the resulting netCDF file as a bytes object; in this case, we need to use scipy.io.netcdf, which does not support netCDF version 4 (the default format becomes NETCDF3_64BIT). mode : {'w', 'a'}, optional Write ('w') or append ('a') mode. 
If mode='w', any existing file at this location will be overwritten. format : {'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_64BIT', 'NETCDF3_CLASSIC'}, optional File format for the resulting netCDF file: * NETCDF4: Data is stored in an HDF5 file, using netCDF4 API features. * NETCDF4_CLASSIC: Data is stored in an HDF5 file, using only netCDF 3 compatibile API features. * NETCDF3_64BIT: 64-bit offset version of the netCDF 3 file format, which fully supports 2+ GB files, but is only compatible with clients linked against netCDF version 3.6.0 or later. * NETCDF3_CLASSIC: The classic netCDF 3 file format. It does not handle 2+ GB files very well. All formats are supported by the netCDF4-python library. scipy.io.netcdf only supports the last two formats. The default format is NETCDF4 if you are saving a file to disk and have the netCDF4-python library available. Otherwise, xray falls back to using scipy to write netCDF files and defaults to the NETCDF3_64BIT format (scipy does not support netCDF4). group : str, optional Path to the netCDF4 group in the given file to open (only works for format='NETCDF4'). The group(s) will be created if necessary. engine : {'netcdf4', 'scipy', 'h5netcdf'}, optional Engine to use when writing netCDF files. If not provided, the default engine is chosen based on available dependencies, with a preference for 'netcdf4' if writing to a file on disk. """ from ..backends.api import to_netcdf return to_netcdf(self, path, mode, format, group, engine) dump = utils.function_alias(to_netcdf, 'dumps') dumps = utils.function_alias(to_netcdf, 'dumps') def __repr__(self): return formatting.dataset_repr(self) @property def chunks(self): """Block dimensions for this dataset's data or None if it's not a dask array. 
def chunk(self, chunks=None, lock=False):
    """Coerce all arrays in this dataset into dask arrays with the given
    chunks.

    Non-dask arrays in this dataset will be converted to dask arrays.
    Dask arrays will be rechunked to the given chunk sizes.

    If no chunks are provided for one or more dimensions, chunk sizes
    along that dimension will not be updated; non-dask arrays will be
    converted into dask arrays with a single block.

    Parameters
    ----------
    chunks : int or dict, optional
        Chunk sizes along each dimension, e.g., ``5`` or
        ``{'x': 5, 'y': 5}``.
    lock : optional
        Passed on to :py:func:`dask.array.from_array`, if the array is not
        already as dask array.

    Returns
    -------
    chunked : xray.Dataset
    """
    # A bare number means "use this chunk size for every dimension".
    if isinstance(chunks, Number):
        chunks = dict.fromkeys(self.dims, chunks)

    if chunks is not None:
        bad_dims = [d for d in chunks if d not in self.dims]
        if bad_dims:
            raise ValueError('some chunks keys are not dimensions on this '
                             'object: %s' % bad_dims)

    def selkeys(dict_, keys):
        # Restrict a chunks mapping to the given dimension names.
        if dict_ is None:
            return None
        return dict((d, dict_[d]) for d in keys if d in dict_)

    def maybe_chunk(name, var, chunks):
        # Chunk only non-scalar variables; scalars pass through unchanged.
        chunks = selkeys(chunks, var.dims)
        if not chunks:
            chunks = None
        if var.ndim > 0:
            return var.chunk(chunks, name=name, lock=lock)
        else:
            return var

    variables = OrderedDict([(k, maybe_chunk(k, v, chunks))
                             for k, v in self.variables.items()])
    return self._replace_vars_and_dims(variables)

def isel(self, **indexers):
    """Returns a new dataset with each array indexed along the specified
    dimension(s).

    This method selects values from each array using its `__getitem__`
    method, except this method does not require knowing the order of
    each array's dimensions.

    Parameters
    ----------
    **indexers : {dim: indexer, ...}
        Keyword arguments with names matching dimensions and values given
        by integers, slice objects or arrays.

    Returns
    -------
    obj : Dataset
        A new Dataset with the same contents as this dataset, except each
        array and dimension is indexed by the appropriate indexers. In
        general, each array's data will be a view of the array's data
        in this dataset, unless numpy fancy indexing was triggered by using
        an array indexer, in which case the data will be a copy.

    See Also
    --------
    Dataset.sel
    DataArray.isel
    DataArray.sel
    """
    # idiom fix: `k not in` rather than `not k in`
    invalid = [k for k in indexers if k not in self.dims]
    if invalid:
        raise ValueError("dimensions %r do not exist" % invalid)

    # all indexers should be int, slice or np.ndarrays
    indexers = [(k, (np.asarray(v)
                     if not isinstance(v, (int, np.integer, slice))
                     else v))
                for k, v in iteritems(indexers)]

    variables = OrderedDict()
    for name, var in iteritems(self._variables):
        # Each variable only sees the indexers for its own dimensions.
        var_indexers = dict((k, v) for k, v in indexers if k in var.dims)
        variables[name] = var.isel(**var_indexers)
    return self._replace_vars_and_dims(variables)
Parameters ---------- method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional Method to use for inexact matches (requires pandas>=0.16): * default: only exact matches * pad / ffill: propgate last valid index value forward * backfill / bfill: propagate next valid index value backward * nearest: use nearest valid index value **indexers : {dim: indexer, ...} Keyword arguments with names matching dimensions and values given by scalars, slices or arrays of tick labels. Returns ------- obj : Dataset A new Dataset with the same contents as this dataset, except each variable and dimension is indexed by the appropriate indexers. In general, each variable's data will be a view of the variable's data in this dataset, unless numpy fancy indexing was triggered by using an array indexer, in which case the data will be a copy. See Also -------- Dataset.isel DataArray.isel DataArray.sel """ return self.isel(**indexing.remap_label_indexers(self, indexers, method=method)) def isel_points(self, dim='points', **indexers): """Returns a new dataset with each array indexed pointwise along the specified dimension(s). This method selects pointwise values from each array and is akin to the NumPy indexing behavior of `arr[[0, 1], [0, 1]]`, except this method does not require knowing the order of each array's dimensions. Parameters ---------- dim : str or DataArray or pandas.Index or other list-like object, optional Name of the dimension to concatenate along. If dim is provided as a string, it must be a new dimension name, in which case it is added along axis=0. If dim is provided as a DataArray or Index or list-like object, its name, which must not be present in the dataset, is used as the dimension to concatenate along and the values are added as a coordinate. **indexers : {dim: indexer, ...} Keyword arguments with names matching dimensions and values given by array-like objects. All indexers must be the same length and 1 dimensional. 
Returns ------- obj : Dataset A new Dataset with the same contents as this dataset, except each array and dimension is indexed by the appropriate indexers. With pointwise indexing, the new Dataset will always be a copy of the original. See Also -------- Dataset.sel DataArray.isel DataArray.sel DataArray.isel_points """ indexer_dims = set(indexers) def relevant_keys(mapping): return [k for k, v in mapping.items() if any(d in indexer_dims for d in v.dims)] data_vars = relevant_keys(self.data_vars) coords = relevant_keys(self.coords) # all the indexers should be iterables keys = indexers.keys() indexers = [(k, np.asarray(v)) for k, v in iteritems(indexers)] # Check that indexers are valid dims, integers, and 1D for k, v in indexers: if k not in self.dims: raise ValueError("dimension %s does not exist" % k) if v.dtype.kind != 'i': raise TypeError('Indexers must be integers') if v.ndim != 1: raise ValueError('Indexers must be 1 dimensional') # all the indexers should have the same length lengths = set(len(v) for k, v in indexers) if len(lengths) > 1: raise ValueError('All indexers must be the same length') # Existing dimensions are not valid choices for the dim argument if isinstance(dim, basestring): if dim in self.dims: # dim is an invalid string raise ValueError('Existing dimension names are not valid ' 'choices for the dim argument in sel_points') elif hasattr(dim, 'dims'): # dim is a DataArray or Coordinate if dim.name in self.dims: # dim already exists raise ValueError('Existing dimensions are not valid choices ' 'for the dim argument in sel_points') else: # try to cast dim to DataArray with name = points from .dataarray import DataArray dim = DataArray(dim, dims='points', name='points') # TODO: This would be sped up with vectorized indexing. This will # require dask to support pointwise indexing as well. 
def reindex_like(self, other, method=None, copy=True):
    """Conform this object onto the indexes of another object, filling in
    missing values with NaN.

    Parameters
    ----------
    other : Dataset or DataArray
        Object with an 'indexes' attribute giving a mapping from dimension
        names to pandas.Index objects, which provides coordinates upon
        which to index the variables in this dataset. The indexes on this
        other object need not be the same as the indexes on this
        dataset. Any mis-matched index values will be filled in with
        NaN, and any mis-matched dimension names will simply be ignored.
    method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional
        Method to use for filling index values from other not found in this
        dataset:

        * default: don't fill gaps
        * pad / ffill: propagate last valid index value forward
        * backfill / bfill: propagate next valid index value backward
        * nearest: use nearest valid index value (requires pandas>=0.16)
    copy : bool, optional
        If `copy=True`, the returned dataset contains only copied
        variables. If `copy=False` and no reindexing is required then
        original variables from this dataset are returned.

    Returns
    -------
    reindexed : Dataset
        Another dataset, with this dataset's data but coordinates from the
        other object.

    See Also
    --------
    Dataset.reindex
    align
    """
    # Thin wrapper: delegate to reindex with the other object's indexes.
    return self.reindex(method=method, copy=copy, **other.indexes)
method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional Method to use for filling index values in ``indexers`` not found in this dataset: * default: don't fill gaps * pad / ffill: propgate last valid index value forward * backfill / bfill: propagate next valid index value backward * nearest: use nearest valid index value (requires pandas>=0.16) copy : bool, optional If `copy=True`, the returned dataset contains only copied variables. If `copy=False` and no reindexing is required then original variables from this dataset are returned. **kw_indexers : optional Keyword arguments in the same form as ``indexers``. Returns ------- reindexed : Dataset Another dataset, with this dataset's data but replaced coordinates. See Also -------- Dataset.reindex_like align pandas.Index.get_indexer """ indexers = utils.combine_pos_and_kw_args(indexers, kw_indexers, 'reindex') if not indexers: # shortcut return self.copy(deep=True) if copy else self variables = alignment.reindex_variables( self.variables, self.indexes, indexers, method, copy=copy) return self._replace_vars_and_dims(variables) def rename(self, name_dict, inplace=False): """Returns a new object with renamed variables and dimensions. Parameters ---------- name_dict : dict-like Dictionary whose keys are current variable or dimension names and whose values are new names. inplace : bool, optional If True, rename variables and dimensions in-place. Otherwise, return a new dataset object. Returns ------- renamed : Dataset Dataset with renamed variables and dimensions. 
See Also -------- Dataset.swap_dims DataArray.rename """ for k in name_dict: if k not in self: raise ValueError("cannot rename %r because it is not a " "variable in this dataset" % k) variables = OrderedDict() coord_names = set() for k, v in iteritems(self._variables): name = name_dict.get(k, k) dims = tuple(name_dict.get(dim, dim) for dim in v.dims) var = v.copy(deep=False) var.dims = dims variables[name] = var if k in self._coord_names: coord_names.add(name) return self._replace_vars_and_dims(variables, coord_names, inplace=inplace) def swap_dims(self, dims_dict, inplace=False): """Returns a new object with swapped dimensions. Parameters ---------- dims_dict : dict-like Dictionary whose keys are current dimension names and whose values are new names. Each value must already be a variable in the dataset. inplace : bool, optional If True, swap dimensions in-place. Otherwise, return a new dataset object. Returns ------- renamed : Dataset Dataset with swapped dimensions. See Also -------- Dataset.rename DataArray.swap_dims """ for k, v in dims_dict.items(): if k not in self.dims: raise ValueError('cannot swap from dimension %r because it is ' 'not an existing dimension' % k) if self.variables[v].dims != (k,): raise ValueError('replacement dimension %r is not a 1D ' 'variable along the old dimension %r' % (v, k)) result_dims = set(dims_dict.get(dim, dim) for dim in self.dims) variables = OrderedDict() coord_names = self._coord_names.copy() coord_names.update(dims_dict.values()) for k, v in iteritems(self.variables): dims = tuple(dims_dict.get(dim, dim) for dim in v.dims) var = v.to_coord() if k in result_dims else v.to_variable() var.dims = dims variables[k] = var return self._replace_vars_and_dims(variables, coord_names, inplace=inplace) def update(self, other, inplace=True): """Update this dataset's variables with those from another dataset. Parameters ---------- other : Dataset or castable to Dataset Dataset or variables with which to update this dataset. 
inplace : bool, optional If True, merge the other dataset into this dataset in-place. Otherwise, return a new dataset object. Returns ------- updated : Dataset Updated dataset. Raises ------ ValueError If any dimensions would have inconsistent sizes in the updated dataset. """ return self.merge( other, inplace=inplace, overwrite_vars=list(other), join='left') def merge(self, other, inplace=False, overwrite_vars=set(), compat='broadcast_equals', join='outer'): """Merge the arrays of two datasets into a single dataset. This method generally not allow for overriding data, with the exception of attributes, which are ignored on the second dataset. Variables with the same name are checked for conflicts via the equals or identical methods. Parameters ---------- other : Dataset or castable to Dataset Dataset or variables to merge with this dataset. inplace : bool, optional If True, merge the other dataset into this dataset in-place. Otherwise, return a new dataset object. overwrite_vars : str or sequence, optional If provided, update variables of these name(s) without checking for conflicts in this dataset. compat : {'broadcast_equals', 'equals', 'identical'}, optional String indicating how to compare variables of the same name for potential conflicts: - 'broadcast_equals': all values must be equal when variables are broadcast against each other to ensure common dimensions. - 'equals': all values and dimensions must be the same. - 'identical': all values, dimensions and attributes must be the same. join : {'outer', 'inner', 'left', 'right'}, optional Method for joining ``self`` and ``other`` along shared dimensions: - 'outer': use the union of the indexes - 'inner': use the intersection of the indexes - 'left': use indexes from ``self`` - 'right': use indexes from ``other`` Returns ------- merged : Dataset Merged dataset. Raises ------ ValueError If any variables conflict (see ``compat``). 
""" if compat not in ['broadcast_equals', 'equals', 'identical']: raise ValueError("compat=%r invalid: must be 'broadcast_equals', " "'equals' or 'identical'" % compat) if isinstance(overwrite_vars, basestring): overwrite_vars = [overwrite_vars] overwrite_vars = set(overwrite_vars) merge = _merge_dataset if isinstance(other, Dataset) else _merge_dict replace_vars, new_vars, new_coord_names = merge( self, other, overwrite_vars, compat=compat, join=join) newly_coords = new_coord_names & (set(self) - set(self.coords)) no_longer_coords = set(self.coords) & (set(new_vars) - new_coord_names) ambiguous_coords = (newly_coords | no_longer_coords) - overwrite_vars if ambiguous_coords: raise ValueError('cannot merge: the following variables are ' 'coordinates on one dataset but not the other: %s' % list(ambiguous_coords)) obj = self if inplace else self.copy() obj._update_vars_and_coords(replace_vars, new_coord_names) return obj def _assert_all_in_dataset(self, names, virtual_okay=False): bad_names = set(names) - set(self._variables) if virtual_okay: bad_names -= self.virtual_variables if bad_names: raise ValueError('One or more of the specified variables ' 'cannot be found in this dataset') def drop(self, labels, dim=None): """Drop variables or index labels from this dataset. If a variable corresponding to a dimension is dropped, all variables that use that dimension are also dropped. Parameters ---------- labels : str Names of variables or index labels to drop. dim : None or str, optional Dimension along which to drop index labels. By default (if ``dim is None``), drops variables rather than index labels. 
def _drop_vars(self, names):
    """Drop the named variables, plus every variable that uses one of
    their dimensions, and return the reduced dataset."""
    self._assert_all_in_dataset(names)
    to_remove = set(names)
    # Any variable whose dims intersect the dropped names goes too.
    to_remove |= set(k for k, v in iteritems(self._variables)
                     if any(name in v.dims for name in names))
    variables = OrderedDict((k, v) for k, v in iteritems(self._variables)
                            if k not in to_remove)
    coord_names = set(k for k in self._coord_names if k in variables)
    return self._replace_vars_and_dims(variables, coord_names)

def drop_vars(self, *names):  # pragma: no cover
    # Deprecated alias retained for backwards compatibility.
    warnings.warn('the Dataset method `drop_vars` has been deprecated; '
                  'use `drop` instead',
                  FutureWarning, stacklevel=2)
    return self.drop(names)

def transpose(self, *dims):
    """Return a new Dataset object with all array dimensions transposed.

    Although the order of dimensions on each array will change, the
    dataset dimensions themselves will remain in fixed (sorted) order.

    Parameters
    ----------
    *dims : str, optional
        By default, reverse the dimensions on each array. Otherwise,
        reorder the dimensions to this order.

    Returns
    -------
    transposed : Dataset
        Each array in the dataset (including coordinates) will be
        transposed to the given order.

    Notes
    -----
    Although this operation returns a view of each array's data, it is not
    lazy -- the data will be fully loaded into memory.

    See Also
    --------
    numpy.transpose
    DataArray.transpose
    """
    # When dims are given they must be an exact permutation of the
    # dataset's dimensions.
    if dims and set(dims) != set(self.dims):
        raise ValueError('arguments to transpose (%s) must be '
                         'permuted dataset dimensions (%s)'
                         % (dims, tuple(self.dims)))
    ds = self.copy()
    for name, var in iteritems(self._variables):
        # An empty `dims` gives an empty var_dims, i.e. a full reversal.
        var_dims = tuple(dim for dim in dims if dim in var.dims)
        ds._variables[name] = var.transpose(*var_dims)
    return ds

@property
def T(self):
    # Shorthand for a full transpose, mirroring numpy's ndarray.T.
    return self.transpose()
Parameters ---------- dim : None or str or tuple of str, optional Selects a subset of the length one dimensions. If a dimension is selected with length greater than one, an error is raised. If None, all length one dimensions are squeezed. Returns ------- squeezed : Dataset This dataset, but with with all or a subset of the dimensions of length 1 removed. Notes ----- Although this operation returns a view of each variable's data, it is not lazy -- all variable data will be fully loaded. See Also -------- numpy.squeeze """ return common.squeeze(self, self.dims, dim) def dropna(self, dim, how='any', thresh=None, subset=None): """Returns a new dataset with dropped labels for missing values along the provided dimension. Parameters ---------- dim : str Dimension along which to drop missing values. Dropping along multiple dimensions simultaneously is not yet supported. how : {'any', 'all'}, optional * any : if any NA values are present, drop that label * all : if all values are NA, drop that label thresh : int, default None If supplied, require this many non-NA values. subset : sequence, optional Subset of variables to check for missing values. By default, all variables in the dataset are checked. Returns ------- Dataset """ # TODO: consider supporting multiple dimensions? Or not, given that # there are some ugly edge cases, e.g., pandas's dropna differs # depending on the order of the supplied axes. 
if dim not in self.dims: raise ValueError('%s must be a single dataset dimension' % dim) if subset is None: subset = list(self.data_vars) count = np.zeros(self.dims[dim], dtype=np.int64) size = 0 for k in subset: array = self._variables[k] if dim in array.dims: dims = [d for d in array.dims if d != dim] count += array.count(dims) size += np.prod([self.dims[d] for d in dims]) if thresh is not None: mask = count >= thresh elif how == 'any': mask = count == size elif how == 'all': mask = count > 0 elif how is not None: raise ValueError('invalid how option: %s' % how) else: raise TypeError('must specify how or thresh') return self.isel(**{dim: mask}) def fillna(self, value): """Fill missing values in this object. This operation follows the normal broadcasting and alignment rules that xray uses for binary arithmetic, except the result is aligned to this object (``join='left'``) instead of aligned to the intersection of index coordinates (``join='inner'``). Parameters ---------- value : scalar, ndarray, DataArray, dict or Dataset Used to fill all matching missing values in this dataset's data variables. Scalars, ndarrays or DataArrays arguments are used to fill all data with aligned coordinates (for DataArrays). Dictionaries or datasets match data variables and then align coordinates if necessary. Returns ------- Dataset """ return self._fillna(value) def reduce(self, func, dim=None, keep_attrs=False, numeric_only=False, allow_lazy=False, **kwargs): """Reduce this dataset by applying `func` along some dimension(s). Parameters ---------- func : function Function which can be called in the form `f(x, axis=axis, **kwargs)` to return the result of reducing an np.ndarray over an integer valued axis. dim : str or sequence of str, optional Dimension(s) over which to apply `func`. By default `func` is applied over all dimensions. keep_attrs : bool, optional If True, the datasets's attributes (`attrs`) will be copied from the original object to the new one. 
If False (default), the new object will be returned without attributes. numeric_only : bool, optional If True, only apply ``func`` to variables with a numeric dtype. **kwargs : dict Additional keyword arguments passed on to ``func``. Returns ------- reduced : Dataset Dataset with this object's DataArrays replaced with new DataArrays of summarized data and the indicated dimension(s) removed. """ if isinstance(dim, basestring): dims = set([dim]) elif dim is None: dims = set(self.dims) else: dims = set(dim) _assert_empty([dim for dim in dims if dim not in self.dims], 'Dataset does not contain the dimensions: %s') variables = OrderedDict() for name, var in iteritems(self._variables): reduce_dims = [dim for dim in var.dims if dim in dims] if reduce_dims or not var.dims: if name not in self.coords: if (not numeric_only or np.issubdtype(var.dtype, np.number) or var.dtype == np.bool_): if len(reduce_dims) == 1: # unpack dimensions for the benefit of functions # like np.argmin which can't handle tuple arguments reduce_dims, = reduce_dims elif len(reduce_dims) == var.ndim: # prefer to aggregate over axis=None rather than # axis=(0, 1) if they will be equivalent, because # the former is often more efficient reduce_dims = None variables[name] = var.reduce(func, dim=reduce_dims, keep_attrs=keep_attrs, allow_lazy=allow_lazy, **kwargs) else: variables[name] = var coord_names = set(k for k in self.coords if k in variables) attrs = self.attrs if keep_attrs else None return self._replace_vars_and_dims(variables, coord_names, attrs) def apply(self, func, keep_attrs=False, args=(), **kwargs): """Apply a function over the data variables in this dataset. Parameters ---------- func : function Function which can be called in the form `f(x, **kwargs)` to transform each DataArray `x` in this dataset into another DataArray. keep_attrs : bool, optional If True, the dataset's attributes (`attrs`) will be copied from the original object to the new one. 
If False, the new object will be returned without attributes. args : tuple, optional Positional arguments passed on to `func`. **kwargs : dict Keyword arguments passed on to `func`. Returns ------- applied : Dataset Resulting dataset from applying ``func`` over each data variable. """ variables = OrderedDict( (k, maybe_wrap_array(v, func(v, *args, **kwargs))) for k, v in iteritems(self.data_vars)) attrs = self.attrs if keep_attrs else None return type(self)(variables, attrs=attrs) def assign(self, **kwargs): """Assign new data variables to a Dataset, returning a new object with all the original variables in addition to the new ones. Parameters ---------- kwargs : keyword, value pairs keywords are the variables names. If the values are callable, they are computed on the Dataset and assigned to new data variables. If the values are not callable, (e.g. a DataArray, scalar, or array), they are simply assigned. Returns ------- ds : Dataset A new Dataset with the new variables in addition to all the existing variables. Notes ----- Since ``kwargs`` is a dictionary, the order of your arguments may not be preserved, and so the order of the new variables is not well defined. Assigning multiple variables within the same ``assign`` is possible, but you cannot reference other variables created within the same ``assign`` call. See Also -------- pandas.DataFrame.assign """ data = self.copy() # do all calculations first... results = data._calc_assign_results(kwargs) # ... and then assign data.update(results) return data def to_array(self, dim='variable', name=None): """Convert this dataset into an xray.DataArray The data variables of this dataset will be broadcast against each other and stacked along the first axis of the new array. All coordinates of this dataset will remain coordinates. Parameters ---------- dim : str, optional Name of the new dimension. name : str, optional Name of the new data array. 
Returns ------- array : xray.DataArray """ from .dataarray import DataArray data_vars = [self.variables[k] for k in self.data_vars] broadcast_vars = broadcast_variables(*data_vars) data = ops.stack([b.data for b in broadcast_vars], axis=0) coords = dict(self.coords) coords[dim] = list(self.data_vars) dims = (dim,) + broadcast_vars[0].dims return DataArray(data, coords, dims, attrs=self.attrs, name=name) def _to_dataframe(self, ordered_dims): columns = [k for k in self if k not in self.dims] data = [self._variables[k].expand_dims(ordered_dims).values.reshape(-1) for k in columns] index = self.coords.to_index(ordered_dims) return pd.DataFrame(OrderedDict(zip(columns, data)), index=index) def to_dataframe(self): """Convert this dataset into a pandas.DataFrame. Non-index variables in this dataset form the columns of the DataFrame. The DataFrame is be indexed by the Cartesian product of this dataset's indices. """ return self._to_dataframe(self.dims) @classmethod def from_dataframe(cls, dataframe): """Convert a pandas.DataFrame into an xray.Dataset Each column will be converted into an independent variable in the Dataset. If the dataframe's index is a MultiIndex, it will be expanded into a tensor product of one-dimensional indices (filling in missing values with NaN). This method will produce a Dataset very similar to that on which the 'to_dataframe' method was called, except with possibly redundant dimensions (since all dataset variables will have the same dimensionality). """ # TODO: Add an option to remove dimensions along which the variables # are constant, to enable consistent serialization to/from a dataframe, # even if some variables have different dimensionality. 
idx = dataframe.index obj = cls() if hasattr(idx, 'levels'): # it's a multi-index # expand the DataFrame to include the product of all levels full_idx = pd.MultiIndex.from_product(idx.levels, names=idx.names) dataframe = dataframe.reindex(full_idx) dims = [name if name is not None else 'level_%i' % n for n, name in enumerate(idx.names)] for dim, lev in zip(dims, idx.levels): obj[dim] = (dim, lev) shape = [lev.size for lev in idx.levels] else: if idx.size: dims = (idx.name if idx.name is not None else 'index',) obj[dims[0]] = (dims, idx) else: dims = [] shape = -1 for name, series in iteritems(dataframe): data = series.values.reshape(shape) obj[name] = (dims, data) return obj @staticmethod def _unary_op(f): @functools.wraps(f) def func(self, *args, **kwargs): ds = self.coords.to_dataset() for k in self.data_vars: ds._variables[k] = f(self._variables[k], *args, **kwargs) return ds return func @staticmethod def _binary_op(f, reflexive=False, join='inner', drop_na_vars=True): @functools.wraps(f) def func(self, other): if isinstance(other, groupby.GroupBy): return NotImplemented if hasattr(other, 'indexes'): self, other = align(self, other, join=join, copy=False) empty_indexes = [d for d, s in self.dims.items() if s == 0] if empty_indexes: raise ValueError('no overlapping labels for some ' 'dimensions: %s' % empty_indexes) g = f if not reflexive else lambda x, y: f(y, x) ds = self._calculate_binary_op(g, other, drop_na_vars=drop_na_vars) return ds return func @staticmethod def _inplace_binary_op(f): @functools.wraps(f) def func(self, other): if isinstance(other, groupby.GroupBy): raise TypeError('in-place operations between a Dataset and ' 'a grouped object are not permitted') if hasattr(other, 'indexes'): other = other.reindex_like(self, copy=False) # we don't want to actually modify arrays in-place g = ops.inplace_to_noninplace_op(f) ds = self._calculate_binary_op(g, other, inplace=True) self._replace_vars_and_dims(ds._variables, ds._coord_names, ds._attrs, 
inplace=True) return self return func def _calculate_binary_op(self, f, other, inplace=False, drop_na_vars=True): def apply_over_both(lhs_data_vars, rhs_data_vars, lhs_vars, rhs_vars): dest_vars = OrderedDict() performed_op = False for k in lhs_data_vars: if k in rhs_data_vars: dest_vars[k] = f(lhs_vars[k], rhs_vars[k]) performed_op = True elif inplace: raise ValueError( 'datasets must have the same data variables ' 'for in-place arithmetic operations: %s, %s' % (list(lhs_data_vars), list(rhs_data_vars))) elif not drop_na_vars: # this shortcuts left alignment of variables for fillna dest_vars[k] = lhs_vars[k] if not performed_op: raise ValueError( 'datasets have no overlapping data variables: %s, %s' % (list(lhs_data_vars), list(rhs_data_vars))) return dest_vars if utils.is_dict_like(other) and not isinstance(other, Dataset): # can't use our shortcut of doing the binary operation with # Variable objects, so apply over our data vars instead. new_data_vars = apply_over_both(self.data_vars, other, self.data_vars, other) return Dataset(new_data_vars) other_coords = getattr(other, 'coords', None) ds = self.coords.merge(other_coords) if isinstance(other, Dataset): new_vars = apply_over_both(self.data_vars, other.data_vars, self.variables, other.variables) else: other_variable = getattr(other, 'variable', other) new_vars = OrderedDict((k, f(self.variables[k], other_variable)) for k in self.data_vars) ds._variables.update(new_vars) return ds ops.inject_all_ops_and_reduce_methods(Dataset, array_only=False)
Java
# SPDX-License-Identifier: Apache-2.0

# This file contains boards in Zephyr that have been replaced with a new
# board name.
# This allows the system to automatically change the board while at the same
# time printing a warning to the user that the board name is deprecated.
#
# To add a board rename, add a line in the following format:
# set(<old_board_name>_DEPRECATED <new_board_name>)
set(bl5340_dvk_cpuappns_DEPRECATED bl5340_dvk_cpuapp_ns)
set(mps2_an521_nonsecure_DEPRECATED mps2_an521_ns)
set(musca_b1_nonsecure_DEPRECATED musca_b1_ns)
set(musca_s1_nonsecure_DEPRECATED musca_s1_ns)
set(nrf5340dk_nrf5340_cpuappns_DEPRECATED nrf5340dk_nrf5340_cpuapp_ns)
set(nrf9160dk_nrf9160ns_DEPRECATED nrf9160dk_nrf9160_ns)
set(circuitdojo_feather_nrf9160ns_DEPRECATED circuitdojo_feather_nrf9160_ns)
set(nrf9160_innblue21ns_DEPRECATED nrf9160_innblue21_ns)
set(nrf9160_innblue22ns_DEPRECATED nrf9160_innblue22_ns)
set(sparkfun_thing_plus_nrf9160ns_DEPRECATED sparkfun_thing_plus_nrf9160_ns)
set(thingy53_nrf5340_cpuappns_DEPRECATED thingy53_nrf5340_cpuapp_ns)
Java
<?php
App::uses('AppModel', 'Model');
/**
 * ProdCodeDivn Model
 *
 * Lookup model for product-code divisions (CakePHP 2.x conventions). Each
 * division belongs to one ProdCodeSection; a hasMany link to ProdCodeGroup
 * was baked but is currently disabled (see commented block below).
 *
 * @property ProdCodeSection $ProdCodeSection
 * @property ProdCodeGroup $ProdCodeGroup
 */
class ProdCodeDivn extends AppModel {

/**
 * Display field
 *
 * Column used to represent a record in lists (e.g. find('list')) —
 * the English description of the division.
 *
 * @var string
 */
	public $displayField = 'divn_desc_eng';

/**
 * Validation rules
 *
 * prod_code_section_id: required non-empty foreign key.
 * divn_code: must be numeric.
 *
 * @var array
 */
	public $validate = array(
		'prod_code_section_id' => array(
			'notempty' => array(
				'rule' => array('notempty'),
				//'message' => 'Your custom message here',
				//'allowEmpty' => false,
				//'required' => false,
				//'last' => false, // Stop validation after this rule
				//'on' => 'create', // Limit validation to 'create' or 'update' operations
			),
		),
		'divn_code' => array(
			'numeric' => array(
				'rule' => array('numeric'),
				//'message' => 'Your custom message here',
				//'allowEmpty' => false,
				//'required' => false,
				//'last' => false, // Stop validation after this rule
				//'on' => 'create', // Limit validation to 'create' or 'update' operations
			),
		),
	);

	//The Associations below have been created with all possible keys, those that are not needed can be removed

/**
 * belongsTo associations
 *
 * Parent section of this division.
 *
 * @var array
 */
	public $belongsTo = array(
		'ProdCodeSection' => array(
			'className' => 'ProdCodeSection',
			'foreignKey' => 'prod_code_section_id',
			'conditions' => '',
			'fields' => '',
			'order' => ''
		)
	);

/**
 * hasMany associations
 *
 * NOTE(review): intentionally disabled; presumably a division owns many
 * ProdCodeGroup rows — confirm before re-enabling.
 *
 * @var array
 */
/*	public $hasMany = array(
		'ProdCodeGroup' => array(
			'className' => 'ProdCodeGroup',
			'foreignKey' => 'prod_code_divn_id',
			'dependent' => false,
			'conditions' => '',
			'fields' => '',
			'order' => '',
			'limit' => '',
			'offset' => '',
			'exclusive' => '',
			'finderQuery' => '',
			'counterQuery' => ''
		)
	);*/
}
Java
/*- * See the file LICENSE for redistribution information. * * Copyright (c) 2002-2010 Oracle. All rights reserved. * * $Id: ReplicaSyncupReader.java,v 1.4 2010/01/11 20:00:48 linda Exp $ */ package com.sleepycat.je.rep.stream; import static com.sleepycat.je.utilint.DbLsn.NULL_LSN; import java.io.IOException; import java.nio.ByteBuffer; import java.util.logging.Level; import java.util.logging.Logger; import com.sleepycat.je.DatabaseException; import com.sleepycat.je.EnvironmentFailureException; import com.sleepycat.je.dbi.EnvironmentImpl; import com.sleepycat.je.log.LogEntryType; import com.sleepycat.je.log.entry.LogEntry; import com.sleepycat.je.recovery.CheckpointEnd; import com.sleepycat.je.rep.impl.node.NameIdPair; import com.sleepycat.je.rep.vlsn.VLSNIndex; import com.sleepycat.je.rep.vlsn.VLSNRange; import com.sleepycat.je.txn.TxnCommit; import com.sleepycat.je.utilint.LoggerUtils; import com.sleepycat.je.utilint.VLSN; /** * The ReplicaSyncupReader scans the log backwards for requested log entries. * The reader must track whether it has passed a checkpoint, and therefore * can not used the vlsn index to skip over entries. * * The ReplicaSyncupReader is not thread safe, and can only be used * serially. It will stop at the finishLsn, which should be set using the * GlobalCBVLSN. */ public class ReplicaSyncupReader extends VLSNReader { /* True if this particular record retrieval is for a syncable record. */ private boolean syncableSearch; private final LogEntry ckptEndLogEntry = LogEntryType.LOG_CKPT_END.getNewLogEntry(); private final LogEntry commitLogEntry = LogEntryType.LOG_TXN_COMMIT.getNewLogEntry(); /* * SearchResults retains the information as to whether the found * matchpoint is valid. 
*/ private final MatchpointSearchResults searchResults; private final Logger logger; public ReplicaSyncupReader(EnvironmentImpl envImpl, VLSNIndex vlsnIndex, long endOfLogLsn, int readBufferSize, NameIdPair nameIdPair, VLSN startVLSN, long finishLsn, MatchpointSearchResults searchResults) throws IOException, DatabaseException { /* * If we go backwards, endOfFileLsn and startLsn must not be null. * Make them the same, so we always start at the same very end. */ super(envImpl, vlsnIndex, false, // forward endOfLogLsn, readBufferSize, nameIdPair, finishLsn); initScan(startVLSN, endOfLogLsn); this.searchResults = searchResults; logger = LoggerUtils.getLogger(getClass()); } /** * Set up the ReplicaSyncupReader to start scanning from this VLSN. * @throws IOException */ private void initScan(VLSN startVLSN, long endOfLogLsn) throws IOException { if (startVLSN.equals(VLSN.NULL_VLSN)) { throw EnvironmentFailureException.unexpectedState ("ReplicaSyncupReader start can't be NULL_VLSN"); } startLsn = endOfLogLsn; assert startLsn != NULL_LSN; /* * Flush the log so that syncup can assume that all log entries that * are represented in the VLSNIndex are safely out of the log buffers * and on disk. Simplifies this reader, so it can use the regular * ReadWindow, which only works on a file. */ envImpl.getLogManager().flush(); window.initAtFileStart(startLsn); currentEntryPrevOffset = window.getEndOffset(); currentEntryOffset = window.getEndOffset(); currentVLSN = startVLSN; } /** * Backward scanning for the replica's part in syncup. */ public OutputWireRecord scanBackwards(VLSN vlsn) throws DatabaseException { syncableSearch = false; VLSNRange range = vlsnIndex.getRange(); if (vlsn.compareTo(range.getFirst()) < 0) { /* * The requested VLSN is before the start of our range, we don't * have this record. */ return null; } currentVLSN = vlsn; if (readNextEntry()) { return currentFeedRecord; } return null; } /** * Backward scanning for finding an earlier candidate syncup matchpoint. 
*/ public OutputWireRecord findPrevSyncEntry() throws DatabaseException { currentFeedRecord = null; syncableSearch = true; /* Start by looking at the entry before the current record. */ currentVLSN = currentVLSN.getPrev(); VLSNRange range = vlsnIndex.getRange(); if (currentVLSN.compareTo(range.getFirst()) < 0) { /* * We've walked off the end of the contiguous VLSN range. */ return null; } if (readNextEntry() == false) { /* * We scanned all the way to the front of the log, no * other sync-able entry found. */ return null; } assert LogEntryType.isSyncPoint(currentFeedRecord.getEntryType()) : "Unexpected log type= " + currentFeedRecord; return currentFeedRecord; } /** * @throw an EnvironmentFailureException if we were scanning for a * particular VLSN and we have passed it by. */ private void checkForPassingTarget(int compareResult) { if (compareResult < 0) { /* Hey, we passed the VLSN we wanted. */ throw EnvironmentFailureException.unexpectedState ("want to read " + currentVLSN + " but reader at " + currentEntryHeader.getVLSN()); } } /** * Return true for ckpt entries, for syncable entries, and if we're in * specific vlsn scan mode, any replicated entry. There is an additional * level of filtering in processEntry. */ @Override protected boolean isTargetEntry() throws DatabaseException { if (logger.isLoggable(Level.FINEST)) { LoggerUtils.finest(logger, envImpl, " isTargetEntry " + currentEntryHeader); } nScanned++; /* Skip invisible entries. */ if (currentEntryHeader.isInvisible()) { return false; } byte currentType = currentEntryHeader.getType(); /* * Return true if this entry is replicated. All entries need to be * perused by processEntry, when we are doing a vlsn based search, * even if they are not a sync point, because: * (a) If this is a vlsn-based search, it's possible that the replica * and feeder are mismatched. The feeder will only propose a sync type * entry as a matchpoint but it might be that the replica has a non- * sync entry at that vlsn. 
* (b) We need to note passed commits in processEntry. */ if (entryIsReplicated()) { if (syncableSearch) { if (LogEntryType.isSyncPoint(currentType)) { return true; } currentVLSN = currentEntryHeader.getVLSN().getPrev(); } else { return true; } } /* * We'll also need to read checkpoint end records to record their * presence. */ if (LogEntryType.LOG_CKPT_END.equalsType(currentType)) { return true; } return false; } /** * ProcessEntry does additional filtering before deciding whether to * return an entry as a candidate for matching. * * If this is a record we are submitting as a matchpoint candidate, * instantiate a WireRecord to house this log entry. If this is a * non-replicated entry or a txn end that follows the candidate matchpoint, * record whatever status we need to, but don't use it for comparisons. * * For example, suppose the log is like this:f * * VLSN entry * 10 LN * 11 commit * 12 LN * -- ckpt end * 13 commit * 14 abort * * And that the master only has VLSNs 1-12. The replica will suggest vlsn * 14 as the first matchpoint. The feeder will counter with a suggestion * of vlsn 11, since it doe not have vlsn 14. * * At that point, the ReplicaSyncupReader will scan backwards in the log, * looking for vlsn 11. Although the reader should only return an entry * when it gets to vlsn 11. the reader must process commits and ckpts that * follow 11, so that they can be recorded in the searchResults, so the * number of rolled back commits can be accurately reported. */ @Override protected boolean processEntry(ByteBuffer entryBuffer) { if (logger.isLoggable(Level.FINEST)) { LoggerUtils.finest(logger, envImpl, " syncup reader saw " + currentEntryHeader); } byte currentType = currentEntryHeader.getType(); /* * CheckpointEnd entries are tracked in order to see if a rollback * must be done, but are not returned as possible matchpoints. 
*/ if (LogEntryType.LOG_CKPT_END.equalsType(currentType)) { /* * Read the entry, which both lets us decipher its contents and * also advances the file reader position. */ ckptEndLogEntry.readEntry(currentEntryHeader, entryBuffer, true /*readFullItem*/); if (logger.isLoggable(Level.FINEST)) { LoggerUtils.finest(logger, envImpl, " syncup reader read " + currentEntryHeader + ckptEndLogEntry); } if (((CheckpointEnd) ckptEndLogEntry.getMainItem()). getCleanedFilesToDelete()) { searchResults.notePassedCheckpointEnd(); } return false; } /* * Setup the log entry as a wire record so we can compare it to * the entry from the feeder as we look for a matchpoint. Do this * before we change positions on the entry buffer by reading it. */ ByteBuffer buffer = entryBuffer.slice(); buffer.limit(currentEntryHeader.getItemSize()); currentFeedRecord = new OutputWireRecord(currentEntryHeader, buffer); /* * All commit records must be tracked to figure out if we've exceeded * the txn rollback limit. For reporting reasons, we'll need to * unmarshal the log entry, so we can read the timestamp in the commit * record. */ if (LogEntryType.LOG_TXN_COMMIT.equalsType(currentType)) { commitLogEntry.readEntry(currentEntryHeader, entryBuffer, true /*readFullItem*/); TxnCommit commit = (TxnCommit) commitLogEntry.getMainItem(); searchResults.notePassedCommits(commit.getTime(), commit.getId(), currentEntryHeader.getVLSN(), getLastLsn()); if (logger.isLoggable(Level.FINEST)) { LoggerUtils.finest(logger, envImpl, "syncup reader read " + currentEntryHeader + commitLogEntry); } } else { entryBuffer.position(entryBuffer.position() + currentEntryHeader.getItemSize()); } if (syncableSearch) { return true; } /* We're looking for a particular VLSN. */ int compareResult = currentEntryHeader.getVLSN().compareTo(currentVLSN); checkForPassingTarget(compareResult); /* return true if this is the entry we want. */ return (compareResult == 0); } }
Java
package so.modernized.whip import java.util.{Set => JSet} import java.net.{URI => JURI} import com.cambridgesemantics.anzo.unstructured.graphsummarization.PatternSolutionExtras import com.cambridgesemantics.anzo.unstructured.graphsummarization.XMLUnapplicable._ import so.modernized.psl_scala.primitives.PSLUnapplicable._ import so.modernized.psl_scala.primitives.{PSLUnapplicable, PSLVar} import so.modernized.whip.URIUniqueId._ import so.modernized.whip.sparql.QueryIterator import so.modernized.whip.util._ import scala.util.{Failure, Success, Try} import scala.collection.JavaConverters._ import scala.collection.mutable import com.cambridgesemantics.anzo.utilityservices.common.EncodingUtils import edu.umd.cs.psl.database.loading.{Updater, Inserter} import edu.umd.cs.psl.database._ import edu.umd.cs.psl.model.argument._ import edu.umd.cs.psl.model.atom._ import edu.umd.cs.psl.model.predicate.{SpecialPredicate, FunctionalPredicate, Predicate, StandardPredicate} import org.openanzo.client.IAnzoClient import org.openanzo.rdf.{URI => AnzoURI, Statement, Value} class TypedStandardPredicate[A, B](name:String, val uriType:AnzoURI, val domain:AnzoURI, val range:AnzoURI)(implicit aEv:PSLUnapplicable[A], bEv:PSLUnapplicable[B]) extends StandardPredicate(name, Array(aEv.argType, bEv.argType)) /** * A Variable that is typed by the rdf:class of the arguments that it can take (determined by @uriType) */ case class TypedVariable(name:String, uriType:AnzoURI) extends Variable(name) { override def toString = name } object TypedVariable { def tv(name:String, uri:AnzoURI) = new TypedVariable(name, uri) } object PSLURIVar { def unapply(t:Term) = t match { case v:TypedVariable => Some(v) case _ => None } } /* class LazyResultList(iter:QueryIterator, varPos:Map[Variable, Int], val size:Int) extends ResultList { private val resStream = iter.flatten.toStream def get(resultNo: Int, `var`: Variable) = get(resultNo)(varPos(`var`)) def get(resultNo: Int): Array[GroundTerm] = resStream(resultNo) 
val getArity = 2 } */ class SparqlResultList(varPos:Map[Variable, Int]) extends mutable.ArrayBuffer[Array[GroundTerm]] with ResultList { override def +=(elem: Array[GroundTerm]) = { assert(elem.length == 2) super.+=(elem) } override def get(resultNo: Int, `var`: Variable): GroundTerm = this(resultNo)(varPos(`var`)) override def get(resultNo: Int): Array[GroundTerm] = this(resultNo) val getArity = 2 } class PSLSparqlDataStore(protected[whip] val anzo:IAnzoClient, keyFields:Set[AnzoURI]) extends DataStore { protected[whip] val observedPredicates = mutable.HashSet[StandardPredicate]() //mutable.HashMap[AnzoURI, StandardPredicate]() protected[whip] val targetPredicates = mutable.HashSet[StandardPredicate]() protected[whip] val variables = mutable.HashMap[String, TypedVariable]() override def registerPredicate(predicate: StandardPredicate): Unit = { predicate match { case tp:TypedStandardPredicate[_,_] => if(keyFields contains tp.uriType) { observedPredicates += tp } else { targetPredicates += tp } case s:StandardPredicate => require(predicate.getArity == 2) Try(EncodingUtils.uri(predicate.getName)) match { case Success(uri) if keyFields contains uri => observedPredicates += predicate case Success(uri) => targetPredicates += predicate case Failure(f) => throw new IllegalArgumentException("Expected a uri for predicate name, got " + predicate.getName) } } } def registerTypedVariable(v:TypedVariable): Unit = { variables += v.name -> v } override def getRegisteredPredicates: JSet[StandardPredicate] = (observedPredicates ++ targetPredicates).asJava override def getUniqueID(key: Any): UniqueID = key match { case uri:AnzoURI => new URIUniqueId(uri) case jUri:JURI => new URIUniqueId(EncodingUtils.uri(jUri.toString)) case str:String if Try(EncodingUtils.uri(str)).isSuccess => new URIUniqueId(EncodingUtils.uri(str)) case otw => throw new IllegalArgumentException("Expected a uri or uri string, received " + otw.toString) } def getDatabase(datasets:Set[AnzoURI], 
ontology:AnzoURI=null) = new PSLSparqlDatabase(this, datasets, ontology, variables.toMap) override def getUpdater(predicate: StandardPredicate, partition: Partition): Updater = ??? override def getInserter(predicate: StandardPredicate, partition: Partition): Inserter = ??? override def deletePartition(partition: Partition): Int = ??? override def getDatabase(write: Partition, read: Partition*): Database = ??? override def getDatabase(write: Partition, toClose: JSet[StandardPredicate], read: Partition*): Database = ??? override def close() {/*noOp*/} override def getNextPartition: Partition = ??? } class PSLSparqlDatabase(private val datastore:PSLSparqlDataStore, private val datasets:Set[AnzoURI], private val ontology:AnzoURI, variableMap:Map[String,TypedVariable]) extends Database { private val anzo = datastore.anzo private val cache = new AtomCache(this) private val observed = datastore.observedPredicates private val target = datastore.targetPredicates def getAtom(p:Predicate, arguments:GroundTerm*) = Option(cache.getCachedAtom(new QueryAtom(p, arguments:_*))) match { case Some(res) => res case None => p match { case tp:TypedStandardPredicate[_,_] => // TODO should this work for non-typed predicates? 
nothing else will val Seq(PSLURI(s), PSLURI(o)) = arguments // TODO expand for other options val value = if(anzo.serverQuery(null, null, datasets.asJava, s"ASK { <$s> <${tp.uriType}> <$o> }").getAskResults) 1.0 else 0.0 if(observed contains tp) { println("generating obs atom for " + (tp, arguments, value)) cache.instantiateObservedAtom(tp, arguments.toArray, value, Double.NaN) } else if(target contains tp) { if(value > 0.0) println("generating rv atom for " + (tp, arguments, value)) cache.instantiateRandomVariableAtom(tp, arguments.toArray, value, Double.NaN) } else { throw new IllegalArgumentException("Expected predicate to be registered as observed or target, but wasn't either") } case sp:SparqlPredicate => if(!sp.isComputed) sp.precompute(this) cache.instantiateObservedAtom(sp, arguments.toArray, sp.computeValue(new ReadOnlyDatabase(this), arguments:_*), Double.NaN) } } override def getRegisteredPredicates = datastore.getRegisteredPredicates override def getUniqueID(key: Any) = datastore.getUniqueID(key) override def getDataStore = datastore private val executeQ = """SELECT %s |WHERE { | %s |}""".stripMargin def executeQuery(query:DatabaseQuery) = { val f = query.getFormula val atoms = f.getAtoms(mutable.Set.empty[Atom].asJava).asScala val projected = (query.getProjectionSubset.asScala.toSet ++ f.collectVariables(new VariableTypeMap).asScala.keySet) -- query.getPartialGrounding.asScala.keySet val projectedBindings = mutable.ArrayBuffer[Variable]() val whereClauses = atoms.map { a => (a.getPredicate, a.getArguments) match { case (p:TypedStandardPredicate[_, _], Array(PSLVar(s), PSLVar(o))) if observed contains p => projectedBindings += s projectedBindings += o s"\t?$s <${p.uriType}> ?$o ." 
case (p:TypedStandardPredicate[_, _], Array(PSLVar(s), PSLVar(o))) if target contains p => val (sType, oType) = (s, o) match { case (PSLURIVar(su), PSLURIVar(ou)) => su.uriType -> ou.uriType case _ => p.domain -> p.range } projectedBindings += s projectedBindings += o Seq(s"\t?$s a <$sType> .", s"\t?$o a <$oType> .").mkString("\n") case (sp:SparqlPredicate, Array(PSLVar(s), PSLVar(o))) => if(!sp.isComputed) { sp.precompute(this) } s"?$s <${sp.predicate}> ?$o ." case (p:StandardPredicate, ts) => println ("observed " + observed + "\ntarget " + target) throw new IllegalArgumentException("Wasn't expecting " + (p, p.getClass, observed contains p, target contains p, ts.toSeq)) } }.mkString("\n") val Q = s"SELECT ${projectedBindings.map(v => "?" + v.getName).toSet.mkString(" ")}\nWHERE {\n$whereClauses\n}" println(f) println(projected) println(Q) val res = new SparqlResultList(projectedBindings.zipWithIndex.toMap) val q = anzo.serverQuery(null, null, datasets.asJava, Q).getSelectResults.asScala.foreach { ps => val m = ps.toMap res += projectedBindings.map(v => xml2Psl(m(v.getName))).toArray } res } override def close() {/*noOp*/} override def isClosed(predicate: StandardPredicate) = target contains predicate override def getAtomCache = cache override def commit(atom: RandomVariableAtom): Unit = { require(atom.getArity == 2) val p = EncodingUtils.uri(atom.getPredicate.getName) atom.getArguments match { case Array(PSLURI(s), PSLURI(o)) => val stmt = new Statement(s, p, o) val stmtVal = new Statement(s, EncodingUtils.uri(p.toString +"_value"), xmlWrap(atom.getValue)) val stmtConf = new Statement(s, EncodingUtils.uri(p.toString +"_confidence"), xmlWrap(atom.getConfidenceValue)) anzo.add(stmt, stmtVal, stmtConf) anzo.commit() anzo.updateRepository(true) case otw => ??? } } }
Java
using De.Osthus.Ambeth.Bytecode.Visitor;
using De.Osthus.Ambeth.Ioc.Annotation;
using De.Osthus.Ambeth.Log;
using De.Osthus.Ambeth.Merge;
using De.Osthus.Ambeth.Merge.Model;
using De.Osthus.Ambeth.Proxy;
using System;
using System.Collections.Generic;

namespace De.Osthus.Ambeth.Bytecode.Behavior
{
    /// <summary>
    /// Bytecode-enhancement behavior that adds the <see cref="IEnhancedType"/> marker
    /// (and, for entities, <see cref="IEntityMetaDataHolder"/>) to generated types.
    /// Only active when the current enhancement is driven by an entity or embedded
    /// enhancement hint; all other enhancement contexts pass through untouched.
    /// </summary>
    public class EnhancedTypeBehavior : AbstractBehavior
    {
        [LogInstance]
        public ILogger Log { private get; set; }

        // Injected by the IoC container; used to look up entity metadata for the
        // type currently being enhanced.
        [Autowired]
        public IEntityMetaDataProvider EntityMetaDataProvider { protected get; set; }

        /// <summary>
        /// Interfaces this behavior is able to weave onto enhanced types.
        /// </summary>
        public override Type[] GetEnhancements()
        {
            return new Type[] { typeof(IEnhancedType), typeof(IEntityMetaDataHolder) };
        }

        /// <summary>
        /// Wraps <paramref name="visitor"/> with the visitors needed for this
        /// enhancement, or returns it unchanged when neither an entity nor an
        /// embedded enhancement is in progress.
        /// </summary>
        public override IClassVisitor Extend(IClassVisitor visitor, IBytecodeBehaviorState state, IList<IBytecodeBehavior> remainingPendingBehaviors,
            IList<IBytecodeBehavior> cascadePendingBehaviors)
        {
            // Not an entity/embedded enhancement: nothing to contribute.
            if ((state.GetContext<EntityEnhancementHint>() == null && state.GetContext<EmbeddedEnhancementHint>() == null))
            {
                return visitor;
            }
            if (state.GetContext<EntityEnhancementHint>() != null)
            {
                // Entities additionally carry their metadata via IEntityMetaDataHolder.
                IEntityMetaData metaData = EntityMetaDataProvider.GetMetaData(state.OriginalType);
                visitor = new InterfaceAdder(visitor, typeof(IEntityMetaDataHolder));
                visitor = new EntityMetaDataHolderVisitor(visitor, metaData);
            }
            // Both entity and embedded types are tagged as enhanced.
            visitor = new InterfaceAdder(visitor, typeof(IEnhancedType));
            visitor = new GetBaseTypeMethodCreator(visitor);
            return visitor;
        }
    }
}
Java
#!/bin/bash
# Recursively delete the path given as $1, but only when exactly two
# arguments are supplied and the second is the confirmation token "adMin".
#
# Usage: <script> <path> adMin
#
# Fixes over the original:
#  * "$1" is now quoted — previously an unquoted $1 underwent word splitting
#    and glob expansion, so a path containing spaces or wildcard characters
#    could delete unintended files.
#  * "--" terminates option parsing so a path beginning with "-" cannot be
#    misread as an rm option.
#  * The legacy "x$2" = "xadMin" prefix idiom is unnecessary inside [[ ]],
#    which never misparses an operand as an operator.
if [[ $# -eq 2 && "$2" == "adMin" ]]; then
    /bin/rm -rf -- "$1"
fi
Java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jsecurity.authz.aop;

import org.jsecurity.aop.AnnotationMethodInterceptor;
import org.jsecurity.aop.MethodInvocation;
import org.jsecurity.authz.AuthorizationException;

import java.lang.annotation.Annotation;

/**
 * An <tt>AnnotationMethodInterceptor</tt> that asserts the calling code is authorized to execute the method
 * before allowing the invocation to continue.
 *
 * @author Les Hazlewood
 * @since 0.1
 */
public abstract class AuthorizingAnnotationMethodInterceptor extends AnnotationMethodInterceptor {

    /**
     * Constructs an interceptor that triggers on methods annotated with the given annotation type.
     *
     * @param annotationClass the annotation that marks methods requiring an authorization check
     */
    public AuthorizingAnnotationMethodInterceptor(Class<? extends Annotation> annotationClass) {
        super(annotationClass);
    }

    /**
     * Performs the authorization check first; only if it passes (i.e. does not throw) is the
     * underlying method invocation allowed to proceed. Any exception from
     * {@link #assertAuthorized} therefore vetoes the call.
     *
     * @param methodInvocation the intercepted method invocation
     * @return whatever the underlying invocation returns
     * @throws Throwable if authorization fails or the underlying invocation throws
     */
    public Object invoke(MethodInvocation methodInvocation) throws Throwable {
        assertAuthorized(methodInvocation);
        return methodInvocation.proceed();
    }

    /**
     * Subclasses decide, based on the annotation they handle, whether the current caller may
     * execute the given invocation; they must throw an {@link AuthorizationException} to deny it.
     *
     * @param mi the intercepted method invocation to authorize
     * @throws AuthorizationException if the caller is not permitted to invoke the method
     */
    public abstract void assertAuthorized(MethodInvocation mi) throws AuthorizationException;

}
Java
/* * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * The Apereo Foundation licenses this file to you under the Apache License, * Version 2.0, (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tle.web.workflow.soap; public interface TaskListSoapInterface { String getTaskFilterCounts(boolean ignoreZero); String[] getTaskFilterNames(); String getTaskList(String filterName, int start, int numResults) throws Exception; }
Java
package com.soulkey.calltalent.db; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import com.soulkey.calltalent.db.model.SettingModel; import com.soulkey.calltalent.db.populator.SettingPopulator; public final class DbOpenHelper extends SQLiteOpenHelper { public static final String DB_NAME = "calltalent.db"; private static final int DB_VERSION = 1; private static DbOpenHelper instance; public static DbOpenHelper getInstance(Context context) { if (null == instance) { instance = new DbOpenHelper(context); } return instance; } private DbOpenHelper(Context context) { super(context, DB_NAME, null, DB_VERSION); } @Override public void onCreate(SQLiteDatabase db) { db.execSQL(SettingModel.CREATE_TABLE); populateDb(db); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { } private void populateDb(SQLiteDatabase db) { SettingPopulator.populate(db); } }
Java
//////////////////////////////////////////////////////////////////////////// // Module : alife_human_object_handler.h // Created : 07.10.2005 // Modified : 07.10.2005 // Author : Dmitriy Iassenev // Description : ALife human object handler class //////////////////////////////////////////////////////////////////////////// #pragma once #include "alife_space.h" class CSE_ALifeItemWeapon; class CSE_ALifeInventoryItem; class CSE_ALifeGroupAbstract; class CSE_ALifeHumanAbstract; class CALifeHumanObjectHandler { public: typedef CSE_ALifeHumanAbstract object_type; private: object_type* m_object; public: IC CALifeHumanObjectHandler(object_type* object); IC object_type& object() const; public: u16 get_available_ammo_count(const CSE_ALifeItemWeapon* weapon, ALife::OBJECT_VECTOR& objects); u16 get_available_ammo_count(const CSE_ALifeItemWeapon* weapon, ALife::ITEM_P_VECTOR& items, ALife::OBJECT_VECTOR* objects = 0); void attach_available_ammo(CSE_ALifeItemWeapon* weapon, ALife::ITEM_P_VECTOR& items, ALife::OBJECT_VECTOR* objects = 0); bool can_take_item(CSE_ALifeInventoryItem* inventory_item); void collect_ammo_boxes(); public: void detach_all(bool fictitious); void update_weapon_ammo(); void process_items(); CSE_ALifeDynamicObject* best_detector(); CSE_ALifeItemWeapon* best_weapon(); public: int choose_equipment(ALife::OBJECT_VECTOR* objects = 0); int choose_weapon(const ALife::EWeaponPriorityType& weapon_priority_type, ALife::OBJECT_VECTOR* objects = 0); int choose_food(ALife::OBJECT_VECTOR* objects = 0); int choose_medikit(ALife::OBJECT_VECTOR* objects = 0); int choose_detector(ALife::OBJECT_VECTOR* objects = 0); int choose_valuables(); bool choose_fast(); void choose_group(CSE_ALifeGroupAbstract* group_abstract); void attach_items(); }; #include "alife_human_object_handler_inline.h"
Java
package com.splinter.graphing;

import org.junit.Assert;
import org.junit.Test;

import java.util.HashMap;
import java.util.Map;

/**
 * Unit tests for the Splinter log-line builders (SLog and subclasses).
 * Each test pins the exact wire format: "$SPG$" prefix followed by
 * ";"-terminated key=value fields (+T task, +O operation, +M message type,
 * +OA alias, +C^ component override, +I^ instrumentation, user data last).
 */
public class SplinterLogTest {
    /** Disabling logging must yield an empty string; re-enabled in finally. */
    @Test
    public void testDisableLogs() {
        try {
            SLog.setEnabled(false);
            String expected = "";
            Assert.assertEquals(expected, new SLogStop("Coffee Time", "coffeeComplete")
                    .withOperationAlias("ensureCapacity")
                    .withComponentOverride("WaterReservoir")
                    .withUserData("size", "large")
                    .withInstrumentationOverride(0, null)
                    .toString());
        } finally {
            SLog.setEnabled(true);
        }
    }

    /** Odd/null varargs: missing keys become _MISSING_KEY_n, null values print as "null". */
    @Test
    public void testStaticUtilsVarArgs() {
        String expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=null;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null, null));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=null;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size", null));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=large;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null, "large"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=large;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null, "large", "newkey"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size", "large"));
    }

    /** Happy-path static helpers: call (S), start (A), stop (F) and broadcast variants. */
    @Test
    public void testStaticUtils() {
        String expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=A;size=large;";
        Assert.assertEquals(expected, SLogStart.log("Coffee Time", "selectCupSize", "size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=A;";
        Assert.assertEquals(expected, SLogStart.log("Coffee Time", "selectCupSize"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=F;size=large;";
        Assert.assertEquals(expected, SLogStop.log("Coffee Time", "selectCupSize", "size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=F;";
        Assert.assertEquals(expected, SLogStop.log("Coffee Time", "selectCupSize"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;+MC=1;size=large;";
        Assert.assertEquals(expected, SLogBroadcastSend.log("Coffee Time", "selectCupSize", "size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;+MC=1;";
        Assert.assertEquals(expected, SLogBroadcastSend.log("Coffee Time", "selectCupSize"));

        expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=A;+OA=selectCupSize;size=large;";
        Assert.assertEquals(expected, SLogBroadcastStart.log("Coffee Time", "bcastId", "selectCupSize", "size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=A;+OA=selectCupSize;";
        Assert.assertEquals(expected, SLogBroadcastStart.log("Coffee Time", "bcastId", "selectCupSize"));

        expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=F;+OA=selectCupSize;size=large;";
        Assert.assertEquals(expected, SLogBroadcastStop.log("Coffee Time", "bcastId", "selectCupSize", "size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=F;+OA=selectCupSize;";
        Assert.assertEquals(expected, SLogBroadcastStop.log("Coffee Time", "bcastId", "selectCupSize"));
    }

    /** Builder API: single pair, map of pairs, and many pairs accumulate in order. */
    @Test
    public void testSunnyDay() {
        String expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;";
        Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
                .withUserData("size", "large").toString());

        Map<String, String> userData = new HashMap<String, String>();
        userData.put("size", "large");
        Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
                .withUserData(userData).toString());

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;size1=large;size2=large;size3=large;size4=large;size5=large;";
        Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
                .withUserData("size", "large")
                .withUserData("size1", "large")
                .withUserData("size2", "large")
                .withUserData("size3", "large")
                .withUserData("size4", "large")
                .withUserData("size5", "large").toString());
    }

    /** Optional fields: instrumentation override (+I^) and alias/component overrides. */
    @Test
    public void testOptionalParams() {
        String expected = "$SPG$+T=Coffee Time;+O=pumpWater;+M=A;+I^=100ms;";
        Assert.assertEquals(expected, new SLogStart("Coffee Time", "pumpWater")
                .withInstrumentationOverride(100, SLog.TimeNotation.MILLIS)
                .toString());

        expected = "$SPG$+T=Coffee Time;+O=coffeeComplete;+M=F;+OA=ensureCapacity;+C^=WaterReservoir;";
        Assert.assertEquals(expected, new SLogStop("Coffee Time", "coffeeComplete")
                .withOperationAlias("ensureCapacity")
                .withComponentOverride("WaterReservoir")
                .toString());
    }

    /** Null task/operation/key degrade to _MISSING_* placeholders instead of failing. */
    @Test
    public void testMissingParams() {
        String expected = "$SPG$+T=_MISSING_TASK_;+O=_MISSING_OPERATION_;+M=S;";
        Assert.assertEquals(expected, new SLog(null, null, null)
                .toString());

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=large;";
        Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
                .withUserData(null, "large").toString());
    }

    /** escape() must backslash-escape newline, '=', ';' and '\' and pass null/empty through. */
    @Test
    public void testEscaping() {
        Assert.assertEquals("abcd", SLog.escape("abcd"));
        Assert.assertEquals("ab\\ncd", SLog.escape("ab\ncd"));
        Assert.assertNull(SLog.escape(null));
        Assert.assertEquals("", SLog.escape(""));
        Assert.assertEquals("ab\\=cd", SLog.escape("ab=cd"));
        Assert.assertEquals("ab\\;cd", SLog.escape("ab;cd"));
        Assert.assertEquals("ab\\\\cd", SLog.escape("ab\\cd"));
    }

    /** End-to-end: escaping applies to task, operation, alias, keys and values. */
    @Test
    public void testEscapingLog() {
        String expected = "$SPG$+T=file\\; opened;+O=\\\\open;+M=S;+OA=\\=1;r\\=sr=/Users/dimitarz/\\;filename.log;";
        Assert.assertEquals(expected, new SLog(null, null, null)
                .withUserData("r=sr", "/Users/dimitarz/;filename.log")
                .withOperation("\\open")
                .withOperationAlias("=1")
                .withTask("file; opened")
                .toString());
    }
}
Java
package io.zrz.graphql.core.decl;

import java.util.List;

import org.eclipse.jdt.annotation.Nullable;

import io.zrz.graphql.core.doc.GQLDirective;
import io.zrz.graphql.core.parser.GQLSourceLocation;

/**
 * Common contract for a parsed GraphQL schema declaration.
 *
 * The with* methods follow the immutable "wither" convention: each returns a
 * copy of the declaration with the given property replaced, leaving the
 * receiver unchanged.
 */
public interface GQLDeclaration {

  /** Documentation text attached to the declaration, or null if absent. */
  @Nullable
  String description();

  /** Dispatches this declaration to the matching visitor method. */
  <R> R apply(GQLDeclarationVisitor<R> visitor);

  /** Directives applied to this declaration (may be empty). */
  List<GQLDirective> directives();

  /** Source position of the declaration, or null when not tracked (e.g. built programmatically). */
  @Nullable
  GQLSourceLocation location();

  /** Returns a copy with the given description. */
  GQLDeclaration withDescription(String value);

  /** Returns a copy whose directives are replaced by the given elements. */
  GQLDeclaration withDirectives(GQLDirective... elements);

  /** Returns a copy whose directives are replaced by the given elements. */
  GQLDeclaration withDirectives(Iterable<? extends GQLDirective> elements);

  /** Returns a copy with the given source location. */
  GQLDeclaration withLocation(GQLSourceLocation value);
}
Java
/* * @class TableExamplesService */ export default class TableExamplesService { constructor($http) { this.$http = $http; } static getClassName() { return 'TableExamplesService'; } getClassName() { return TableExamplesService.getClassName(); } /* * @func getColumns * @desc getes a list of columns representing the dataset that * allows data tables to map the array of data to the table */ getColumns() { return this.$http.get('http://localhost:3001/api/DataTable/Columns/People'); } /* * @func addColumn * @desc adds a col * allows data tables to map the array of data to the table */ addColumn(item) { return this.$http.post('http://localhost:3001/api/DataTable/Columns/People', item); } /* * @func getData * @desc gets a list of items from the api */ getData() { return this.$http.get('http://localhost:3001/api/People'); } /* * @func addData * @desc adds an item to the api * @param item */ addData(item) { return this.$http.post('http://localhost:3001/api/People', item); } }
Java
/*
 * Copyright © 2016 - 2017 Dominik Szalai (emptulik@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cz.muni.fi.editor.support;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.core.annotation.AliasFor;
import org.springframework.security.test.context.support.WithSecurityContext;

/**
 * Test annotation that runs the annotated test (method or class) with a
 * security context for the given editor user, built by
 * {@link TestSecurityContextFactory}.
 *
 * {@code id} and {@code value} are mutual {@link AliasFor} pairs, so both
 * {@code @WithEditorUser(5L)} and {@code @WithEditorUser(id = 5L)} work.
 *
 * @author Dominik Szalai - emptulik at gmail.com on 10.8.2016.
 */
@Target({ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Inherited
@Documented
@WithSecurityContext(
        factory = TestSecurityContextFactory.class
)
public @interface WithEditorUser
{
    // the owner is always user with ID 1
    @AliasFor("value")
    long id() default 1L;

    @AliasFor("id")
    long value() default 1L;

    // when true the factory mocks the user instead of loading a real one —
    // NOTE(review): confirm exact semantics in TestSecurityContextFactory.
    boolean mock() default false;
}
Java
# Ceratium buceros (Zacharias) Schiller SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
# Dactylis glomerata var. hispanica VARIETY #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in Cat. Bot. 1:8. 1797 #### Original name null ### Remarks null
Java
# Andropogon exaltatus var. ambiguus VARIETY #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
# Sida decumbens f. suberecta Chodat & Hassl. FORM #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
# Cereus sublanatus Salm-Dyck SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
# Acianthera carinata (C.Schweinf.) Luer SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name Pleurothallis carinata C.Schweinf. ### Remarks null
Java
# Ramariopsis kunzei var. subasperata Corner, 1950 VARIETY #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in Monograph of Clavaria and allied Genera (Annals of Botany Memoirs No. 1) 700 (1950) #### Original name Ramariopsis kunzei var. subasperata Corner, 1950 ### Remarks null
Java
# Colpoda colpoda SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
# Cornus acuminata Hort. ex Lavallée SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
/*
 * Copyright (C) 2014-2015 Vy Nguyen
 * Github https://github.com/vy-nguyen/tvntd
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
#ifndef _CRYPTO_USER_CACHE_H_
#define _CRYPTO_USER_CACHE_H_

#include <crypto/user.h>

class AsyncKeyClient;
class KeyServiceClient;

// LRU cache of per-user crypto records layered over a Crypto module.
// The synchronous getters only serve what is already cached via lookup();
// the callback overloads go through the lookup_missed() hooks on a miss.
class UserLruCache : public LruTable
{
  public:
    UserLruCache(int max_elm, Crypto::ptr m) : LruTable(max_elm), cryp_mod(m) {}

    // Cached user record for uuid, or null-equivalent on a miss.
    inline UserCrypto::ptr get_user(uint64_t uuid) {
        return object_cast<UserCrypto>(lookup(uuid));
    }
    // NOTE(review): key_ver is ignored here — this returns whatever record is
    // cached for uuid regardless of version. Confirm callers expect that.
    inline UserCrypto::ptr get_user(uint64_t uuid, uint64_t key_ver) {
        return object_cast<UserCrypto>(lookup(uuid));
    }
    // Async variant: delivers the record (fetching on a miss) via cb.
    void get_user(uint64_t uuid, uint64_t key_ver, lru_str_cb cb);

    // Public-key accessors; bool forms report cache hit/miss, cb forms fetch.
    bool get_public_key(uint64_t uuid, uint64_t ver, std::string *key);
    void get_public_key(uint64_t uuid, uint64_t ver, lru_str_cb cb);
    bool get_all_pub_keys(uint64_t uuid, std::vector<Crypto::KeyVer::ptr> *krec);

    // LruTable miss hooks: resolve an absent entry from the backing store.
    LruObj::ptr lookup_missed(uint64_t uuid) override;
    LruObj::ptr lookup_missed(const std::string &key) override;
    void lookup_missed(uint64_t uuid, lru_u64_cb cb) override;
    void lookup_missed(const std::string &key, lru_str_cb cb) override;

  protected:
    Crypto::ptr cryp_mod;  // backing crypto module used to resolve misses
};

#endif /* _CRYPTO_USER_CACHE_H_ */
Java
/**
 * Licensed to the Austrian Association for Software Tool Integration (AASTI)
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. The AASTI licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.openengsb.ui.admin.tree.editablePanel;

import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;

/**
 * Small panel wrapping a single text field bound to the given model, used as
 * an inline editor inside the admin tree.
 */
@SuppressWarnings("serial")
public class EditablePanel extends Panel {

    /**
     * @param id         wicket component id of the panel
     * @param inputModel model backing the text field; updated on blur
     */
    public EditablePanel(String id, IModel<String> inputModel) {
        super(id);
        TextField<String> field = new TextField<String>("textfield", inputModel);
        add(field);
        // The behavior itself pushes the field's input into inputModel when
        // the client fires "onblur"; no extra server-side reaction is needed,
        // hence the intentionally empty onUpdate().
        field.add(new AjaxFormComponentUpdatingBehavior("onblur") {
            @Override
            protected void onUpdate(AjaxRequestTarget target) {
            }
        });
    }
}
Java
# Sopubia kacondensis S.Moore SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="de"> <head> <!-- Generated by javadoc (1.8.0_101) on Mon Aug 22 00:07:21 CEST 2016 --> <title>de.dhbw.wi13c.jguicreator.data.validator</title> <meta name="date" content="2016-08-22"> <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="de.dhbw.wi13c.jguicreator.data.validator"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../../de/dhbw/wi13c/jguicreator/data/util/package-summary.html">Prev&nbsp;Package</a></li> <li><a href="../../../../../../de/dhbw/wi13c/jguicreator/elemente/package-summary.html">Next&nbsp;Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?de/dhbw/wi13c/jguicreator/data/validator/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" 
target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 title="Package" class="title">Package&nbsp;de.dhbw.wi13c.jguicreator.data.validator</h1> </div> <div class="contentContainer"> <ul class="blockList"> <li class="blockList"> <table class="typeSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation"> <caption><span>Class Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Class</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../../../de/dhbw/wi13c/jguicreator/data/validator/NotNullValidator.html" title="class in de.dhbw.wi13c.jguicreator.data.validator">NotNullValidator</a></td> <td class="colLast">&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../../../de/dhbw/wi13c/jguicreator/data/validator/PatternValidator.html" title="class in de.dhbw.wi13c.jguicreator.data.validator">PatternValidator</a></td> <td class="colLast">&nbsp;</td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../../../de/dhbw/wi13c/jguicreator/data/validator/SizeValidator.html" title="class in de.dhbw.wi13c.jguicreator.data.validator">SizeValidator</a></td> <td class="colLast">&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../../../de/dhbw/wi13c/jguicreator/data/validator/Validator.html" title="class in 
de.dhbw.wi13c.jguicreator.data.validator">Validator</a>&lt;T&gt;</td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../../de/dhbw/wi13c/jguicreator/data/util/package-summary.html">Prev&nbsp;Package</a></li> <li><a href="../../../../../../de/dhbw/wi13c/jguicreator/elemente/package-summary.html">Next&nbsp;Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?de/dhbw/wi13c/jguicreator/data/validator/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> </html>
Java
<?php

namespace Test\Webmail;

use Magium\AbstractTestCase;
use Magium\Mail\Webmail\Messages;

/**
 * Browser tests for navigating webmail messages by index, subject and
 * recipient via the Magium Messages page object.
 *
 * NOTE: setUp() calls markTestSkipped() unconditionally, which throws and
 * aborts setUp before parent::setUp() ever runs. To enable these tests, fill
 * in the placeholder properties below and remove the skip.
 */
class WebmailTest extends AbstractTestCase
{

    // Placeholder fixtures — replace with values matching a real mailbox.
    protected $subjectContains = '<subject contains something>';
    protected $recipient = '<magium mail recipient';
    protected $subjectEquals = '<full subject >';

    public function setUp()
    {
        self::markTestSkipped('This test requires individual configuration');
        parent::setUp();
    }

    // Open the mailbox and fetch messages by 1-based position.
    public function testNavigateByNumber()
    {
        $messages = $this->get(Messages::LOCATOR);
        /* @var $messages Messages */
        $messages->open();
        $message = $messages->getMessage();
        self::assertNotNull($message->getSubject());
        $message = $messages->getMessage(2);
        self::assertNotNull($message->getSubject());
    }

    // Fetch the first message whose subject contains the fixture substring.
    public function testNavigateSubjectContains()
    {
        $messages = $this->get(Messages::LOCATOR);
        /* @var $messages Messages */
        $messages->open();
        $message = $messages->getMessageWithSubjectContains($this->subjectContains);
        self::assertNotNull($message->getSubject());
    }

    // Fetch the first message whose subject matches exactly.
    public function testNavigateSubjectEquals()
    {
        $messages = $this->get(Messages::LOCATOR);
        /* @var $messages Messages */
        $messages->open();
        $message = $messages->getMessageWithSubject($this->subjectEquals);
        self::assertNotNull($message->getSubject());
    }

    // Subject-contains lookup narrowed by recipient.
    public function testNavigateSubjectContainsAndRecipient()
    {
        $messages = $this->get(Messages::LOCATOR);
        /* @var $messages Messages */
        $messages->open();
        $message = $messages->getMessageWithSubjectContains($this->subjectContains, 1, $this->recipient);
        self::assertNotNull($message->getSubject());
    }

    // Exact-subject lookup narrowed by recipient.
    public function testNavigateSubjectEqualsAndRecipient()
    {
        $messages = $this->get(Messages::LOCATOR);
        /* @var $messages Messages */
        $messages->open();
        $message = $messages->getMessageWithSubject($this->subjectEquals, 1, $this->recipient);
        self::assertNotNull($message->getSubject());
    }

    // A non-matching recipient filter must yield no message (null).
    public function testNavigateSubjectContainsAndInvalidRecipientDoesNotDisplay()
    {
        $messages = $this->get(Messages::LOCATOR);
        /* @var $messages Messages */
        $messages->open();
        $message = $messages->getMessageWithSubjectContains($this->subjectContains, 1, 'nobody');
        self::assertNull($message);
    }
}
Java
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: common.py
# Author: Yuxin Wu <ppwwyyxxc@gmail.com>
#
# Shared helpers for playing and evaluating an RL agent (tensorpack-based):
# single-episode rollout, multi-threaded evaluation, and a training callback.

import random
import time
import threading
import multiprocessing
import numpy as np
from tqdm import tqdm
from six.moves import queue

from tensorpack import *
from tensorpack.utils.concurrency import *
from tensorpack.utils.stats import *


def play_one_episode(player, func, verbose=False):
    """Play one episode with a greedy policy and return the mean episode score.

    func maps a batched observation [[s]] to batched action scores; argmax
    picks the action, with a tiny epsilon (0.1%) of random exploration to
    avoid the player getting stuck.
    """
    def f(s):
        spc = player.get_action_space()
        act = func([[s]])[0][0].argmax()
        if random.random() < 0.001:
            act = spc.sample()
        if verbose:
            print(act)
        return act
    return np.mean(player.play_one_episode(f))


def play_model(cfg, player):
    """Run episodes forever with an OfflinePredictor, printing each score."""
    predfunc = OfflinePredictor(cfg)
    while True:
        score = play_one_episode(player, predfunc)
        print("Total:", score)


def eval_with_funcs(predictors, nr_eval, get_player_fn):
    """Evaluate nr_eval episodes across one worker thread per predictor.

    Returns (average_score, max_score); (0, 0) if no episode finished.
    """
    class Worker(StoppableThread, ShareSessionThread):
        def __init__(self, func, queue):
            super(Worker, self).__init__()
            self._func = func
            self.q = queue

        def func(self, *args, **kwargs):
            # Raising inside play_one_episode aborts the rollout promptly
            # once stop() has been requested.
            if self.stopped():
                raise RuntimeError("stopped!")
            return self._func(*args, **kwargs)

        def run(self):
            with self.default_sess():
                player = get_player_fn(train=False)
                while not self.stopped():
                    try:
                        score = play_one_episode(player, self.func)
                        # print("Score, ", score)
                    except RuntimeError:
                        return
                    self.queue_put_stoppable(self.q, score)

    q = queue.Queue()
    threads = [Worker(f, q) for f in predictors]

    for k in threads:
        k.start()
        time.sleep(0.1)  # avoid simulator bugs
    stat = StatCounter()
    try:
        for _ in tqdm(range(nr_eval), **get_tqdm_kwargs()):
            r = q.get()
            stat.feed(r)
        logger.info("Waiting for all the workers to finish the last run...")
        for k in threads:
            k.stop()
        for k in threads:
            k.join()
        # Drain scores produced while workers were winding down.
        while q.qsize():
            r = q.get()
            stat.feed(r)
    except:
        # NOTE(review): bare except logs everything (incl. KeyboardInterrupt),
        # and the return below in finally suppresses the exception afterwards.
        logger.exception("Eval")
    finally:
        if stat.count > 0:
            return (stat.average, stat.max)
        return (0, 0)


def eval_model_multithread(cfg, nr_eval, get_player_fn):
    """Evaluate a model config over nr_eval episodes and log avg/max score."""
    func = OfflinePredictor(cfg)
    NR_PROC = min(multiprocessing.cpu_count() // 2, 8)
    # NOTE: `max` shadows the builtin within this scope.
    mean, max = eval_with_funcs([func] * NR_PROC, nr_eval, get_player_fn)
    logger.info("Average Score: {}; Max Score: {}".format(mean, max))


class Evaluator(Triggerable):
    """Training callback: periodically evaluates the current model and
    reports mean/max score to the monitors."""

    def __init__(self, nr_eval, input_names, output_names, get_player_fn):
        self.eval_episode = nr_eval
        self.input_names = input_names
        self.output_names = output_names
        self.get_player_fn = get_player_fn

    def _setup_graph(self):
        NR_PROC = min(multiprocessing.cpu_count() // 2, 20)
        # The same predictor handle is shared by all worker slots.
        self.pred_funcs = [self.trainer.get_predictor(
            self.input_names, self.output_names)] * NR_PROC

    def _trigger(self):
        t = time.time()
        mean, max = eval_with_funcs(
            self.pred_funcs, self.eval_episode, self.get_player_fn)
        t = time.time() - t
        if t > 10 * 60:  # eval takes too long
            # Shrink future evaluations to keep training throughput reasonable.
            self.eval_episode = int(self.eval_episode * 0.94)
        self.trainer.monitors.put_scalar('mean_score', mean)
        self.trainer.monitors.put_scalar('max_score', max)


def play_n_episodes(player, predfunc, nr):
    """Play nr episodes sequentially, printing each score as it completes."""
    logger.info("Start evaluation: ")
    for k in range(nr):
        if k != 0:
            player.restart_episode()
        score = play_one_episode(player, predfunc)
        print("{}/{}, score={}".format(k, nr, score))
Java
#ifndef dynamic_array_h
#define dynamic_array_h

#include <stdint.h>  /* fixes missing header: uint32_t / int64_t were used
                        without including <stdint.h> (stdlib.h does not
                        provide them) */
#include <stdlib.h>

/*
 * Fix: the original header did `#include "dynamic_array.c"`, pasting the
 * function definitions into every translation unit that includes this header
 * and causing duplicate-symbol link errors in multi-TU builds. The include
 * has been removed; compile and link dynamic_array.c as its own TU instead.
 */

/* Growable pointer array: `size` slots, grown by `expansion_factor`. */
struct dynamic_array {
    uint32_t size;              /* current capacity in slots */
    uint32_t expansion_factor;  /* growth multiplier/step on expansion */
    void **array;               /* heap-allocated element storage */
};

/* Initializes `ar` with an explicit capacity and growth factor.
 * Returns 0 on success, nonzero on failure (by C convention — confirm in .c). */
int da_precise_create(struct dynamic_array *ar, uint32_t init_size,
                      uint32_t expansion_factor);

/* Initializes `ar` with default capacity/growth. */
int da_create(struct dynamic_array *ar);

/* Appends `val`; returns its index, or a negative value on failure
 * (int64_t so all uint32_t indices remain representable). */
int64_t da_add(struct dynamic_array *ar, void *val);

/* Releases the storage owned by `ar` (not the pointed-to elements). */
void da_delete(struct dynamic_array *ar);

#endif
Java
package org.devocative.demeter.service.template;

import groovy.lang.Binding;
import groovy.lang.Script;
import org.devocative.demeter.iservice.template.BaseStringTemplate;

import java.util.Map;

/**
 * String-template adapter around a pre-compiled Groovy {@link Script}.
 * Each {@link #process(Map)} call exposes the parameters as script variables
 * and evaluates the script, returning its result.
 */
public class GroovyScript extends BaseStringTemplate<Script> {
	private Script script;

	public GroovyScript(Script script) {
		this.script = script;
	}

	@Override
	public Object process(Map<String, Object> params) {
		// Install every parameter as a Groovy variable, then run the script.
		Binding ctx = new Binding();
		params.forEach(ctx::setVariable);
		script.setBinding(ctx);
		return script.run();
	}

	@Override
	public Script unwrap() {
		// Expose the underlying compiled script to callers that need it.
		return script;
	}
}
Java
/**
 *
 * ThingBench - Things and Devices Simulator
 *
 * http://github.com/frapu78/thingbench
 *
 * @author Frank Puhlmann
 *
 */
package thingbench;

import java.util.HashMap;

import net.frapu.code.visualization.ProcessNode;

/**
 * Base class for an operation that can be executed on a thing.
 *
 * Subclasses implement {@link #execute(HashMap)}; the node and the operation
 * name are fixed at construction time (fields are now {@code final} — the
 * class exposes no setters, so the previous mutability was accidental).
 *
 * @author fpu
 */
public abstract class ThingOperation {

    private final ProcessNode thingNode;
    private final String operationName;

    /**
     * @param thingNode     the process node representing the thing
     * @param operationName the name under which this operation is exposed
     */
    public ThingOperation(ProcessNode thingNode, String operationName) {
        this.thingNode = thingNode;
        this.operationName = operationName;
    }

    /** @return the node representing the thing this operation acts on */
    public ProcessNode getThingNode() {
        return thingNode;
    }

    /** @return this operation's name */
    public String getOperationName() {
        return operationName;
    }

    /**
     * Executes the operation. Each operation has a set of properties
     * of the type {@code <String, String>} as input and output. What the
     * operation does with them remains up to the concrete operation.
     *
     * @param properties input properties
     * @return output properties
     * @throws thingbench.ThingExecutionException if execution fails
     */
    public abstract HashMap<String, String> execute(HashMap<String, String> properties)
            throws ThingExecutionException;
}
Java
package lan.dk.podcastserver.manager.worker.selector;

import lan.dk.podcastserver.manager.worker.selector.update.*;
import lan.dk.podcastserver.manager.worker.updater.*;
import org.junit.Before;
import org.junit.Test;

import java.util.HashSet;
import java.util.Set;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests that UpdaterSelector maps a podcast URL to the matching Updater class
 * via the registered UpdaterCompatibility strategies, falling back to RSS for
 * unrecognised feed URLs and rejecting empty input.
 */
public class UpdaterSelectorTest {

    // Strategies registered for every test; populated in setUp().
    Set<UpdaterCompatibility> updaterSelectors = new HashSet<>();

    @Before
    public void setUp() throws Exception {
        updaterSelectors.add(new YoutubeUpdaterCompatibility());
        updaterSelectors.add(new RssUpdaterCompatibility());
        updaterSelectors.add(new BeInSportUpdaterCompatibility());
        updaterSelectors.add(new CanalPlusUpdaterCompatibility());
        updaterSelectors.add(new JeuxVideoFrCompatibility());
        updaterSelectors.add(new JeuxVideoComCompatibility());
        updaterSelectors.add(new ParleysCompatibility());
        updaterSelectors.add(new PluzzCompatibility());
    }

    // Generic feed URL falls through to the RSS updater.
    @Test
    public void should_return_an_RssUpdater() {
        /* Given */
        UpdaterSelector updaterSelector = new UpdaterSelector().setUpdaterCompatibilities(updaterSelectors);
        /* When */
        Class updaterClass = updaterSelector.of("www.link.to.rss/feeds");
        /* Then */
        assertThat(updaterClass).isEqualTo(RSSUpdater.class);
    }

    @Test
    public void should_return_a_YoutubeUpdater() {
        /* Given */
        UpdaterSelector updaterSelector = new UpdaterSelector().setUpdaterCompatibilities(updaterSelectors);
        /* When */
        Class updaterClass = updaterSelector.of("http://www.youtube.com/user/fakeUser");
        /* Then */
        assertThat(updaterClass).isEqualTo(YoutubeUpdater.class);
    }

    @Test
    public void should_return_a_BeInSportUpdater() {
        /* Given */
        UpdaterSelector updaterSelector = new UpdaterSelector().setUpdaterCompatibilities(updaterSelectors);
        /* When */
        Class updaterClass = updaterSelector.of("http://www.beinsports.fr/replay/category/3361/name/lexpresso");
        /* Then */
        assertThat(updaterClass).isEqualTo(BeInSportsUpdater.class);
    }

    @Test
    public void should_return_a_CanalPlusUpdater() {
        /* Given */
        UpdaterSelector updaterSelector = new UpdaterSelector().setUpdaterCompatibilities(updaterSelectors);
        /* When */
        Class updaterClass = updaterSelector.of("http://www.canalplus.fr/show/for/dummies");
        /* Then */
        assertThat(updaterClass).isEqualTo(CanalPlusUpdater.class);
    }

    @Test
    public void should_return_a_JeuxVideoFrUpdater() {
        /* Given */
        UpdaterSelector updaterSelector = new UpdaterSelector().setUpdaterCompatibilities(updaterSelectors);
        /* When */
        Class updaterClass = updaterSelector.of("http://www.jeuxvideo.fr/show/for/dummies");
        /* Then */
        assertThat(updaterClass).isEqualTo(JeuxVideoFRUpdater.class);
    }

    @Test
    public void should_return_a_JeuxVideoComUpdater() {
        /* Given */
        UpdaterSelector updaterSelector = new UpdaterSelector().setUpdaterCompatibilities(updaterSelectors);
        /* When */
        Class updaterClass = updaterSelector.of("http://www.jeuxvideo.com/show/for/dummies");
        /* Then */
        assertThat(updaterClass).isEqualTo(JeuxVideoComUpdater.class);
    }

    @Test
    public void should_return_a_ParleysUpdater() {
        /* Given */
        UpdaterSelector updaterSelector = new UpdaterSelector().setUpdaterCompatibilities(updaterSelectors);
        /* When */
        Class updaterClass = updaterSelector.of("http://www.parleys.com/show/for/dummies");
        /* Then */
        assertThat(updaterClass).isEqualTo(ParleysUpdater.class);
    }

    @Test
    public void should_return_a_PluzzUpdater() {
        /* Given */
        UpdaterSelector updaterSelector = new UpdaterSelector().setUpdaterCompatibilities(updaterSelectors);
        /* When */
        Class updaterClass = updaterSelector.of("http://www.pluzz.francetv.fr/show/for/dummies");
        /* Then */
        assertThat(updaterClass).isEqualTo(PluzzUpdater.class);
    }

    // An empty URL matches no strategy and must raise.
    @Test(expected = RuntimeException.class)
    public void should_reject_empty_url() {
        /* Given */
        UpdaterSelector updaterSelector = new UpdaterSelector().setUpdaterCompatibilities(updaterSelectors);
        /* When */
        updaterSelector.of("");
    }
}
Java
// Code generated by msgraph.go/gen DO NOT EDIT.

package msgraph

import "context"

// WorkbookFunctionsTanRequestBuilder addresses the workbookFunctions "tan" action endpoint.
type WorkbookFunctionsTanRequestBuilder struct{ BaseRequestBuilder }

// Tan action undocumented; returns a builder whose base URL is the parent builder's URL
// with "/tan" appended and whose request body is reqObj.
func (b *WorkbookFunctionsRequestBuilder) Tan(reqObj *WorkbookFunctionsTanRequestParameter) *WorkbookFunctionsTanRequestBuilder {
	bb := &WorkbookFunctionsTanRequestBuilder{BaseRequestBuilder: b.BaseRequestBuilder}
	bb.BaseRequestBuilder.baseURL += "/tan"
	bb.BaseRequestBuilder.requestObject = reqObj
	return bb
}

// WorkbookFunctionsTanRequest executes the request prepared by WorkbookFunctionsTanRequestBuilder.
type WorkbookFunctionsTanRequest struct{ BaseRequest }

// Request materializes the builder into a request, carrying over the URL, client and request body.
func (b *WorkbookFunctionsTanRequestBuilder) Request() *WorkbookFunctionsTanRequest {
	return &WorkbookFunctionsTanRequest{
		BaseRequest: BaseRequest{baseURL: b.baseURL, client: b.client, requestObject: b.requestObject},
	}
}

// Post sends the action as an HTTP POST (empty path suffix) and decodes the
// response into a WorkbookFunctionResult.
func (r *WorkbookFunctionsTanRequest) Post(ctx context.Context) (resObj *WorkbookFunctionResult, err error) {
	err = r.JSONRequest(ctx, "POST", "", r.requestObject, &resObj)
	return
}
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.4.2_11) on Sun Oct 15 22:02:30 CDT 2006 --> <TITLE> Overview (Hibernate API Documentation) </TITLE> <META NAME="keywords" CONTENT="Overview, Hibernate API Documentation (3.2.0.ga)"> <LINK REL ="stylesheet" TYPE="text/css" HREF="jdstyle.css" TITLE="Style"> </HEAD> <BODY BGCOLOR="white"> <TABLE BORDER="0" WIDTH="100%" SUMMARY=""> <TR> <TD NOWRAP><FONT size="+1" CLASS="FrameTitleFont"> <B></B></FONT></TD> </TR> </TABLE> <TABLE BORDER="0" WIDTH="100%" SUMMARY=""> <TR> <TD NOWRAP><FONT CLASS="FrameItemFont"><A HREF="allclasses-frame.html" target="packageFrame">All Classes</A></FONT> <P> <FONT size="+1" CLASS="FrameHeadingFont"> Packages</FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/package-frame.html" target="packageFrame">org.hibernate</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/action/package-frame.html" target="packageFrame">org.hibernate.action</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/bytecode/package-frame.html" target="packageFrame">org.hibernate.bytecode</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/bytecode/cglib/package-frame.html" target="packageFrame">org.hibernate.bytecode.cglib</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/bytecode/javassist/package-frame.html" target="packageFrame">org.hibernate.bytecode.javassist</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/bytecode/util/package-frame.html" target="packageFrame">org.hibernate.bytecode.util</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/cache/package-frame.html" target="packageFrame">org.hibernate.cache</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/cache/entry/package-frame.html" target="packageFrame">org.hibernate.cache.entry</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A 
HREF="org/hibernate/cfg/package-frame.html" target="packageFrame">org.hibernate.cfg</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/classic/package-frame.html" target="packageFrame">org.hibernate.classic</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/collection/package-frame.html" target="packageFrame">org.hibernate.collection</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/connection/package-frame.html" target="packageFrame">org.hibernate.connection</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/context/package-frame.html" target="packageFrame">org.hibernate.context</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/criterion/package-frame.html" target="packageFrame">org.hibernate.criterion</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/dialect/package-frame.html" target="packageFrame">org.hibernate.dialect</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/dialect/function/package-frame.html" target="packageFrame">org.hibernate.dialect.function</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/dialect/lock/package-frame.html" target="packageFrame">org.hibernate.dialect.lock</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/engine/package-frame.html" target="packageFrame">org.hibernate.engine</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/engine/query/package-frame.html" target="packageFrame">org.hibernate.engine.query</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/engine/query/sql/package-frame.html" target="packageFrame">org.hibernate.engine.query.sql</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/engine/transaction/package-frame.html" target="packageFrame">org.hibernate.engine.transaction</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/event/package-frame.html" target="packageFrame">org.hibernate.event</A></FONT> <BR> 
<FONT CLASS="FrameItemFont"><A HREF="org/hibernate/event/def/package-frame.html" target="packageFrame">org.hibernate.event.def</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/exception/package-frame.html" target="packageFrame">org.hibernate.exception</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/hql/package-frame.html" target="packageFrame">org.hibernate.hql</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/hql/antlr/package-frame.html" target="packageFrame">org.hibernate.hql.antlr</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/hql/ast/package-frame.html" target="packageFrame">org.hibernate.hql.ast</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/hql/ast/exec/package-frame.html" target="packageFrame">org.hibernate.hql.ast.exec</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/hql/ast/tree/package-frame.html" target="packageFrame">org.hibernate.hql.ast.tree</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/hql/ast/util/package-frame.html" target="packageFrame">org.hibernate.hql.ast.util</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/hql/classic/package-frame.html" target="packageFrame">org.hibernate.hql.classic</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/id/package-frame.html" target="packageFrame">org.hibernate.id</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/id/insert/package-frame.html" target="packageFrame">org.hibernate.id.insert</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/impl/package-frame.html" target="packageFrame">org.hibernate.impl</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/intercept/package-frame.html" target="packageFrame">org.hibernate.intercept</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/intercept/cglib/package-frame.html" target="packageFrame">org.hibernate.intercept.cglib</A></FONT> <BR> 
<FONT CLASS="FrameItemFont"><A HREF="org/hibernate/intercept/javassist/package-frame.html" target="packageFrame">org.hibernate.intercept.javassist</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/jdbc/package-frame.html" target="packageFrame">org.hibernate.jdbc</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/jmx/package-frame.html" target="packageFrame">org.hibernate.jmx</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/loader/package-frame.html" target="packageFrame">org.hibernate.loader</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/loader/collection/package-frame.html" target="packageFrame">org.hibernate.loader.collection</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/loader/criteria/package-frame.html" target="packageFrame">org.hibernate.loader.criteria</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/loader/custom/package-frame.html" target="packageFrame">org.hibernate.loader.custom</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/loader/custom/sql/package-frame.html" target="packageFrame">org.hibernate.loader.custom.sql</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/loader/entity/package-frame.html" target="packageFrame">org.hibernate.loader.entity</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/loader/hql/package-frame.html" target="packageFrame">org.hibernate.loader.hql</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/lob/package-frame.html" target="packageFrame">org.hibernate.lob</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/mapping/package-frame.html" target="packageFrame">org.hibernate.mapping</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/metadata/package-frame.html" target="packageFrame">org.hibernate.metadata</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/param/package-frame.html" 
target="packageFrame">org.hibernate.param</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/persister/package-frame.html" target="packageFrame">org.hibernate.persister</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/persister/collection/package-frame.html" target="packageFrame">org.hibernate.persister.collection</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/persister/entity/package-frame.html" target="packageFrame">org.hibernate.persister.entity</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/pretty/package-frame.html" target="packageFrame">org.hibernate.pretty</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/property/package-frame.html" target="packageFrame">org.hibernate.property</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/proxy/package-frame.html" target="packageFrame">org.hibernate.proxy</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/proxy/dom4j/package-frame.html" target="packageFrame">org.hibernate.proxy.dom4j</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/proxy/map/package-frame.html" target="packageFrame">org.hibernate.proxy.map</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/proxy/pojo/package-frame.html" target="packageFrame">org.hibernate.proxy.pojo</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/proxy/pojo/cglib/package-frame.html" target="packageFrame">org.hibernate.proxy.pojo.cglib</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/proxy/pojo/javassist/package-frame.html" target="packageFrame">org.hibernate.proxy.pojo.javassist</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/secure/package-frame.html" target="packageFrame">org.hibernate.secure</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/sql/package-frame.html" target="packageFrame">org.hibernate.sql</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A 
HREF="org/hibernate/stat/package-frame.html" target="packageFrame">org.hibernate.stat</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/tool/hbm2ddl/package-frame.html" target="packageFrame">org.hibernate.tool.hbm2ddl</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/tool/instrument/package-frame.html" target="packageFrame">org.hibernate.tool.instrument</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/tool/instrument/cglib/package-frame.html" target="packageFrame">org.hibernate.tool.instrument.cglib</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/tool/instrument/javassist/package-frame.html" target="packageFrame">org.hibernate.tool.instrument.javassist</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/transaction/package-frame.html" target="packageFrame">org.hibernate.transaction</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/transform/package-frame.html" target="packageFrame">org.hibernate.transform</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/tuple/package-frame.html" target="packageFrame">org.hibernate.tuple</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/tuple/component/package-frame.html" target="packageFrame">org.hibernate.tuple.component</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/tuple/entity/package-frame.html" target="packageFrame">org.hibernate.tuple.entity</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/type/package-frame.html" target="packageFrame">org.hibernate.type</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/usertype/package-frame.html" target="packageFrame">org.hibernate.usertype</A></FONT> <BR> <FONT CLASS="FrameItemFont"><A HREF="org/hibernate/util/package-frame.html" target="packageFrame">org.hibernate.util</A></FONT> <BR> </TD> </TR> </TABLE> <P> &nbsp; </BODY> </HTML>
Java
import argparse
import pickle

# Converts DBpedia page-link URLs to wiki_ids.
#
# Reads the official url -> wiki_id mapping from the page-ids .ttl dump,
# rewrites every "<url1> <predicate> <url2> ." triple of the input file as
# "wiki_id1 wiki_id2", and pickles the locally assigned ids for URLs that are
# missing from the official mapping.
#
# NOTE(review): the original mixed Python 2 (`raw_input`, `print count`) and
# Python 3 (`print(url, wiki_id)`) syntax; this version is consistent Python 3.

PAGE_IDS_PATH = '../../datasets/dbpedia/page_ids_en_2016.ttl'
LOCAL_ID_DUMP_PATH = '../../datasets/dbpedia/local_id_to_url_full_mapping_based.p'


def resolve_id(url, wiki_from_url, local_id, next_id):
    """Resolve a URL to an integer id.

    Lookup order (matches the original nested try/except logic):
    official mapping first, then previously assigned local ids; otherwise
    assign ``next_id`` to the URL, record it in ``local_id`` (mutated in
    place), and advance the counter.

    Returns:
        (wiki_id, next_id): the resolved id and the (possibly incremented)
        next free local id.
    """
    if url in wiki_from_url:
        return wiki_from_url[url], next_id
    if url in local_id:
        return local_id[url], next_id
    local_id[url] = next_id
    return next_id, next_id + 1


def load_wiki_ids(path):
    """Parse the .ttl page-id dump into a {url: wiki_id} dict.

    Lines starting with '#' are comments.  The third space-separated field
    looks like '"12345"^^<...>'; the id sits between the first pair of
    double quotes.
    """
    wiki_from_url = {}
    with open(path, 'r') as f:
        for line in f:
            parts = line.split(' ')
            if parts[0] == '#':
                continue
            url = parts[0]
            wiki_id = parts[2].split('"')[1]
            print(url, wiki_id)  # progress trace, kept from the original
            wiki_from_url[url] = int(wiki_id)
    return wiki_from_url


def main():
    parser = argparse.ArgumentParser(
        description='Convert page-link URLs to wiki_ids.')
    parser.add_argument('-i', '--input', dest='input_file', help='input_file')
    parser.add_argument('-o', '--output', dest='output_file', help='output_file')
    options = parser.parse_args()

    # Prompt interactively when either path is missing, as before.
    input_file = options.input_file or input('Enter input file:')
    output_file = options.output_file or input('Enter output file:')

    wiki_from_url = load_wiki_ids(PAGE_IDS_PATH)

    # URLs absent from the dump get fresh ids starting just past the largest
    # official one; url_1 is resolved before url_2 so assignment order (and
    # therefore the pickled mapping) matches the original script.
    local_id = {}
    next_id = max(wiki_from_url.values()) + 1
    count = 0
    with open(input_file) as page_links, open(output_file, 'w') as out:
        for line in page_links:
            parts = line.split(' ')
            if parts[0] == '#':
                continue
            wiki_id1, next_id = resolve_id(parts[0], wiki_from_url, local_id, next_id)
            wiki_id2, next_id = resolve_id(parts[2], wiki_from_url, local_id, next_id)
            out.write('%d %d\n' % (wiki_id1, wiki_id2))
            print(count)  # progress counter, printed before incrementing (as before)
            count += 1

    with open(LOCAL_ID_DUMP_PATH, 'wb') as dump:
        pickle.dump(local_id, dump)


if __name__ == '__main__':
    main()
Java
/*
 * #%L
 * fujion
 * %%
 * Copyright (C) 2021 Fujion Framework
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * #L%
 */
package org.fujion.highcharts;

import org.fujion.ancillary.OptionMap;
import org.fujion.ancillary.Options;
import org.fujion.annotation.Option;

import java.util.ArrayList;
import java.util.List;

/**
 * Base class for all plot types.
 * <p>
 * PlotOptions is a wrapper for config objects for each series type. The config objects for each
 * series can also be overridden for each series item as given in the series array. Configuration
 * options for the series are given in three levels: options for all series in a chart are given in
 * the plotOptions.series object; options for all series of a specific type are given in the
 * plotOptions of that type (e.g. plotOptions.line); options for one single series are given in the
 * series array.
 */
public abstract class PlotOptions extends Options {

    /**
     * The text identifier of the plot type (set by subclasses); when non-null, {@link #toMap}
     * nests all options under this key.
     */
    @Option(ignore = true)
    protected String type;

    /**
     * Allow points to be selected by clicking markers, bars or pie slices. Defaults to false.
     */
    @Option
    public Boolean allowPointSelect;

    /**
     * Initial display animation of the series; applies only to the series' own intro animation,
     * not to other chart animations.
     */
    @Option
    public final AnimationOptions animation = new AnimationOptions();

    /**
     * Point-count cap above which initial animation is shut off by default (e.g. 250 for column
     * charts); set to Infinity to disable the cap.
     */
    @Option
    public Integer animationLimit;

    /**
     * Point count at which the series enters boost mode (0 disables boosting, 1 forces it).
     * Requires modules/boost.js.
     */
    @Option
    public Integer boostThreshold;

    /**
     * A CSS class name to apply to the series' graphical elements.
     */
    @Option
    public String className;

    /**
     * Main series color (line and markers, or bars, unless overridden per point). Default is
     * pulled from the options.colors array.
     */
    @Option
    public String color;

    /**
     * Styled mode only: color index n, giving class name highcharts-color-{n}. Defaults to
     * undefined.
     */
    @Option
    public Integer colorIndex;

    /**
     * Use one color per point (true) instead of one per series when pulling automatic colors.
     * Defaults to false.
     */
    @Option
    public Boolean colorByPoint;

    /**
     * Series-specific color set used instead of the global colors when colorByPoint is true.
     */
    @Option
    public List<String> colors = new ArrayList<>();

    /**
     * Polar charts only: connect the ends of a line series across the extremes. Defaults to true.
     */
    @Option
    public Boolean connectEnds;

    /**
     * Whether to connect a graph line across null points. Defaults to false.
     */
    @Option
    public Boolean connectNulls;

    /**
     * Point count below which all points are drawn even when outside the visible plot area
     * (keeps update animation); above it the data is cropped for performance. Defaults to 300.
     */
    @Option
    public Double cropThreshold;

    /**
     * Mouse cursor over the series, e.g. "pointer" to signal clickable points. Defaults to ''.
     */
    @Option
    public String cursor;

    /**
     * Dash style for the graph line (Solid, ShortDash, ShortDot, ShortDashDot, ShortDashDotDot,
     * Dot, Dash, LongDash, DashDot, LongDashDot, LongDashDotDot). Applies only to series types
     * with a graph and a lineWidth. Defaults to null.
     */
    @Option
    public DashStyle dashStyle;

    /**
     * Options for data labels.
     *
     * @see DataLabelOptions
     */
    @Option
    public final DataLabelOptions dataLabels = new DataLabelOptions();

    /**
     * Requires the Accessibility module: screen-reader description of the series. Defaults to
     * undefined.
     */
    @Option
    public String description;

    /**
     * Enable mouse tracking (point tooltips, click events); disabling improves performance on
     * large datasets. Defaults to true.
     */
    @Option
    public Boolean enableMouseTracking;

    /**
     * Requires the Accessibility module: expose the series element itself to screen readers the
     * same way data points are. Defaults to undefined.
     */
    @Option
    public Boolean exposeElementToA11y;

    /**
     * Hover search dimensions: 'x' or 'xy'. Defaults to 'xy' for scatter series and 'x' for most
     * others; use 'xy' when the data has duplicate x-values.
     */
    @Option
    public String findNearestPointBy;

    /**
     * Use the Y extremes of the total chart width instead of only the zoomed area. Cartesian
     * series only. Defaults to false.
     */
    @Option
    public Boolean getExtremesFromAll;

    /**
     * An id for the series. Defaults to null.
     */
    @Option
    public String id;

    /**
     * Maps positions in unstructured data-point arrays to option keys. Defaults to undefined.
     */
    @Option
    public final List<String> keys = new ArrayList<>();

    /**
     * Text labels for the plot bands.
     */
    @Option
    public final PlotLabelOptions label = new PlotLabelOptions();

    /**
     * The line cap used for line ends and line joins on the graph. Defaults to round.
     */
    @Option
    public String linecap;

    /**
     * Pixel width of the graph line. Defaults to 2.
     */
    @Option
    public Integer lineWidth;

    /**
     * Id of another series to link to, or ":previous" for the previous series; linked series
     * share legend entry and visibility toggling.
     */
    @Option
    public String linkedTo;

    /**
     * Options for point markers.
     *
     * @see MarkerOptions
     */
    @Option
    public final MarkerOptions marker = new MarkerOptions();

    /**
     * Color for the parts of the graph or points below the threshold. Defaults to null.
     */
    @Option
    public String negativeColor;

    /**
     * X-value interval when no x values are given for the points. Defaults to 1.
     */
    @Option
    public Double pointInterval;

    /**
     * Datetime series: irregular interval unit (day, month, year; DST-aware), combined with
     * pointInterval. Applies to the series data, not axis ticks. Defaults to undefined.
     */
    @Option
    public String pointIntervalUnit;

    /**
     * null, "on" or "between": column placement relative to X-axis ticks. Defaults to null in
     * cartesian charts, "between" in polar charts.
     */
    @Option
    public String pointPlacement;

    /**
     * Starting x value when no x values are given for the points. Defaults to 0.
     */
    @Option
    public Double pointStart;

    /**
     * Select the series initially (checkbox checked when showCheckbox is true). Defaults to
     * false.
     */
    @Option
    public Boolean selected;

    /**
     * Drop shadow for the graph line: a Boolean or a ShadowOptions object. Defaults to true.
     *
     * @see ShadowOptions
     */
    @Option
    public Object shadow;

    /**
     * Display a checkbox next to the legend item for selecting the series; state follows the
     * selected option. Defaults to false.
     */
    @Option
    public Boolean showCheckbox;

    /**
     * Whether to display this series or series type in the legend. Defaults to true.
     */
    @Option
    public Boolean showInLegend;

    /**
     * Accessibility module: skip this series' points during keyboard navigation. Defaults to
     * undefined.
     */
    @Option
    public Boolean skipKeyboardNavigation;

    /**
     * When true, the series will not force the Y axis across the zero plane (or threshold)
     * unless the data actually crosses it. Defaults to true.
     */
    @Option
    public Boolean softThreshold;

    /**
     * Series stacking: null to disable, "normal" to stack by value, or "percent". Defaults to
     * null.
     */
    @Option
    public String stacking;

    /**
     * Apply steps to the line: left, center or right. Defaults to undefined.
     */
    @Option
    public AlignHorizontal step;

    /**
     * Sticky mouse tracking: when true, mouseOut fires only when the pointer moves over another
     * series or out of the plot area; also affects tooltip hiding with tooltip.shared false.
     * Defaults to true.
     */
    @Option
    public Boolean stickyTracking;

    /**
     * The threshold (zero/base level); for line series only used with negativeColor. Defaults to
     * 0.
     */
    @Option
    public Double threshold;

    /**
     * Per-series tooltip rendering configuration.
     */
    @Option
    public final TooltipOptions tooltip = new TooltipOptions();

    /**
     * Data length above which only 1-D number arrays or [x, y] pairs are accepted (only the
     * first point is format-checked), saving expensive data checking. Defaults to 1000.
     */
    @Option
    public Integer turboThreshold;

    /**
     * Initial visibility of the series. Defaults to true.
     */
    @Option
    public Boolean visible;

    /**
     * Axis on which the zones are applied. Defaults to y.
     */
    @Option
    public String zoneAxis;

    /**
     * Zones within a series, applied along zoneAxis (X, Y, or Z for bubbles). In styled mode
     * styled via .highcharts-zone-{n} or the className option.
     */
    @Option
    public final List<Zone> zones = new ArrayList<>();

    /**
     * If type is not null, place options under a submap indexed by the type id.
     */
    @Override
    public OptionMap toMap() {
        OptionMap map = super.toMap();

        if (type != null) {
            OptionMap newMap = new OptionMap();
            newMap.put(type, map);
            map = newMap;
        }

        return map;
    }

}
Java
// Spec for the SaveProfileModal service: shows a three-way save dialog and
// routes each button click to the matching caller-supplied callback.
describe('app.components.SaveProfileModal', function() {
  beforeEach(function () {
    module('app.components', 'ui.bootstrap', 'gettext');
  });

  describe('service', function () {
    var callbackObject;
    var saveSpy;
    var doNotSaveSpy;
    var cancelSpy;

    beforeEach(function () {
      bard.inject('SaveProfileModal', '$rootScope', '$document');

      // Target object holding the three callbacks; sinon wraps each method in
      // place so the spies observe the calls made by the modal.
      callbackObject = {
        save: function () {},
        doNotSave: function () {},
        cancel: function () {},
      }

      saveSpy = sinon.spy(callbackObject, "save");
      doNotSaveSpy = sinon.spy(callbackObject, "doNotSave");
      cancelSpy = sinon.spy(callbackObject, "cancel");
    });

    // NOTE(review): disabled via xit — re-enable once the underlying flakiness
    // (presumably DOM/digest timing) is resolved.
    xit('should show the modal', function () {
      // `modal` is unused but kept: showModal must still be invoked for its side effect.
      var modal = SaveProfileModal.showModal(callbackObject.save, callbackObject.doNotSave, callbackObject.cancel);
      $rootScope.$digest();

      // Exactly one dialog with one primary (save) button is rendered.
      var saveDialog = $document.find('.save-profile-modal');
      expect(saveDialog.length).to.eq(1);

      var saveButton = $document.find('.save-profile-modal .btn.btn-primary');
      expect(saveButton.length).to.eq(1);

      // Clicking the primary button invokes the save callback.
      eventFire(saveButton[0], 'click');
      $rootScope.$digest();
      expect(saveSpy).to.have.been.called;

      // Two default buttons: first is cancel, second is "do not save".
      var closeButtons = $document.find('.save-profile-modal .btn.btn-default');
      expect(closeButtons.length).to.eq(2);

      eventFire(closeButtons[0], 'click');
      $rootScope.$digest();
      expect(cancelSpy).to.have.been.called;

      eventFire(closeButtons[1], 'click');
      $rootScope.$digest();
      expect(doNotSaveSpy).to.have.been.called;
    });
  });
});
Java
package grok.core; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.auto.value.AutoValue; @AutoValue public abstract class Image { @JsonCreator public static Image of(@JsonProperty("id") String id, @JsonProperty("title") String title, @JsonProperty("url") String url) { return builder().id(id) .title(title) .url(url) .build(); } public static Builder builder() { return new AutoValue_Image.Builder(); } Image() {} @JsonProperty public abstract String id(); @JsonProperty public abstract String title(); @JsonProperty public abstract String url(); @AutoValue.Builder public abstract static class Builder { Builder() {} public abstract Builder id(String value); @JsonProperty public abstract Builder title(String value); @JsonProperty public abstract Builder url(String value); public abstract Image build(); } }
Java
package com.mgaetan89.showsrage.fragment; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.util.Arrays; import java.util.Collection; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; @RunWith(Parameterized.class) public class ShowsFragment_OnQueryTextChangeTest { @Parameterized.Parameter(0) public String newText; private ShowsFragment fragment; @Before public void before() { this.fragment = spy(new ShowsFragment()); } @Test public void onQueryTextChange() { try { assertTrue(this.fragment.onQueryTextChange(this.newText)); } catch (NullPointerException exception) { // LocalBroadcastManager.getInstance(Context) returns null in tests } verify(this.fragment).sendFilterMessage(); } @After public void after() { this.fragment = null; } @Parameterized.Parameters public static Collection<Object[]> data() { return Arrays.asList(new Object[][]{ {null}, {""}, {" "}, {"Search Query"}, }); } }
Ruby
# frozen_string_literal: true

# Migration creating the join table that links snapshots to attribute values.
class CreateSnapshotAttributeValues < ActiveRecord::Migration[4.2]
  def change
    create_table :snapshot_attribute_values do |t|
      # Both foreign keys are mandatory for a join row.
      t.integer :snapshot_id,        null: false
      t.integer :attribute_value_id, null: false

      # created_at / updated_at, both NOT NULL.
      t.timestamps null: false
    end
  end
end
C++
/***************************************************************
 *
 * Copyright (C) 1990-2007, Condor Team, Computer Sciences Department,
 * University of Wisconsin-Madison, WI.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ***************************************************************/

#include "condor_common.h"
#include "condor_config.h"
#include "condor_state.h"
#include "condor_api.h"
#include "status_types.h"
#include "totals.h"
#include "get_daemon_name.h"
#include "daemon.h"
#include "dc_collector.h"
#include "extArray.h"
#include "sig_install.h"
#include "string_list.h"
#include "condor_string.h"   // for strnewp()
#include "match_prefix.h"    // is_arg_colon_prefix
#include "print_wrapped_text.h"
#include "error_utils.h"
#include "condor_distribution.h"
#include "condor_version.h"

#include <vector>
#include <sstream>
#include <iostream>

using std::vector;
using std::string;
using std::stringstream;

// Holds one -sort argument: the raw expression text, the synthesized
// attribute names used to stash the computed sort key on each ad, and the
// parsed expression plus derived less-than / equality expressions.
// Owns its ExprTree pointers (deep-copied on copy, deleted on destruction).
struct SortSpec {
	string arg;
	string keyAttr;
	string keyExprAttr;
	ExprTree* expr;
	ExprTree* exprLT;
	ExprTree* exprEQ;

	SortSpec(): arg(), keyAttr(), keyExprAttr(), expr(NULL), exprLT(NULL), exprEQ(NULL) {}
	~SortSpec() {
		if (NULL != expr) delete expr;
		if (NULL != exprLT) delete exprLT;
		if (NULL != exprEQ) delete exprEQ;
	}

	SortSpec(const SortSpec& src): expr(NULL), exprLT(NULL), exprEQ(NULL) { *this = src; }

	SortSpec& operator=(const SortSpec& src) {
		if (this == &src) return *this;

		arg = src.arg;
		keyAttr = src.keyAttr;
		keyExprAttr = src.keyExprAttr;

		// BUGFIX: the expression pointers may legitimately be NULL (the
		// default constructor leaves them NULL), so guard each Copy() call
		// instead of dereferencing src's pointers unconditionally.
		if (NULL != expr) delete expr;
		expr = (NULL != src.expr) ? src.expr->Copy() : NULL;
		if (NULL != exprLT) delete exprLT;
		exprLT = (NULL != src.exprLT) ? src.exprLT->Copy() : NULL;
		if (NULL != exprEQ) delete exprEQ;
		exprEQ = (NULL != src.exprEQ) ? src.exprEQ->Copy() : NULL;

		return *this;
	}
};


// global variables
AttrListPrintMask pm;
printmask_headerfooter_t pmHeadFoot = STD_HEADFOOT;
List<const char> pm_head; // The list of headings for the mask entries
std::vector<GroupByKeyInfo> group_by_keys; // TJ 8.1.5 for future use, ignored for now.
bool explicit_format = false;
bool using_print_format = false; // hack for now so we can get standard totals when using -print-format
bool disable_user_print_files = false; // allow command line to defeat use of default user print files.
const char *DEFAULT= "<default>";
DCCollector* pool = NULL;
AdTypes type = (AdTypes) -1;
ppOption ppStyle = PP_NOTSET;
ppOption ppTotalStyle = PP_NOTSET; // used when setting PP_CUSTOM to keep track of how to do totals.
int wantOnlyTotals = 0;
int summarySize = -1;
bool expert = false;
bool wide_display = false; // when true, don't truncate field data
bool invalid_fields_empty = false; // when true, print "" instead of "[?]" for missing data
Mode mode = MODE_NOTSET;
const char * mode_constraint = NULL; // constraint set by mode
int diagnose = 0;
char* direct = NULL;
char* statistics = NULL;
char* genericType = NULL;
CondorQuery *query;
char buffer[1024];
char *myName;
vector<SortSpec> sortSpecs;
bool noSort = false; // set to true to disable sorting entirely
bool javaMode = false;
bool vmMode = false;
bool absentMode = false;
char *target = NULL;
const char * ads_file = NULL; // read classads from this file instead of querying them from the collector
ClassAd *targetAd = NULL;

ArgList projList; // Attributes that we want the server to send us

// instantiate templates

// function declarations
void usage ();
void firstPass (int, char *[]);
void secondPass (int, char *[]);
void prettyPrint(ClassAdList &, TrackTotals *);
int matchPrefix(const char *, const char *, int min_len);
int
lessThanFunc(AttrList*,AttrList*,void*);
int customLessThanFunc(AttrList*,AttrList*,void*);
static bool read_classad_file(const char *filename, ClassAdList &classads, const char * constr);

extern "C" int SetSyscalls (int) {return 0;}
extern void setPPstyle (ppOption, int, const char *);
extern void setType (const char *, int, const char *);
extern void setMode (Mode, int, const char *);

// Entry point: parse arguments in two passes, build and run the collector
// query, sort the resulting ads, and pretty-print them with totals.
int
main (int argc, char *argv[])
{
#if !defined(WIN32)
	install_sig_handler(SIGPIPE, (SIG_HANDLER)SIG_IGN );
#endif

	// initialize to read from config file
	myDistro->Init( argc, argv );
	myName = argv[0];
	config();
	dprintf_config_tool_on_error(0);

	// The arguments take two passes to process --- the first pass
	// figures out the mode, after which we can instantiate the required
	// query object. We add implied constraints from the command line in
	// the second pass.
	firstPass (argc, argv);

	// if the mode has not been set, it is STARTD_NORMAL
	if (mode == MODE_NOTSET) {
		setMode (MODE_STARTD_NORMAL, 0, DEFAULT);
	}

	// instantiate query object
	if (!(query = new CondorQuery (type))) {
		dprintf_WriteOnErrorBuffer(stderr, true);
		fprintf (stderr, "Error: Out of memory\n");
		exit (1);
	}

	// if a first-pass setMode set a mode_constraint, apply it now to the query object
	if (mode_constraint && ! explicit_format) {
		query->addANDConstraint(mode_constraint);
	}

	// set pretty print style implied by the type of entity being queried
	// but do it with default priority, so that explicitly requested options
	// can override it
	switch (type) {
#ifdef HAVE_EXT_POSTGRESQL
	  case QUILL_AD:
		setPPstyle(PP_QUILL_NORMAL, 0, DEFAULT);
		break;
#endif /* HAVE_EXT_POSTGRESQL */
	  case DEFRAG_AD:
		setPPstyle(PP_GENERIC_NORMAL, 0, DEFAULT);
		break;
	  case STARTD_AD:
		setPPstyle(PP_STARTD_NORMAL, 0, DEFAULT);
		break;
	  case SCHEDD_AD:
		setPPstyle(PP_SCHEDD_NORMAL, 0, DEFAULT);
		break;
	  case MASTER_AD:
		setPPstyle(PP_MASTER_NORMAL, 0, DEFAULT);
		break;
	  case CKPT_SRVR_AD:
		setPPstyle(PP_CKPT_SRVR_NORMAL, 0, DEFAULT);
		break;
	  case COLLECTOR_AD:
		setPPstyle(PP_COLLECTOR_NORMAL, 0, DEFAULT);
		break;
	  case STORAGE_AD:
		setPPstyle(PP_STORAGE_NORMAL, 0, DEFAULT);
		break;
	  case NEGOTIATOR_AD:
		setPPstyle(PP_NEGOTIATOR_NORMAL, 0, DEFAULT);
		break;
	  case GRID_AD:
		setPPstyle(PP_GRID_NORMAL, 0, DEFAULT);
		break;
	  case GENERIC_AD:
		setPPstyle(PP_GENERIC, 0, DEFAULT);
		break;
	  case ANY_AD:
		setPPstyle(PP_ANY_NORMAL, 0, DEFAULT);
		break;
	  default:
		setPPstyle(PP_VERBOSE, 0, DEFAULT);
	}

	// set the constraints implied by the mode
	switch (mode) {
#ifdef HAVE_EXT_POSTGRESQL
	  case MODE_QUILL_NORMAL:
#endif /* HAVE_EXT_POSTGRESQL */
	  case MODE_DEFRAG_NORMAL:
	  case MODE_STARTD_NORMAL:
	  case MODE_MASTER_NORMAL:
	  case MODE_CKPT_SRVR_NORMAL:
	  case MODE_SCHEDD_NORMAL:
	  case MODE_SCHEDD_SUBMITTORS:
	  case MODE_COLLECTOR_NORMAL:
	  case MODE_NEGOTIATOR_NORMAL:
	  case MODE_STORAGE_NORMAL:
	  case MODE_GENERIC_NORMAL:
	  case MODE_ANY_NORMAL:
	  case MODE_GRID_NORMAL:
	  case MODE_HAD_NORMAL:
		break;

	  case MODE_OTHER:
		// tell the query object what the type we're querying is
		query->setGenericQueryType(genericType);
		free(genericType);
		genericType = NULL;
		break;

	  case MODE_STARTD_AVAIL:
		// For now, -avail shows you machines avail to anyone.
		sprintf (buffer, "%s == \"%s\"", ATTR_STATE, state_to_string(unclaimed_state));
		if (diagnose) {
			printf ("Adding constraint [%s]\n", buffer);
		}
		query->addORConstraint (buffer);
		break;

	  case MODE_STARTD_RUN:
		sprintf (buffer, "%s == \"%s\"", ATTR_STATE, state_to_string(claimed_state));
		if (diagnose) {
			printf ("Adding constraint [%s]\n", buffer);
		}
		query->addORConstraint (buffer);
		break;

	  case MODE_STARTD_COD:
		sprintf (buffer, "%s > 0", ATTR_NUM_COD_CLAIMS );
		if (diagnose) {
			printf ("Adding constraint [%s]\n", buffer);
		}
		query->addORConstraint (buffer);
		break;

	  default:
		break;
	}

	if(javaMode) {
		sprintf( buffer, "%s == TRUE", ATTR_HAS_JAVA );
		if (diagnose) {
			printf ("Adding constraint [%s]\n", buffer);
		}
		query->addANDConstraint (buffer);

		projList.AppendArg(ATTR_HAS_JAVA);
		projList.AppendArg(ATTR_JAVA_MFLOPS);
		projList.AppendArg(ATTR_JAVA_VENDOR);
		projList.AppendArg(ATTR_JAVA_VERSION);
	}

	if(absentMode) {
		sprintf( buffer, "%s == TRUE", ATTR_ABSENT );
		if (diagnose) {
			printf( "Adding constraint %s\n", buffer );
		}
		query->addANDConstraint( buffer );

		projList.AppendArg( ATTR_ABSENT );
		projList.AppendArg( ATTR_LAST_HEARD_FROM );
		projList.AppendArg( ATTR_CLASSAD_LIFETIME );
	}

	if(vmMode) {
		sprintf( buffer, "%s == TRUE", ATTR_HAS_VM);
		if (diagnose) {
			printf ("Adding constraint [%s]\n", buffer);
		}
		query->addANDConstraint (buffer);

		projList.AppendArg(ATTR_VM_TYPE);
		projList.AppendArg(ATTR_VM_MEMORY);
		projList.AppendArg(ATTR_VM_NETWORKING);
		projList.AppendArg(ATTR_VM_NETWORKING_TYPES);
		projList.AppendArg(ATTR_VM_HARDWARE_VT);
		projList.AppendArg(ATTR_VM_AVAIL_NUM);
		projList.AppendArg(ATTR_VM_ALL_GUEST_MACS);
		projList.AppendArg(ATTR_VM_ALL_GUEST_IPS);
		projList.AppendArg(ATTR_VM_GUEST_MAC);
		projList.AppendArg(ATTR_VM_GUEST_IP);
	}

	// second pass: add regular parameters and constraints
	if (diagnose) {
		printf ("----------\n");
	}

	secondPass (argc, argv);

	// initialize the totals object
	if (ppStyle == PP_CUSTOM && using_print_format) {
		if (pmHeadFoot & HF_NOSUMMARY) ppTotalStyle = PP_CUSTOM;
	} else {
		ppTotalStyle = ppStyle;
	}
	TrackTotals totals(ppTotalStyle);

	// fetch the query
	QueryResult q;

	if ((mode == MODE_STARTD_NORMAL) && (ppStyle == PP_STARTD_NORMAL)) {
		projList.AppendArg("Name");
		projList.AppendArg("Machine");
		projList.AppendArg("Opsys");
		projList.AppendArg("Arch");
		projList.AppendArg("State");
		projList.AppendArg("Activity");
		projList.AppendArg("LoadAvg");
		projList.AppendArg("Memory");
		projList.AppendArg("ActvtyTime");
		projList.AppendArg("MyCurrentTime");
		projList.AppendArg("EnteredCurrentActivity");
	} else if( ppStyle == PP_VERBOSE ) {
		// Remove everything from the projection list if we're displaying
		// the "long form" of the ads.
		projList.Clear();
	}

	if( projList.Count() > 0 ) {
		char **attr_list = projList.GetStringArray();
		query->setDesiredAttrs(attr_list);
		deleteStringArray(attr_list);
	}

	// if diagnose was requested, just print the query ad
	if (diagnose) {
		ClassAd queryAd;

		// print diagnostic information about inferred internal state
		setMode ((Mode) 0, 0, NULL);
		setType (NULL, 0, NULL);
		setPPstyle ((ppOption) 0, 0, DEFAULT);
		printf ("----------\n");

		q = query->getQueryAd (queryAd);
		fPrintAd (stdout, queryAd);

		printf ("----------\n");
		fprintf (stderr, "Result of making query ad was: %d\n", q);
		exit (1);
	}

	// Address (host:port) is taken from requested pool, if given.
	char* addr = (NULL != pool) ? pool->addr() : NULL;
	Daemon* requested_daemon = pool;

	// If we're in "direct" mode, then we attempt to locate the daemon
	// associated with the requested subsystem (here encoded by value of mode)
	// In this case the host:port of pool (if given) denotes which
	// pool is being consulted
	if( direct ) {
		Daemon *d = NULL;
		switch( mode ) {
		case MODE_MASTER_NORMAL:
			d = new Daemon( DT_MASTER, direct, addr );
			break;
		case MODE_STARTD_NORMAL:
		case MODE_STARTD_AVAIL:
		case MODE_STARTD_RUN:
		case MODE_STARTD_COD:
			d = new Daemon( DT_STARTD, direct, addr );
			break;
#ifdef HAVE_EXT_POSTGRESQL
		case MODE_QUILL_NORMAL:
			d = new Daemon( DT_QUILL, direct, addr );
			break;
#endif /* HAVE_EXT_POSTGRESQL */
		case MODE_SCHEDD_NORMAL:
		case MODE_SCHEDD_SUBMITTORS:
			d = new Daemon( DT_SCHEDD, direct, addr );
			break;
		case MODE_NEGOTIATOR_NORMAL:
			d = new Daemon( DT_NEGOTIATOR, direct, addr );
			break;
		case MODE_CKPT_SRVR_NORMAL:
		case MODE_COLLECTOR_NORMAL:
		case MODE_LICENSE_NORMAL:
		case MODE_STORAGE_NORMAL:
		case MODE_GENERIC_NORMAL:
		case MODE_ANY_NORMAL:
		case MODE_OTHER:
		case MODE_GRID_NORMAL:
		case MODE_HAD_NORMAL:
			// These have to go to the collector, anyway.
			break;
		default:
			fprintf( stderr, "Error: Illegal mode %d\n", mode );
			exit( 1 );
			break;
		}

		// Here is where we actually override 'addr', if we can obtain
		// address of the requested daemon/subsys. If it can't be
		// located, then fail with error msg.
		// 'd' will be null (unset) if mode is one of above that must go to
		// collector (MODE_ANY_NORMAL, MODE_COLLECTOR_NORMAL, etc)
		if (NULL != d) {
			if( d->locate() ) {
				addr = d->addr();
				requested_daemon = d;
			} else {
				const char* id = d->idStr();
				if (NULL == id) id = d->name();
				dprintf_WriteOnErrorBuffer(stderr, true);
				if (NULL == id) id = "daemon";
				fprintf(stderr, "Error: Failed to locate %s\n", id);
				fprintf(stderr, "%s\n", d->error());
				exit( 1 );
			}
		}
	}

	ClassAdList result;
	CondorError errstack;
	if (NULL != ads_file) {
		MyString req; // query requirements
		q = query->getRequirements(req);
		const char * constraint = req.empty() ? NULL : req.c_str();
		if (read_classad_file(ads_file, result, constraint)) {
			q = Q_OK;
		}
	} else if (NULL != addr) {
		// this case executes if pool was provided, or if in "direct" mode with
		// subsystem that corresponds to a daemon (above).
		// Here 'addr' represents either the host:port of requested pool, or
		// alternatively the host:port of daemon associated with requested subsystem (direct mode)
		q = query->fetchAds (result, addr, &errstack);
	} else {
		// otherwise obtain list of collectors and submit query that way
		CollectorList * collectors = CollectorList::create();
		q = collectors->query (*query, result, &errstack);
		delete collectors;
	}

	// if any error was encountered during the query, report it and exit
	if (Q_OK != q) {

		dprintf_WriteOnErrorBuffer(stderr, true);

		// we can always provide these messages:
		fprintf( stderr, "Error: %s\n", getStrQueryResult(q) );
		fprintf( stderr, "%s\n", errstack.getFullText(true).c_str() );

		if ((NULL != requested_daemon) && ((Q_NO_COLLECTOR_HOST == q) ||
			(requested_daemon->type() == DT_COLLECTOR)))
		{
			// Specific long message if connection to collector failed.
			const char* fullhost = requested_daemon->fullHostname();
			if (NULL == fullhost) fullhost = "<unknown_host>";
			const char* daddr = requested_daemon->addr();
			if (NULL == daddr) daddr = "<unknown>";
			char info[1000];
			sprintf(info, "%s (%s)", fullhost, daddr);
			printNoCollectorContact( stderr, info, !expert );
		} else if ((NULL != requested_daemon) && (Q_COMMUNICATION_ERROR == q)) {
			// more helpful message for failure to connect to some daemon/subsys
			const char* id = requested_daemon->idStr();
			if (NULL == id) id = requested_daemon->name();
			if (NULL == id) id = "daemon";
			const char* daddr = requested_daemon->addr();
			if (NULL == daddr) daddr = "<unknown>";
			fprintf(stderr, "Error: Failed to contact %s at %s\n", id, daddr);
		}

		// fail
		exit (1);
	}

	if (noSort) {
		// do nothing
	} else if (sortSpecs.empty()) {
		// default classad sorting
		result.Sort((SortFunctionType)lessThanFunc);
	} else {
		// User requested custom sorting expressions:
		// insert attributes related to custom sorting
		result.Open();
		while (ClassAd* ad = result.Next()) {
			for (vector<SortSpec>::iterator ss(sortSpecs.begin()); ss != sortSpecs.end(); ++ss) {
				ss->expr->SetParentScope(ad);
				classad::Value v;
				ss->expr->Evaluate(v);
				stringstream vs;
				// This will properly render all supported value types,
				// including undefined and error, although current semantic
				// pre-filters classads where sort expressions are undef/err:
				vs << ((v.IsStringValue())?"\"":"") << v << ((v.IsStringValue())?"\"":"");
				ad->AssignExpr(ss->keyAttr.c_str(), vs.str().c_str());
				// Save the full expr in case user wants to examine on output:
				ad->AssignExpr(ss->keyExprAttr.c_str(), ss->arg.c_str());
			}
		}

		result.Open();
		result.Sort((SortFunctionType)customLessThanFunc);
	}

	// output result
	prettyPrint (result, &totals);

	delete query;

	return 0;
}


const CustomFormatFnTable * getCondorStatusPrintFormats();

// Parse a print-format stream ("-" for stdin, a filename, or a literal
// string) into the global AttrListPrintMask / header-footer flags /
// group-by keys / projection list. Returns the parser's error code (0 on
// success); on success *pconstraint points at any stored WHERE expression.
int set_status_print_mask_from_stream (
	const char * streamid,
	bool is_filename,
	const char ** pconstraint)
{
	std::string where_expr;
	std::string messages;
	StringList attrs;

	SimpleInputStream * pstream = NULL;
	*pconstraint = NULL;

	FILE *file = NULL;
	if (MATCH == strcmp("-", streamid)) {
		pstream = new SimpleFileInputStream(stdin, false);
	} else if (is_filename) {
		file = safe_fopen_wrapper_follow(streamid, "r");
		if (file == NULL) {
			fprintf(stderr, "Can't open select file: %s\n", streamid);
			return -1;
		}
		pstream = new SimpleFileInputStream(file, true);
	} else {
		pstream = new StringLiteralInputStream(streamid);
	}
	ASSERT(pstream);

	int err = SetAttrListPrintMaskFromStream(
					*pstream,
					*getCondorStatusPrintFormats(),
					pm,
					pmHeadFoot,
					group_by_keys,
					where_expr,
					attrs,
					messages);
	delete pstream; pstream = NULL;
	if ( ! err) {
		if ( ! where_expr.empty()) {
			*pconstraint = pm.store(where_expr.c_str());
			//if ( ! validate_constraint(*pconstraint)) {
			//	formatstr_cat(messages, "WHERE expression is not valid: %s\n", *pconstraint);
			//}
		}
		// convert projection list into the format that condor status likes. because programmers.
		attrs.rewind();
		const char * attr;
		while ((attr = attrs.next())) { projList.AppendArg(attr); }
	}
	if ( ! messages.empty()) { fprintf(stderr, "%s", messages.c_str()); }
	return err;
}

// Read classads from a file (ads delimited per the parse helper),
// optionally keeping only ads for which 'constr' evaluates to true.
// Returns true if the whole file was consumed without a parse error.
static bool read_classad_file(const char *filename, ClassAdList &classads, const char * constr)
{
	bool success = false;

	FILE* file = safe_fopen_wrapper_follow(filename, "r");
	if (file == NULL) {
		fprintf(stderr, "Can't open file of job ads: %s\n", filename);
		return false;
	} else {
		CondorClassAdFileParseHelper parse_helper("\n");

		for (;;) {
			ClassAd* classad = new ClassAd();

			int error;
			bool is_eof;
			int cAttrs = classad->InsertFromFile(file, is_eof, error, &parse_helper);

			bool include_classad = cAttrs > 0 && error >= 0;
			if (include_classad && constr) {
				classad::Value val;
				if (classad->EvaluateExpr(constr,val)) {
					if ( ! val.IsBooleanValueEquiv(include_classad)) {
						include_classad = false;
					}
				}
			}
			if (include_classad) {
				classads.Insert(classad);
			} else {
				delete classad;
			}

			if (is_eof) {
				success = true;
				break;
			}
			if (error < 0) {
				success = false;
				break;
			}
		}

		fclose(file);
	}
	return success;
}

// Print the command-line usage summary to stderr.
void usage ()
{
	fprintf (stderr,"Usage: %s [help-opt] [query-opt] [display-opt] "
		"[custom-opts ...] [name ...]\n"
		" where [help-opt] is one of\n"
		"\t-help\t\t\tPrint this screen and exit\n"
		"\t-version\t\tPrint HTCondor version and exit\n"
		"\t-diagnose\t\tPrint out query ad without performing query\n"
		" and [query-opt] is one of\n"
		"\t-absent\t\t\tPrint information about absent resources\n"
		"\t-avail\t\t\tPrint information about available resources\n"
		"\t-ckptsrvr\t\tDisplay checkpoint server attributes\n"
		"\t-claimed\t\tPrint information about claimed resources\n"
		"\t-cod\t\t\tDisplay Computing On Demand (COD) jobs\n"
		"\t-collector\t\tDisplay collector daemon attributes\n"
		"\t-debug\t\t\tDisplay debugging info to console\n"
		"\t-defrag\t\t\tDisplay status of defrag daemon\n"
		"\t-direct <host>\t\tGet attributes directly from the given daemon\n"
		"\t-java\t\t\tDisplay Java-capable hosts\n"
		"\t-vm\t\t\tDisplay VM-capable hosts\n"
		"\t-license\t\tDisplay attributes of licenses\n"
		"\t-master\t\t\tDisplay daemon master attributes\n"
		"\t-pool <name>\t\tGet information from collector <name>\n"
		"\t-ads <file>\t\tGet information from <file>\n"
		"\t-grid\t\t\tDisplay grid resources\n"
		"\t-run\t\t\tSame as -claimed [deprecated]\n"
#ifdef HAVE_EXT_POSTGRESQL
		"\t-quill\t\t\tDisplay attributes of quills\n"
#endif /* HAVE_EXT_POSTGRESQL */
		"\t-schedd\t\t\tDisplay attributes of schedds\n"
		"\t-server\t\t\tDisplay important attributes of resources\n"
		"\t-startd\t\t\tDisplay resource attributes\n"
		"\t-generic\t\tDisplay attributes of 'generic' ads\n"
		"\t-subsystem <type>\tDisplay classads of the given type\n"
		"\t-negotiator\t\tDisplay negotiator attributes\n"
		"\t-storage\t\tDisplay network storage resources\n"
		"\t-any\t\t\tDisplay any resources\n"
		"\t-state\t\t\tDisplay state of resources\n"
		"\t-submitters\t\tDisplay information about request submitters\n"
//		"\t-statistics <set>:<n>\tDisplay statistics for <set> at level <n>\n"
//		"\t\t\t\tsee STATISTICS_TO_PUBLISH for valid <set> and level values\n"
//		"\t-world\t\t\tDisplay all pools reporting to UW collector\n"
		" and [display-opt] is one of\n"
		"\t-long\t\t\tDisplay entire classads\n"
		"\t-sort <expr>\t\tSort entries by expressions. 'no' disables sorting\n"
		"\t-total\t\t\tDisplay totals only\n"
		"\t-verbose\t\tSame as -long\n"
		"\t-wide\t\t\tdon't truncate data to fit in 80 columns.\n"
		"\t-xml\t\t\tDisplay entire classads, but in XML\n"
		"\t-attributes X,Y,...\tAttributes to show in -xml or -long \n"
		"\t-expert\t\t\tDisplay shorter error messages\n"
		" and [custom-opts ...] are one or more of\n"
		"\t-constraint <const>\tAdd constraint on classads\n"
		"\t-format <fmt> <attr>\tRegister display format and attribute\n"
		"\t-autoformat:[V,ntlh] <attr> [attr2 [attr3 ...]]\t Print attr(s) with automatic formatting\n"
		"\t\tV\tUse %%V formatting\n"
		"\t\t,\tComma separated (default is space separated)\n"
		"\t\tt\tTab separated\n"
		"\t\tn\tNewline after each attribute\n"
		"\t\tl\tLabel each value\n"
		"\t\th\tHeadings\n"
		"\t-target filename\tIf -format or -af is used, the option target classad\n",
		myName);
}

// First command-line pass: determine the query mode/type and remember any
// duplicated-option errors; constraints and formats wait for secondPass().
void
firstPass (int argc, char *argv[])
{
	int had_pool_error = 0;
	int had_direct_error = 0;
	int had_statistics_error = 0;
	//bool explicit_mode = false;
	const char * pcolon = NULL;

	// Process arguments: there are dependencies between them
	// o -l/v and -serv are mutually exclusive
	// o -sub, -avail and -run are mutually exclusive
	// o -pool and -entity may be used at most once
	// o since -c can be processed only after the query has been instantiated,
	//   constraints are added on the second pass
	for (int i = 1; i < argc; i++) {
		if (matchPrefix (argv[i], "-avail", 3)) {
			setMode (MODE_STARTD_AVAIL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-pool", 2)) {
			if( pool ) {
				delete pool;
				had_pool_error = 1;
			}
			i++;
			if( ! argv[i] ) {
				fprintf( stderr, "%s: -pool requires a hostname as an argument.\n",
						 myName );
				if (!expert) {
					printf("\n");
					print_wrapped_text("Extra Info: The hostname should be the central "
									   "manager of the Condor pool you wish to work with.",
									   stderr);
					printf("\n");
				}
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
			pool = new DCCollector( argv[i] );
			if( !pool->addr() ) {
				dprintf_WriteOnErrorBuffer(stderr, true);
				fprintf( stderr, "Error: %s\n", pool->error() );
				if (!expert) {
					printf("\n");
					print_wrapped_text("Extra Info: You specified a hostname for a pool "
									   "(the -pool argument). That should be the Internet "
									   "host name for the central manager of the pool, "
									   "but it does not seem to "
									   "be a valid hostname. (The DNS lookup failed.)",
									   stderr);
				}
				exit( 1 );
			}
		} else
		if (is_dash_arg_prefix (argv[i], "ads", 2)) {
			if( !argv[i+1] ) {
				fprintf( stderr, "%s: -ads requires a filename argument\n",
						 myName );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
			i += 1;
			ads_file = argv[i];
		} else
		if (matchPrefix (argv[i], "-format", 2)) {
			setPPstyle (PP_CUSTOM, i, argv[i]);
			if( !argv[i+1] || !argv[i+2] ) {
				fprintf( stderr, "%s: -format requires two other arguments\n",
						 myName );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
			i += 2;
			explicit_format = true;
		} else
		if (*argv[i] == '-' &&
			(is_arg_colon_prefix(argv[i]+1, "autoformat", &pcolon, 5) ||
			 is_arg_colon_prefix(argv[i]+1, "af", &pcolon, 2)) ) {
			// make sure we have at least one more argument
			if ( !argv[i+1] || *(argv[i+1]) == '-') {
				fprintf( stderr, "Error: Argument %s requires "
						 "at last one attribute parameter\n", argv[i] );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
			explicit_format = true;
			setPPstyle (PP_CUSTOM, i, argv[i]);
			while (argv[i+1] && *(argv[i+1]) != '-') {
				++i;
			}
			// if autoformat list ends in a '-' without any characters after it, just eat the arg and keep going.
			if (i+1 < argc && '-' == (argv[i+1])[0] && 0 == (argv[i+1])[1]) {
				++i;
			}
		} else
		if (is_dash_arg_colon_prefix(argv[i], "print-format", &pcolon, 2)) {
			if ( (i+1 >= argc) || (*(argv[i+1]) == '-' && (argv[i+1])[1] != 0)) {
				fprintf( stderr, "Error: Argument -print-format requires a filename argument\n");
				exit( 1 );
			}
			explicit_format = true;
			++i; // eat the next argument.
			// we can't fully parse the print format argument until the second pass, so we are done for now.
		} else
		if (matchPrefix (argv[i], "-wide", 3)) {
			wide_display = true; // when true, don't truncate field data
			//invalid_fields_empty = true;
		} else
		if (matchPrefix (argv[i], "-target", 5)) {
			if( !argv[i+1] ) {
				fprintf( stderr, "%s: -target requires one additional argument\n",
						 myName );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
			i += 1;
			target = argv[i];
			FILE *targetFile = safe_fopen_wrapper_follow(target, "r");
			int iseof, iserror, empty;
			targetAd = new ClassAd(targetFile, "\n\n", iseof, iserror, empty);
			fclose(targetFile);
		} else
		if (matchPrefix (argv[i], "-constraint", 4)) {
			// can add constraints on second pass only
			i++;
			if( ! argv[i] ) {
				fprintf( stderr, "%s: -constraint requires another argument\n",
						 myName );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
		} else
		if (matchPrefix (argv[i], "-direct", 4)) {
			if( direct ) {
				free( direct );
				had_direct_error = 1;
			}
			i++;
			if( ! argv[i] ) {
				fprintf( stderr, "%s: -direct requires another argument\n",
						 myName );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
			direct = strdup( argv[i] );
		} else
		if (matchPrefix (argv[i], "-diagnose", 4)) {
			diagnose = 1;
		} else
		if (matchPrefix (argv[i], "-debug", 3)) {
			// dprintf to console
			dprintf_set_tool_debug("TOOL", 0);
		} else
		if (matchPrefix (argv[i], "-defrag", 4)) {
			setMode (MODE_DEFRAG_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-help", 2)) {
			usage ();
			exit (0);
		} else
		if (matchPrefix (argv[i], "-long", 2) || matchPrefix (argv[i],"-verbose", 3)) {
			setPPstyle (PP_VERBOSE, i, argv[i]);
		} else
		if (matchPrefix (argv[i],"-xml", 2)){
			setPPstyle (PP_XML, i, argv[i]);
		} else
		if (matchPrefix (argv[i],"-attributes", 3)){
			if( !argv[i+1] ) {
				fprintf( stderr, "%s: -attributes requires one additional argument\n",
						 myName );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
			i++;
		} else
		if (matchPrefix (argv[i], "-run", 2) || matchPrefix(argv[i], "-claimed", 3)) {
			setMode (MODE_STARTD_RUN, i, argv[i]);
		} else
		if( matchPrefix (argv[i], "-cod", 4) ) {
			setMode (MODE_STARTD_COD, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-java", 2)) {
			/*explicit_mode =*/ javaMode = true;
		} else
		if (matchPrefix (argv[i], "-absent", 3)) {
			/*explicit_mode =*/ absentMode = true;
		} else
		if (matchPrefix (argv[i], "-vm", 3)) {
			/*explicit_mode =*/ vmMode = true;
		} else
		if (matchPrefix (argv[i], "-server", 3)) {
			setPPstyle (PP_STARTD_SERVER, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-state", 5)) {
			setPPstyle (PP_STARTD_STATE, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-statistics", 6)) {
			if( statistics ) {
				free( statistics );
				had_statistics_error = 1;
			}
			i++;
			if( ! argv[i] ) {
				fprintf( stderr, "%s: -statistics requires another argument\n",
						 myName );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
			statistics = strdup( argv[i] );
		} else
		if (matchPrefix (argv[i], "-startd", 5)) {
			setMode (MODE_STARTD_NORMAL,i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-schedd", 3)) {
			setMode (MODE_SCHEDD_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-grid", 2)) {
			setMode (MODE_GRID_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-subsystem", 5)) {
			i++;
			if( !argv[i] ) {
				fprintf( stderr, "%s: -subsystem requires another argument\n",
						 myName );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
			if (matchPrefix (argv[i], "schedd", 6)) {
				setMode (MODE_SCHEDD_NORMAL, i, argv[i]);
			} else
			if (matchPrefix (argv[i], "startd", 6)) {
				setMode (MODE_STARTD_NORMAL, i, argv[i]);
			} else
			if (matchPrefix (argv[i], "quill", 5)) {
				setMode (MODE_QUILL_NORMAL, i, argv[i]);
			} else
			if (matchPrefix (argv[i], "negotiator", 10)) {
				setMode (MODE_NEGOTIATOR_NORMAL, i, argv[i]);
			} else
			if (matchPrefix (argv[i], "master", 6)) {
				setMode (MODE_MASTER_NORMAL, i, argv[i]);
			} else
			if (matchPrefix (argv[i], "collector", 9)) {
				setMode (MODE_COLLECTOR_NORMAL, i, argv[i]);
			} else
			if (matchPrefix (argv[i], "generic", 7)) {
				setMode (MODE_GENERIC_NORMAL, i, argv[i]);
			} else
			if (matchPrefix (argv[i], "had", 3)) {
				setMode (MODE_HAD_NORMAL, i, argv[i]);
			} else
			if (*argv[i] == '-') {
				fprintf(stderr, "%s: -subsystem requires another argument\n",
						myName);
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit(1);
			} else {
				genericType = strdup(argv[i]);
				setMode (MODE_OTHER, i, argv[i]);
			}
		} else
#ifdef HAVE_EXT_POSTGRESQL
		if (matchPrefix (argv[i], "-quill", 2)) {
			setMode (MODE_QUILL_NORMAL, i, argv[i]);
		} else
#endif /* HAVE_EXT_POSTGRESQL */
		if (matchPrefix (argv[i], "-license", 3)) {
			setMode (MODE_LICENSE_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-storage", 4)) {
			setMode (MODE_STORAGE_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-negotiator", 2)) {
			setMode (MODE_NEGOTIATOR_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-generic", 3)) {
			setMode (MODE_GENERIC_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-any", 3)) {
			setMode (MODE_ANY_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-sort", 3)) {
			i++;
			if( ! argv[i] ) {
				fprintf( stderr, "%s: -sort requires another argument\n",
						 myName );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}

			if (MATCH == strcasecmp(argv[i], "false") ||
				MATCH == strcasecmp(argv[i], "0") ||
				MATCH == strcasecmp(argv[i], "no") ||
				MATCH == strcasecmp(argv[i], "none"))
			{
				noSort = true;
				continue;
			}

			int jsort = sortSpecs.size();
			SortSpec ss;
			ExprTree* sortExpr = NULL;
			if (ParseClassAdRvalExpr(argv[i], sortExpr)) {
				fprintf(stderr, "Error: Parse error of: %s\n", argv[i]);
				exit(1);
			}
			ss.expr = sortExpr;

			ss.arg = argv[i];
			formatstr(ss.keyAttr, "CondorStatusSortKey%d", jsort);
			formatstr(ss.keyExprAttr, "CondorStatusSortKeyExpr%d", jsort);

			string exprString;
			formatstr(exprString, "MY.%s < TARGET.%s", ss.keyAttr.c_str(), ss.keyAttr.c_str());
			if (ParseClassAdRvalExpr(exprString.c_str(), sortExpr)) {
				fprintf(stderr, "Error: Parse error of: %s\n", exprString.c_str());
				exit(1);
			}
			ss.exprLT = sortExpr;

			formatstr(exprString, "MY.%s == TARGET.%s", ss.keyAttr.c_str(), ss.keyAttr.c_str());
			if (ParseClassAdRvalExpr(exprString.c_str(), sortExpr)) {
				fprintf(stderr, "Error: Parse error of: %s\n", exprString.c_str());
				exit(1);
			}
			ss.exprEQ = sortExpr;

			sortSpecs.push_back(ss);
			// the silent constraint TARGET.%s =!= UNDEFINED is added
			// as a customAND constraint on the second pass
		} else
		if (matchPrefix (argv[i], "-submitters", 5)) {
			setMode (MODE_SCHEDD_SUBMITTORS, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-master", 2)) {
			setMode (MODE_MASTER_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-collector", 4)) {
			setMode (MODE_COLLECTOR_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-world", 2)) {
			setMode (MODE_COLLECTOR_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-ckptsrvr", 3)) {
			setMode (MODE_CKPT_SRVR_NORMAL, i, argv[i]);
		} else
		if (matchPrefix (argv[i], "-total", 2)) {
			wantOnlyTotals = 1;
			explicit_format = true;
		} else
		if (matchPrefix(argv[i], "-expert", 2)) {
			expert = true;
		} else
		if (matchPrefix(argv[i], "-version", 4)) {
			printf( "%s\n%s\n", CondorVersion(), CondorPlatform() );
			exit(0);
		} else
		if (*argv[i] == '-') {
			fprintf (stderr, "Error: Unknown option %s\n", argv[i]);
			usage ();
			exit (1);
		}
	}

	if( had_pool_error ) {
		fprintf( stderr,
				 "Warning: Multiple -pool arguments given, using \"%s\"\n",
				 pool->name() );
	}
	if( had_direct_error ) {
		fprintf( stderr,
				 "Warning: Multiple -direct arguments given, using \"%s\"\n",
				 direct );
	}
	if( had_statistics_error ) {
		fprintf( stderr,
				 "Warning: Multiple -statistics arguments given, using \"%s\"\n",
				 statistics );
	}
}

// Second command-line pass: register display formats, add constraints, and
// build the projection list. NOTE: this function continues beyond this
// chunk of the file.
void
secondPass (int argc, char *argv[])
{
	const char * pcolon = NULL;
	char *daemonname;
	for (int i = 1; i < argc; i++) {
		// omit parameters which qualify switches
		if( matchPrefix(argv[i],"-pool", 2) || matchPrefix(argv[i],"-direct", 4) ) {
			i++;
			continue;
		}
		if( matchPrefix(argv[i],"-subsystem", 5) ) {
			i++;
			continue;
		}
		if (matchPrefix (argv[i], "-format", 2)) {
			pm.registerFormat (argv[i+1], argv[i+2]);

			StringList attributes;
			ClassAd ad;
			if(!ad.GetExprReferences(argv[i+2],attributes,attributes)){
				fprintf( stderr, "Error: Parse error of: %s\n", argv[i+2]);
				exit(1);
			}

			attributes.rewind();
			char const *s;
			while( (s=attributes.next()) ) {
				projList.AppendArg(s);
			}

			if (diagnose) {
				printf ("Arg %d --- register format [%s] for [%s]\n",
						i, argv[i+1], argv[i+2]);
			}
			i += 2;
			continue;
		}
		if (*argv[i] == '-' &&
			(is_arg_colon_prefix(argv[i]+1, "autoformat", &pcolon, 5) ||
			 is_arg_colon_prefix(argv[i]+1, "af", &pcolon, 2)) ) {
			// make sure we have at least one more argument
			if ( !argv[i+1] || *(argv[i+1]) == '-') {
				fprintf( stderr, "Error: Argument %s requires "
						 "at last one attribute parameter\n", argv[i] );
				fprintf( stderr, "Use \"%s -help\" for details\n", myName );
				exit( 1 );
			}
			bool flabel = false;
			bool fCapV  = false;
			bool fheadings = false;
			const char * pcolpre = " ";
			const char * pcolsux = NULL;
			if (pcolon) {
				++pcolon;
				while (*pcolon) {
					switch (*pcolon)
					{
						case ',': pcolsux = ","; break;
						case 'n': pcolsux = "\n"; break;
						case 't': pcolpre = "\t"; break;
						case 'l': flabel = true; break;
						case 'V': fCapV = true; break;
						case 'h': fheadings = true; break;
					}
					++pcolon;
				}
			}
			pm.SetAutoSep(NULL, pcolpre, pcolsux, "\n");

			while (argv[i+1] && *(argv[i+1]) != '-') {
				++i;
				ClassAd ad;
				StringList attributes;
				if(!ad.GetExprReferences(argv[i],attributes,attributes)){
					fprintf( stderr, "Error: Parse error of: %s\n", argv[i]);
					exit(1);
				}

				attributes.rewind();
				char const *s;
				while ((s = attributes.next())) {
					projList.AppendArg(s);
				}

				MyString lbl = "";
				int wid = 0;
				int opts = FormatOptionNoTruncate;
				if (fheadings || pm_head.Length() > 0) {
					const char * hd = fheadings ? argv[i] : "(expr)";
					wid = 0 - (int)strlen(hd);
					opts = FormatOptionAutoWidth | FormatOptionNoTruncate;
					pm_head.Append(hd);
				}
				else if (flabel) { lbl.formatstr("%s = ", argv[i]); wid = 0; opts = 0; }
				lbl += fCapV ? "%V" : "%v";
				if (diagnose) {
					printf ("Arg %d --- register format [%s] width=%d, opt=0x%x for [%s]\n",
							i, lbl.Value(), wid, opts, argv[i]);
				}
				pm.registerFormat(lbl.Value(), wid, opts, argv[i]);
			}
			// if autoformat list ends in a '-' without any characters after it, just eat the arg and keep going.
			if (i+1 < argc && '-' == (argv[i+1])[0] && 0 == (argv[i+1])[1]) {
				++i;
			}
			continue;
		}
		if (is_dash_arg_colon_prefix(argv[i], "print-format", &pcolon, 2)) {
			if ( (i+1 >= argc) || (*(argv[i+1]) == '-' && (argv[i+1])[1] != 0)) {
				fprintf( stderr, "Error: Argument -print-format requires a filename argument\n");
				exit( 1 );
			}
			// hack allow -pr ! to disable use of user-default print format files.
			if (MATCH == strcmp(argv[i+1], "!")) {
				++i;
				disable_user_print_files = true;
				continue;
			}
			ppTotalStyle = ppStyle;
			setPPstyle (PP_CUSTOM, i, argv[i]);
			++i; // skip to the next argument.
			if (set_status_print_mask_from_stream(argv[i], true, &mode_constraint) < 0) {
				fprintf(stderr, "Error: invalid select file %s\n", argv[i]);
				exit (1);
			}
			if (mode_constraint) {
				query->addANDConstraint(mode_constraint);
			}
			using_print_format = true; // so we can hack totals.
			continue;
		}
		if (matchPrefix (argv[i], "-target", 5)) {
			i++;
			continue;
		}
		if (is_dash_arg_prefix(argv[i], "ads", 2)) {
			++i;
			continue;
		}
		if( matchPrefix(argv[i], "-sort", 3) ) {
			i++;
			if ( ! noSort) {
				sprintf( buffer, "%s =!= UNDEFINED", argv[i] );
				query->addANDConstraint( buffer );
			}
			continue;
		}

		if (matchPrefix (argv[i], "-statistics", 6)) {
			i += 2;
			sprintf(buffer,"STATISTICS_TO_PUBLISH = \"%s\"", statistics);
			if (diagnose) {
				printf ("[%s]\n", buffer);
			}
			query->addExtraAttribute(buffer);
			continue;
		}

		if (matchPrefix (argv[i], "-attributes", 3) ) {
			// parse attributes to be selected and split them along ","
			StringList more_attrs(argv[i+1],",");
			char const *s;
			more_attrs.rewind();
			while( (s=more_attrs.next()) ) {
				projList.AppendArg(s);
			}
			i++;
			continue;
		}

		// figure out what the other parameters should do
		if (*argv[i] != '-') {
			// display extra information for diagnosis
			if (diagnose) {
				printf ("Arg %d (%s) --- adding constraint", i, argv[i]);
			}

			if( !(daemonname = get_daemon_name(argv[i])) ) {
				if ( (mode==MODE_SCHEDD_SUBMITTORS) && strchr(argv[i],'@') ) {
					// For a submittor query, it is possible that the
					// hostname is really a UID_DOMAIN. And there is
					// no requirement that UID_DOMAIN actually have
					// an inverse lookup in DNS... so if get_daemon_name()
					// fails with a fully qualified submittor lookup, just
					// use what we are given and do not flag an error.
daemonname = strnewp(argv[i]); } else { dprintf_WriteOnErrorBuffer(stderr, true); fprintf( stderr, "%s: unknown host %s\n", argv[0], get_host_part(argv[i]) ); exit(1); } } switch (mode) { case MODE_DEFRAG_NORMAL: case MODE_STARTD_NORMAL: case MODE_STARTD_COD: #ifdef HAVE_EXT_POSTGRESQL case MODE_QUILL_NORMAL: #endif /* HAVE_EXT_POSTGRESQL */ case MODE_SCHEDD_NORMAL: case MODE_SCHEDD_SUBMITTORS: case MODE_MASTER_NORMAL: case MODE_COLLECTOR_NORMAL: case MODE_CKPT_SRVR_NORMAL: case MODE_NEGOTIATOR_NORMAL: case MODE_STORAGE_NORMAL: case MODE_ANY_NORMAL: case MODE_GENERIC_NORMAL: case MODE_STARTD_AVAIL: case MODE_OTHER: case MODE_GRID_NORMAL: case MODE_HAD_NORMAL: sprintf(buffer,"(%s==\"%s\") || (%s==\"%s\")", ATTR_NAME, daemonname, ATTR_MACHINE, daemonname ); if (diagnose) { printf ("[%s]\n", buffer); } query->addORConstraint (buffer); break; case MODE_STARTD_RUN: sprintf (buffer,"%s == \"%s\"",ATTR_REMOTE_USER,argv[i]); if (diagnose) { printf ("[%s]\n", buffer); } query->addORConstraint (buffer); break; default: fprintf(stderr,"Error: Don't know how to process %s\n",argv[i]); } delete [] daemonname; daemonname = NULL; } else if (matchPrefix (argv[i], "-constraint", 4)) { if (diagnose) { printf ("[%s]\n", argv[i+1]); } query->addANDConstraint (argv[i+1]); i++; } } } int matchPrefix (const char *s1, const char *s2, int min_len) { int lenS1 = strlen (s1); int lenS2 = strlen (s2); int len = (lenS1 < lenS2) ? 
lenS1 : lenS2; if(len < min_len) { return 0; } return (strncmp (s1, s2, len) == 0); } int lessThanFunc(AttrList *ad1, AttrList *ad2, void *) { MyString buf1; MyString buf2; int val; if( !ad1->LookupString(ATTR_OPSYS, buf1) || !ad2->LookupString(ATTR_OPSYS, buf2) ) { buf1 = ""; buf2 = ""; } val = strcmp( buf1.Value(), buf2.Value() ); if( val ) { return (val < 0); } if( !ad1->LookupString(ATTR_ARCH, buf1) || !ad2->LookupString(ATTR_ARCH, buf2) ) { buf1 = ""; buf2 = ""; } val = strcmp( buf1.Value(), buf2.Value() ); if( val ) { return (val < 0); } if( !ad1->LookupString(ATTR_MACHINE, buf1) || !ad2->LookupString(ATTR_MACHINE, buf2) ) { buf1 = ""; buf2 = ""; } val = strcmp( buf1.Value(), buf2.Value() ); if( val ) { return (val < 0); } if (!ad1->LookupString(ATTR_NAME, buf1) || !ad2->LookupString(ATTR_NAME, buf2)) return 0; return ( strcmp( buf1.Value(), buf2.Value() ) < 0 ); } int customLessThanFunc( AttrList *ad1, AttrList *ad2, void *) { classad::Value lt_result; bool val; for (unsigned i = 0; i < sortSpecs.size(); ++i) { if (EvalExprTree(sortSpecs[i].exprLT, ad1, ad2, lt_result) && lt_result.IsBooleanValue(val) ) { if( val ) { return 1; } else { if (EvalExprTree( sortSpecs[i].exprEQ, ad1, ad2, lt_result ) && ( !lt_result.IsBooleanValue(val) || !val )){ return 0; } } } else { return 0; } } return 0; }
Java
#
# Copyright (C) 2007-2011 OpenWrt.org
#
# This is free software, licensed under the GNU General Public License v2.
# See /LICENSE for more information.
#

include $(TOPDIR)/rules.mk

# Package identity and upstream release being packaged.
PKG_NAME:=tcpdump
PKG_VERSION:=4.2.1
PKG_RELEASE:=3

# Upstream tarball plus mirror list; MD5 guards the download.
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
PKG_SOURCE_URL:=http://www.tcpdump.org/release/ \
	http://ftp.gwdg.de/pub/misc/tcpdump/ \
	http://www.at.tcpdump.org/ \
	http://www.br.tcpdump.org/
PKG_MD5SUM:=c202878c6db054767b52651041b9e60e

# Separate build dirs per variant (full/mini) so the two builds don't clash.
PKG_BUILD_DIR:=$(BUILD_DIR)/$(PKG_NAME)-$(BUILD_VARIANT)/$(PKG_NAME)-$(PKG_VERSION)
PKG_BUILD_PARALLEL:=1
PKG_MAINTAINER:=Felix Fietkau <nbd@openwrt.org>
PKG_INSTALL:=1

include $(INCLUDE_DIR)/package.mk

# Common metadata shared by both package variants.
define Package/tcpdump/default
  SECTION:=net
  CATEGORY:=Network
  DEPENDS:=+libpcap
  TITLE:=Network monitoring and data acquisition tool
  URL:=http://www.tcpdump.org/
endef

define Package/tcpdump
$(Package/tcpdump/default)
  VARIANT:=full
endef

define Package/tcpdump-mini
$(Package/tcpdump/default)
  TITLE+= (minimal version)
  VARIANT:=mini
endef

CONFIGURE_ARGS += \
	--without-crypto

ifeq ($(CONFIG_IPV6),y)
CONFIGURE_ARGS += \
	--enable-ipv6
endif

# Per-function/data sections + --gc-sections lets the linker drop unused code
# to shrink the binary for embedded targets.
TARGET_CFLAGS += -ffunction-sections -fdata-sections
TARGET_LDFLAGS += -Wl,--gc-sections

# Pre-seed autoconf cache values that cannot be probed when cross-compiling.
CONFIGURE_VARS += \
	BUILD_CC="$(TARGET_CC)" \
	HOSTCC="$(HOSTCC)" \
	td_cv_buggygetaddrinfo="no" \
	ac_cv_linux_vers=$(LINUX_VERSION) \
	ac_cv_header_rpc_rpcent_h=no \
	ac_cv_lib_rpc_main=no \
	ac_cv_path_PCAP_CONFIG=""

MAKE_FLAGS :=

# The "mini" variant compiles out SMB decoding and other protocol dissectors
# guarded by TCPDUMP_MINI in the source tree.
ifeq ($(BUILD_VARIANT),mini)
  TARGET_CFLAGS += -DTCPDUMP_MINI
  CONFIGURE_ARGS += --disable-smb
  MAKE_FLAGS += TCPDUMP_MINI=1
endif

MAKE_FLAGS += \
	CCOPT="$(TARGET_CFLAGS)" INCLS="-I. $(TARGET_CPPFLAGS)"

define Package/tcpdump/install
	$(INSTALL_DIR) $(1)/usr/sbin
	$(INSTALL_BIN) $(PKG_INSTALL_DIR)/usr/sbin/tcpdump $(1)/usr/sbin/
endef

# The mini variant installs the same way as the full one.
Package/tcpdump-mini/install = $(Package/tcpdump/install)

$(eval $(call BuildPackage,tcpdump))
$(eval $(call BuildPackage,tcpdump-mini))
Java
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package entities;

import java.io.Serializable;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * JPA entity mapped to the {@code Product} table.
 *
 * <p>Each product has a name, description, price, image reference and optional
 * extra information, and belongs to exactly one {@link Categorie} via the
 * {@code CategorieNaam} foreign key. Named queries provide lookups by id,
 * price and category.
 *
 * @author Maarten De Weerdt
 */
@Entity
@Table(name = "Product")
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "Product.findAll", query = "SELECT p FROM Product p")
    , @NamedQuery(name = "Product.findById", query = "SELECT p FROM Product p WHERE p.id = :id")
    , @NamedQuery(name = "Product.findByPrijs", query = "SELECT p FROM Product p WHERE p.prijs = :prijs")
    , @NamedQuery(name = "Product.findByCategorie", query = "SELECT p FROM Product p WHERE p.categorieNaam = :categorieNaam") })
public class Product implements Serializable {

    private static final long serialVersionUID = 1L;

    // Auto-increment primary key (database IDENTITY column).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @Column(name = "ID")
    private Integer id;

    // Product name ("Naam"); required, stored as a TEXT/LOB column.
    @Basic(optional = false)
    @NotNull
    @Lob
    @Size(min = 1, max = 65535)
    @Column(name = "Naam")
    private String naam;

    // Product description ("Omschrijving"); required LOB.
    @Basic(optional = false)
    @NotNull
    @Lob
    @Size(min = 1, max = 65535)
    @Column(name = "Omschrijving")
    private String omschrijving;

    // Price ("Prijs"); NOTE(review): stored as double — exact monetary
    // arithmetic would need BigDecimal, but the column mapping dictates this.
    @Basic(optional = false)
    @NotNull
    @Column(name = "Prijs")
    private double prijs;

    // Image reference ("Afbeelding"); required LOB (path or URL — confirm with callers).
    @Basic(optional = false)
    @NotNull
    @Lob
    @Size(min = 1, max = 65535)
    @Column(name = "Afbeelding")
    private String afbeelding;

    // Optional extra information ("Informatie"); nullable LOB.
    @Lob
    @Size(max = 65535)
    @Column(name = "Informatie")
    private String informatie;

    // Owning side of the many-to-one relation to Categorie.
    @JoinColumn(name = "CategorieNaam", referencedColumnName = "CategorieNaam")
    @ManyToOne(optional = false)
    private Categorie categorieNaam;

    /** No-arg constructor required by JPA. */
    public Product() {
    }

    public Product(Integer id) {
        this.id = id;
    }

    /** Convenience constructor setting all mandatory (non-null) columns. */
    public Product(Integer id, String naam, String omschrijving, double prijs, String afbeelding) {
        this.id = id;
        this.naam = naam;
        this.omschrijving = omschrijving;
        this.prijs = prijs;
        this.afbeelding = afbeelding;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getNaam() {
        return naam;
    }

    public void setNaam(String naam) {
        this.naam = naam;
    }

    public String getOmschrijving() {
        return omschrijving;
    }

    public void setOmschrijving(String omschrijving) {
        this.omschrijving = omschrijving;
    }

    public double getPrijs() {
        return prijs;
    }

    public void setPrijs(double prijs) {
        this.prijs = prijs;
    }

    public String getAfbeelding() {
        return afbeelding;
    }

    public void setAfbeelding(String afbeelding) {
        this.afbeelding = afbeelding;
    }

    public String getInformatie() {
        return informatie;
    }

    public void setInformatie(String informatie) {
        this.informatie = informatie;
    }

    public Categorie getCategorieNaam() {
        return categorieNaam;
    }

    public void setCategorieNaam(Categorie categorieNaam) {
        this.categorieNaam = categorieNaam;
    }

    /** Hash based solely on the primary key (0 while the entity is unsaved). */
    @Override
    public int hashCode() {
        int hash = 0;
        hash += (id != null ? id.hashCode() : 0);
        return hash;
    }

    /** Identity equality on the primary key only. */
    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof Product)) {
            return false;
        }
        Product other = (Product) object;
        if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "entities.Product[ id=" + id + " ]";
    }

}
Java
/*
 * Copyright (C) 2018 The Dagger Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package dagger.internal.codegen.validation;

import static com.google.auto.common.MoreTypes.asDeclared;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Predicates.in;
import static com.google.common.collect.Collections2.transform;
import static com.google.common.collect.Iterables.getOnlyElement;
import static dagger.internal.codegen.base.ComponentAnnotation.rootComponentAnnotation;
import static dagger.internal.codegen.base.DiagnosticFormatting.stripCommonTypePrefixes;
import static dagger.internal.codegen.base.Formatter.INDENT;
import static dagger.internal.codegen.base.Scopes.getReadableSource;
import static dagger.internal.codegen.base.Scopes.scopesOf;
import static dagger.internal.codegen.base.Scopes.singletonScope;
import static dagger.internal.codegen.base.Util.reentrantComputeIfAbsent;
import static dagger.internal.codegen.extension.DaggerStreams.toImmutableSet;
import static dagger.internal.codegen.extension.DaggerStreams.toImmutableSetMultimap;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static javax.tools.Diagnostic.Kind.ERROR;

import com.google.auto.common.MoreElements;
import com.google.auto.common.MoreTypes;
import com.google.common.base.Equivalence.Wrapper;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Sets;
import dagger.internal.codegen.binding.ComponentCreatorDescriptor;
import dagger.internal.codegen.binding.ComponentDescriptor;
import dagger.internal.codegen.binding.ComponentRequirement;
import dagger.internal.codegen.binding.ComponentRequirement.NullPolicy;
import dagger.internal.codegen.binding.ContributionBinding;
import dagger.internal.codegen.binding.ErrorMessages;
import dagger.internal.codegen.binding.ErrorMessages.ComponentCreatorMessages;
import dagger.internal.codegen.binding.MethodSignatureFormatter;
import dagger.internal.codegen.binding.ModuleDescriptor;
import dagger.internal.codegen.compileroption.CompilerOptions;
import dagger.internal.codegen.compileroption.ValidationType;
import dagger.internal.codegen.langmodel.DaggerElements;
import dagger.internal.codegen.langmodel.DaggerTypes;
import dagger.model.Scope;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.StringJoiner;
import javax.inject.Inject;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeMirror;
import javax.tools.Diagnostic;

/**
 * Reports errors in the component hierarchy.
 *
 * <ul>
 *   <li>Validates scope hierarchy of component dependencies and subcomponents.
 *   <li>Reports errors if there are component dependency cycles.
 *   <li>Reports errors if any abstract modules have non-abstract instance binding methods.
 *   <li>Validates component creator types.
 * </ul>
 */
// TODO(dpb): Combine with ComponentHierarchyValidator.
public final class ComponentDescriptorValidator {

  private final DaggerElements elements;
  private final DaggerTypes types;
  private final CompilerOptions compilerOptions;
  private final MethodSignatureFormatter methodSignatureFormatter;
  private final ComponentHierarchyValidator componentHierarchyValidator;

  @Inject
  ComponentDescriptorValidator(
      DaggerElements elements,
      DaggerTypes types,
      CompilerOptions compilerOptions,
      MethodSignatureFormatter methodSignatureFormatter,
      ComponentHierarchyValidator componentHierarchyValidator) {
    this.elements = elements;
    this.types = types;
    this.compilerOptions = compilerOptions;
    this.methodSignatureFormatter = methodSignatureFormatter;
    this.componentHierarchyValidator = componentHierarchyValidator;
  }

  /**
   * Walks the given (root) component and all of its child components, collecting validation
   * messages, and merges in the hierarchy validator's report.
   */
  public ValidationReport<TypeElement> validate(ComponentDescriptor component) {
    ComponentValidation validation = new ComponentValidation(component);
    validation.visitComponent(component);
    validation.report(component).addSubreport(componentHierarchyValidator.validate(component));
    return validation.buildReport();
  }

  /** Single-use visitor that accumulates one report builder per visited (sub)component. */
  private final class ComponentValidation {
    final ComponentDescriptor rootComponent;
    // One report builder per (sub)component, in visitation order.
    final Map<ComponentDescriptor, ValidationReport.Builder<TypeElement>> reports =
        new LinkedHashMap<>();

    ComponentValidation(ComponentDescriptor rootComponent) {
      this.rootComponent = checkNotNull(rootComponent);
    }

    /** Returns a report that contains all validation messages found during traversal. */
    ValidationReport<TypeElement> buildReport() {
      ValidationReport.Builder<TypeElement> report =
          ValidationReport.about(rootComponent.typeElement());
      reports.values().forEach(subreport -> report.addSubreport(subreport.build()));
      return report.build();
    }

    /** Returns the report builder for a (sub)component. */
    private ValidationReport.Builder<TypeElement> report(ComponentDescriptor component) {
      return reentrantComputeIfAbsent(
          reports, component, descriptor -> ValidationReport.about(descriptor.typeElement()));
    }

    private void reportComponentItem(
        Diagnostic.Kind kind, ComponentDescriptor component, String message) {
      report(component)
          .addItem(message, kind, component.typeElement(), component.annotation().annotation());
    }

    private void reportComponentError(ComponentDescriptor component, String error) {
      reportComponentItem(ERROR, component, error);
    }

    /** Runs every per-component check, then recurses into child components. */
    void visitComponent(ComponentDescriptor component) {
      validateDependencyScopes(component);
      validateComponentDependencyHierarchy(component);
      validateModules(component);
      validateCreators(component);
      component.childComponents().forEach(this::visitComponent);
    }

    /** Validates that component dependencies do not form a cycle. */
    private void validateComponentDependencyHierarchy(ComponentDescriptor component) {
      validateComponentDependencyHierarchy(component, component.typeElement(), new ArrayDeque<>());
    }

    /** Recursive method to validate that component dependencies do not form a cycle. */
    private void validateComponentDependencyHierarchy(
        ComponentDescriptor component, TypeElement dependency, Deque<TypeElement> dependencyStack) {
      if (dependencyStack.contains(dependency)) {
        // Current component has already appeared in the component chain.
        StringBuilder message = new StringBuilder();
        message.append(component.typeElement().getQualifiedName());
        message.append(" contains a cycle in its component dependencies:\n");
        dependencyStack.push(dependency);
        appendIndentedComponentsList(message, dependencyStack);
        dependencyStack.pop();
        reportComponentItem(
            compilerOptions.scopeCycleValidationType().diagnosticKind().get(),
            component,
            message.toString());
      } else {
        rootComponentAnnotation(dependency)
            .ifPresent(
                componentAnnotation -> {
                  dependencyStack.push(dependency);
                  for (TypeElement nextDependency : componentAnnotation.dependencies()) {
                    validateComponentDependencyHierarchy(
                        component, nextDependency, dependencyStack);
                  }
                  dependencyStack.pop();
                });
      }
    }

    /**
     * Validates that among the dependencies are at most one scoped dependency, that there are no
     * cycles within the scoping chain, and that singleton components have no scoped dependencies.
     */
    private void validateDependencyScopes(ComponentDescriptor component) {
      ImmutableSet<Scope> scopes = component.scopes();
      ImmutableSet<TypeElement> scopedDependencies =
          scopedTypesIn(
              component
                  .dependencies()
                  .stream()
                  .map(ComponentRequirement::typeElement)
                  .collect(toImmutableSet()));
      if (!scopes.isEmpty()) {
        Scope singletonScope = singletonScope(elements);
        // Dagger 1.x scope compatibility requires this be suppress-able.
        if (compilerOptions.scopeCycleValidationType().diagnosticKind().isPresent()
            && scopes.contains(singletonScope)) {
          // Singleton is a special-case representing the longest lifetime, and therefore
          // @Singleton components may not depend on scoped components
          if (!scopedDependencies.isEmpty()) {
            StringBuilder message =
                new StringBuilder(
                    "This @Singleton component cannot depend on scoped components:\n");
            appendIndentedComponentsList(message, scopedDependencies);
            reportComponentItem(
                compilerOptions.scopeCycleValidationType().diagnosticKind().get(),
                component,
                message.toString());
          }
        } else if (scopedDependencies.size() > 1) {
          // Scoped components may depend on at most one scoped component.
          StringBuilder message = new StringBuilder();
          for (Scope scope : scopes) {
            message.append(getReadableSource(scope)).append(' ');
          }
          message
              .append(component.typeElement().getQualifiedName())
              .append(" depends on more than one scoped component:\n");
          appendIndentedComponentsList(message, scopedDependencies);
          reportComponentError(component, message.toString());
        } else {
          // Dagger 1.x scope compatibility requires this be suppress-able.
          if (!compilerOptions.scopeCycleValidationType().equals(ValidationType.NONE)) {
            validateDependencyScopeHierarchy(
                component, component.typeElement(), new ArrayDeque<>(), new ArrayDeque<>());
          }
        }
      } else {
        // Scopeless components may not depend on scoped components.
        if (!scopedDependencies.isEmpty()) {
          StringBuilder message =
              new StringBuilder(component.typeElement().getQualifiedName())
                  .append(" (unscoped) cannot depend on scoped components:\n");
          appendIndentedComponentsList(message, scopedDependencies);
          reportComponentError(component, message.toString());
        }
      }
    }

    /** Reports an error for each abstract module that declares instance binding methods. */
    private void validateModules(ComponentDescriptor component) {
      for (ModuleDescriptor module : component.modules()) {
        if (module.moduleElement().getModifiers().contains(Modifier.ABSTRACT)) {
          for (ContributionBinding binding : module.bindings()) {
            if (binding.requiresModuleInstance()) {
              report(component).addError(abstractModuleHasInstanceBindingMethodsError(module));
              break;
            }
          }
        }
      }
    }

    private String abstractModuleHasInstanceBindingMethodsError(ModuleDescriptor module) {
      String methodAnnotations;
      switch (module.kind()) {
        case MODULE:
          methodAnnotations = "@Provides";
          break;
        case PRODUCER_MODULE:
          methodAnnotations = "@Provides or @Produces";
          break;
        default:
          throw new AssertionError(module.kind());
      }
      return String.format(
          "%s is abstract and has instance %s methods. Consider making the methods static or "
              + "including a non-abstract subclass of the module instead.",
          module.moduleElement(), methodAnnotations);
    }

    /** Validates the component's creator (builder/factory), if it declares one. */
    private void validateCreators(ComponentDescriptor component) {
      if (!component.creatorDescriptor().isPresent()) {
        // If no builder, nothing to validate.
        return;
      }

      ComponentCreatorDescriptor creator = component.creatorDescriptor().get();
      ComponentCreatorMessages messages = ErrorMessages.creatorMessagesFor(creator.annotation());

      // Requirements for modules and dependencies that the creator can set
      Set<ComponentRequirement> creatorModuleAndDependencyRequirements =
          creator.moduleAndDependencyRequirements();
      // Modules and dependencies the component requires
      Set<ComponentRequirement> componentModuleAndDependencyRequirements =
          component.dependenciesAndConcreteModules();

      // Requirements that the creator can set that don't match any requirements that the component
      // actually has.
      Set<ComponentRequirement> inapplicableRequirementsOnCreator =
          Sets.difference(
              creatorModuleAndDependencyRequirements, componentModuleAndDependencyRequirements);

      DeclaredType container = asDeclared(creator.typeElement().asType());

      if (!inapplicableRequirementsOnCreator.isEmpty()) {
        Collection<Element> excessElements =
            Multimaps.filterKeys(
                    creator.unvalidatedRequirementElements(), in(inapplicableRequirementsOnCreator))
                .values();
        String formatted =
            excessElements.stream()
                .map(element -> formatElement(element, container))
                .collect(joining(", ", "[", "]"));
        report(component)
            .addError(String.format(messages.extraSetters(), formatted), creator.typeElement());
      }

      // Component requirements that the creator must be able to set
      Set<ComponentRequirement> mustBePassed =
          Sets.filter(
              componentModuleAndDependencyRequirements,
              input -> input.nullPolicy(elements, types).equals(NullPolicy.THROW));
      // Component requirements that the creator must be able to set, but can't
      Set<ComponentRequirement> missingRequirements =
          Sets.difference(mustBePassed, creatorModuleAndDependencyRequirements);

      if (!missingRequirements.isEmpty()) {
        report(component)
            .addError(
                String.format(
                    messages.missingSetters(),
                    missingRequirements.stream().map(ComponentRequirement::type).collect(toList())),
                creator.typeElement());
      }

      // Validate that declared creator requirements (modules, dependencies) have unique types.
      ImmutableSetMultimap<Wrapper<TypeMirror>, Element> declaredRequirementsByType =
          Multimaps.filterKeys(
                  creator.unvalidatedRequirementElements(),
                  creatorModuleAndDependencyRequirements::contains)
              .entries().stream()
              .collect(
                  toImmutableSetMultimap(entry -> entry.getKey().wrappedType(), Entry::getValue));
      declaredRequirementsByType
          .asMap()
          .forEach(
              (typeWrapper, elementsForType) -> {
                if (elementsForType.size() > 1) {
                  TypeMirror type = typeWrapper.get();
                  // TODO(cgdecker): Attach this error message to the factory method rather than
                  // the component type if the elements are factory method parameters AND the
                  // factory method is defined by the factory type itself and not by a supertype.
                  report(component)
                      .addError(
                          String.format(
                              messages.multipleSettersForModuleOrDependencyType(),
                              type,
                              transform(
                                  elementsForType, element -> formatElement(element, container))),
                          creator.typeElement());
                }
              });

      // TODO(cgdecker): Duplicate binding validation should handle the case of multiple elements
      // that set the same bound-instance Key, but validating that here would make it fail faster
      // for subcomponents.
    }

    private String formatElement(Element element, DeclaredType container) {
      // TODO(cgdecker): Extract some or all of this to another class?
      // But note that it does different formatting for parameters than
      // DaggerElements.elementToString(Element).
      switch (element.getKind()) {
        case METHOD:
          return methodSignatureFormatter.format(
              MoreElements.asExecutable(element), Optional.of(container));
        case PARAMETER:
          return formatParameter(MoreElements.asVariable(element), container);
        default:
          // This method shouldn't be called with any other type of element.
          throw new AssertionError();
      }
    }

    private String formatParameter(VariableElement parameter, DeclaredType container) {
      // TODO(cgdecker): Possibly leave the type (and annotations?) off of the parameters here and
      // just use their names, since the type will be redundant in the context of the error message.
      StringJoiner joiner = new StringJoiner(" ");
      parameter.getAnnotationMirrors().stream().map(Object::toString).forEach(joiner::add);
      TypeMirror parameterType = resolveParameterType(parameter, container);
      return joiner
          .add(stripCommonTypePrefixes(parameterType.toString()))
          .add(parameter.getSimpleName())
          .toString();
    }

    /** Resolves the parameter's type as a member of {@code container} (substituting type vars). */
    private TypeMirror resolveParameterType(VariableElement parameter, DeclaredType container) {
      ExecutableElement method = MoreElements.asExecutable(parameter.getEnclosingElement());
      int parameterIndex = method.getParameters().indexOf(parameter);
      ExecutableType methodType = MoreTypes.asExecutable(types.asMemberOf(container, method));
      return methodType.getParameterTypes().get(parameterIndex);
    }

    /**
     * Validates that scopes do not participate in a scoping cycle - that is to say, scoped
     * components are in a hierarchical relationship terminating with Singleton.
     *
     * <p>As a side-effect, this means scoped components cannot have a dependency cycle between
     * themselves, since a component's presence within its own dependency path implies a cyclical
     * relationship between scopes. However, cycles in component dependencies are explicitly checked
     * in {@link #validateComponentDependencyHierarchy(ComponentDescriptor)}.
     */
    private void validateDependencyScopeHierarchy(
        ComponentDescriptor component,
        TypeElement dependency,
        Deque<ImmutableSet<Scope>> scopeStack,
        Deque<TypeElement> scopedDependencyStack) {
      ImmutableSet<Scope> scopes = scopesOf(dependency);
      if (stackOverlaps(scopeStack, scopes)) {
        scopedDependencyStack.push(dependency);
        // Current scope has already appeared in the component chain.
        StringBuilder message = new StringBuilder();
        message.append(component.typeElement().getQualifiedName());
        message.append(" depends on scoped components in a non-hierarchical scope ordering:\n");
        appendIndentedComponentsList(message, scopedDependencyStack);
        if (compilerOptions.scopeCycleValidationType().diagnosticKind().isPresent()) {
          reportComponentItem(
              compilerOptions.scopeCycleValidationType().diagnosticKind().get(),
              component,
              message.toString());
        }
        scopedDependencyStack.pop();
      } else {
        // TODO(beder): transitively check scopes of production components too.
        rootComponentAnnotation(dependency)
            .filter(componentAnnotation -> !componentAnnotation.isProduction())
            .ifPresent(
                componentAnnotation -> {
                  ImmutableSet<TypeElement> scopedDependencies =
                      scopedTypesIn(componentAnnotation.dependencies());
                  if (scopedDependencies.size() == 1) {
                    // empty can be ignored (base-case), and > 1 is a separately-reported error.
                    scopeStack.push(scopes);
                    scopedDependencyStack.push(dependency);
                    validateDependencyScopeHierarchy(
                        component,
                        getOnlyElement(scopedDependencies),
                        scopeStack,
                        scopedDependencyStack);
                    scopedDependencyStack.pop();
                    scopeStack.pop();
                  }
                }); // else: we skip component dependencies which are not components
      }
    }

    /** Returns true if any set already on the stack shares an element with {@code set}. */
    private <T> boolean stackOverlaps(Deque<ImmutableSet<T>> stack, ImmutableSet<T> set) {
      for (ImmutableSet<T> entry : stack) {
        if (!Sets.intersection(entry, set).isEmpty()) {
          return true;
        }
      }
      return false;
    }

    /** Appends and formats a list of indented component types (with their scope annotations). */
    private void appendIndentedComponentsList(StringBuilder message, Iterable<TypeElement> types) {
      for (TypeElement scopedComponent : types) {
        message.append(INDENT);
        for (Scope scope : scopesOf(scopedComponent)) {
          message.append(getReadableSource(scope)).append(' ');
        }
        message
            .append(stripCommonTypePrefixes(scopedComponent.getQualifiedName().toString()))
            .append('\n');
      }
    }

    /**
     * Returns a set of type elements containing only those found in the input set that have a
     * scoping annotation.
     */
    private ImmutableSet<TypeElement> scopedTypesIn(Collection<TypeElement> types) {
      return types.stream().filter(type -> !scopesOf(type).isEmpty()).collect(toImmutableSet());
    }
  }
}
Java
// Home page hero: a vertically stacked, fully centered column.
// Relies on theme variables ($colorTheme, $s1) defined elsewhere — confirm in the variables partial.
.home {
  display: flex;
  flex-direction: column;
  justify-content: center;
  align-items: center;
  padding-top: 5rem;
  text-align: center;
  color: $colorTheme;
}

// Vertical rhythm between stacked links.
.link {
  margin-bottom: $s1;
}
Java
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Windows.Forms;

namespace ungarc0r
{
    static class Program
    {
        /// <summary>
        /// The main entry point for the application.
        /// Sets up standard WinForms rendering options and starts the
        /// message loop with <see cref="Form1"/> as the main window.
        /// </summary>
        [STAThread] // WinForms requires a single-threaded apartment for the UI thread.
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);
            Application.Run(new Form1());
        }
    }
}
Java
"""Interactive generator for Pegasus example/tutorial workflow directories.

Asks the user a series of questions on stdin and renders Jinja2 templates
from a shared template directory into a freshly created workflow directory.
"""

import errno
import os
import pwd
import shutil
import sys


class TutorialEnv:
    """Execution environments a tutorial workflow can be set up for.

    Each constant is a ``(human-readable label, template key)`` pair; the
    template key is what the rest of the code matches on.
    """

    LOCAL_MACHINE = ("Local Machine Condor Pool", "submit-host")
    USC_HPCC_CLUSTER = ("USC HPCC Cluster", "usc-hpcc")
    OSG_FROM_ISI = ("OSG from ISI submit node", "osg")
    XSEDE_BOSCO = ("XSEDE, with Bosco", "xsede-bosco")
    BLUEWATERS_GLITE = ("Bluewaters, with Glite", "bw-glite")
    TACC_WRANGLER = ("TACC Wrangler with Glite", "wrangler-glite")
    OLCF_TITAN = ("OLCF TITAN with Glite", "titan-glite")
    OLCF_SUMMIT_KUBERNETES_BOSCO = (
        "OLCF Summit from Kubernetes using BOSCO",
        "summit-kub-bosco",
    )


class TutorialExample:
    """Available tutorial example workflows as ``(label, template key)`` pairs."""

    PROCESS = ("Process", "process")
    PIPELINE = ("Pipeline", "pipeline")
    SPLIT = ("Split", "split")
    MERGE = ("Merge", "merge")
    EPA = ("EPA (requires R)", "r-epa")
    DIAMOND = ("Diamond", "diamond")
    CONTAINER = ("Population Modeling using Containers", "population")
    MPI = ("MPI Hello World", "mpi-hw")


def choice(question, options, default):
    """Ask the user to choose from a short list of named options.

    Returns `default` on empty input; re-prompts on unrecognized input.
    """
    while True:
        sys.stdout.write("{} ({}) [{}]: ".format(question, "/".join(options), default))
        answer = sys.stdin.readline().strip()
        if len(answer) == 0:
            return default
        for opt in options:
            if answer == opt:
                return answer


def yesno(question, default="y"):
    """Ask the user a yes/no question; return True for "y", False for "n".

    Empty input selects `default`; any other input re-prompts.
    """
    while True:
        sys.stdout.write("{} (y/n) [{}]: ".format(question, default))
        answer = sys.stdin.readline().strip().lower()
        if len(answer) == 0:
            answer = default
        if answer == "y":
            return True
        elif answer == "n":
            return False


def query(question, default=None):
    """Ask the user a free-form question and return the response.

    Spaces in the answer are replaced with underscores so answers are safe
    to embed in file names and paths. Empty input returns `default` if one
    was given, otherwise the question is asked again.
    """
    while True:
        if default:
            sys.stdout.write("{} [{}]: ".format(question, default))
        else:
            sys.stdout.write("%s: " % question)
        answer = sys.stdin.readline().strip().replace(" ", "_")
        if answer == "":
            if default:
                return default
        else:
            return answer


def optionlist(question, options, default=0):
    """Ask the user to choose from a numbered list of options.

    `options` is a sequence of ``(label, value)`` pairs; the chosen value is
    returned. Empty input selects ``options[default]``; non-numeric or
    out-of-range input re-prompts.
    """
    for i, option in enumerate(options):
        print("%d: %s" % (i + 1, option[0]))
    while True:
        sys.stdout.write("%s (1-%d) [%d]: " % (question, len(options), default + 1))
        answer = sys.stdin.readline().strip()
        if len(answer) == 0:
            return options[default][1]
        try:
            optno = int(answer)
            if optno > 0 and optno <= len(options):
                return options[optno - 1][1]
        except Exception:
            # Non-numeric input: fall through and prompt again.
            pass


class Workflow:
    """Collects configuration interactively and generates a workflow directory."""

    def __init__(self, workflowdir, sharedir):
        # Imported lazily so this module can be imported (e.g. for -h or in
        # tests) on systems where jinja2 is not installed.
        from jinja2 import Environment, FileSystemLoader

        self.jinja = Environment(loader=FileSystemLoader(sharedir), trim_blocks=True)
        self.name = os.path.basename(workflowdir)
        self.workflowdir = workflowdir
        self.sharedir = sharedir
        self.properties = {}
        self.home = os.environ["HOME"]
        self.user = pwd.getpwuid(os.getuid())[0]
        self.tutorial = None
        self.generate_tutorial = False
        self.tutorial_setup = None
        self.compute_queue = "default"
        self.project = "MYPROJ123"
        sysname, _, _, _, machine = os.uname()
        if sysname == "Darwin":
            self.os = "MACOSX"
        else:
            # Probably Linux
            self.os = sysname.upper()
        self.arch = machine

    def copy_template(self, template, dest, mode=0o644):
        "Copy template to dest in workflowdir with mode"
        path = os.path.join(self.workflowdir, dest)
        t = self.jinja.get_template(template)
        # Render with every instance attribute available as a template variable.
        t.stream(**self.__dict__).dump(path)
        os.chmod(path, mode)

    def copy_dir(self, src, dest):
        "Recursively copy src (relative to sharedir unless absolute) into workflowdir"
        if not src.startswith("/"):
            src = os.path.join(self.sharedir, src)
        try:
            dest = os.path.join(self.workflowdir, dest)
            shutil.copytree(src, dest)
        except OSError as exc:
            # src was a single file rather than a directory: copy it directly.
            if exc.errno == errno.ENOTDIR:
                shutil.copy(src, dest)
            else:
                raise

    def mkdir(self, path):
        "Make relative directory in workflowdir"
        path = os.path.join(self.workflowdir, path)
        if not os.path.exists(path):
            os.makedirs(path)

    def configure(self):
        "Interactively gather all settings needed to generate the workflow"
        # The tutorial is a special case
        if yesno("Do you want to generate a tutorial workflow?", "n"):
            self.config = "tutorial"
            self.daxgen = "tutorial"
            self.generate_tutorial = True

            # determine the environment to setup tutorial for
            self.tutorial_setup = optionlist(
                "What environment is tutorial to be setup for?",
                [
                    TutorialEnv.LOCAL_MACHINE,
                    TutorialEnv.USC_HPCC_CLUSTER,
                    TutorialEnv.OSG_FROM_ISI,
                    TutorialEnv.XSEDE_BOSCO,
                    TutorialEnv.BLUEWATERS_GLITE,
                    TutorialEnv.TACC_WRANGLER,
                    TutorialEnv.OLCF_TITAN,
                    TutorialEnv.OLCF_SUMMIT_KUBERNETES_BOSCO,
                ],
            )

            # figure out what example options to provide
            examples = [
                TutorialExample.PROCESS,
                TutorialExample.PIPELINE,
                TutorialExample.SPLIT,
                TutorialExample.MERGE,
                TutorialExample.EPA,
                TutorialExample.CONTAINER,
            ]
            # Diamond is not available on OSG.
            if self.tutorial_setup != "osg":
                examples.append(TutorialExample.DIAMOND)

            # MPI example (and an allocation/project id) only for the
            # cluster-style setups.
            if self.tutorial_setup in [
                "bw-glite",
                "wrangler-glite",
                "titan-glite",
                "summit-kub-bosco",
            ]:
                examples.append(TutorialExample.MPI)
                self.project = query(
                    "What project your jobs should run under. For example on TACC there are like : TG-DDM160003 ?"
                )

            self.tutorial = optionlist("What tutorial workflow do you want?", examples)
            self.setup_tutorial()
            return

        # Determine which DAX generator API to use
        self.daxgen = choice(
            "What DAX generator API do you want to use?",
            ["python", "perl", "java", "r"],
            "python",
        )

        # Determine what kind of site catalog we need to generate
        self.config = optionlist(
            "What does your computing infrastructure look like?",
            [
                ("Local Machine Condor Pool", "condorpool"),
                ("Remote Cluster using Globus GRAM", "globus"),
                ("Remote Cluster using CREAMCE", "creamce"),
                ("Local PBS Cluster with Glite", "glite"),
                ("Remote PBS Cluster with BOSCO and SSH", "bosco"),
            ],
        )

        # Find out some information about the site
        self.sitename = query("What do you want to call your compute site?", "compute")
        self.os = choice(
            "What OS does your compute site have?", ["LINUX", "MACOSX"], self.os
        )
        self.arch = choice(
            "What architecture does your compute site have?",
            ["x86_64", "x86"],
            self.arch,
        )

    def setup_tutorial(self):
        """
        Set up tutorial for pre-defined computing environments
        :return:
        """
        if self.tutorial_setup is None:
            self.tutorial_setup = "submit-host"

        if self.tutorial_setup == "submit-host":
            self.sitename = "condorpool"
        elif self.tutorial_setup == "usc-hpcc":
            self.sitename = "usc-hpcc"
            self.config = "glite"
            self.compute_queue = "quick"
            # for running the whole workflow as mpi job
            self.properties["pegasus.job.aggregator"] = "mpiexec"
        elif self.tutorial_setup == "osg":
            self.sitename = "osg"
            self.os = "linux"
            if not yesno("Do you want to use Condor file transfers", "y"):
                self.staging_site = "isi_workflow"
        elif self.tutorial_setup == "xsede-bosco":
            self.sitename = "condorpool"
        elif self.tutorial_setup == "bw-glite":
            self.sitename = "bluewaters"
            self.config = "glite"
            self.compute_queue = "normal"
        elif self.tutorial_setup == "wrangler-glite":
            self.sitename = "wrangler"
            self.config = "glite"
            self.compute_queue = "normal"
        elif self.tutorial_setup == "titan-glite":
            self.sitename = "titan"
            self.config = "glite"
            self.compute_queue = "titan"
        elif self.tutorial_setup == "summit-kub-bosco":
            self.sitename = "summit"
            self.config = "bosco"
            self.compute_queue = "batch"

        return

    def generate(self):
        "Create the workflow directory and render all templates into it"
        os.makedirs(self.workflowdir)

        # The population (container) example ships its own input directory,
        # so only the other examples get empty input/output dirs here.
        if self.tutorial != "population":
            self.mkdir("input")
            self.mkdir("output")

        if self.generate_tutorial:
            self.copy_template("%s/tc.txt" % self.tutorial, "tc.txt")
            if self.tutorial == "r-epa":
                self.copy_template("%s/daxgen.R" % self.tutorial, "daxgen.R")
            elif self.tutorial != "mpi-hw":
                # mpi-hw renders its daxgen from a template further below.
                self.copy_template("%s/daxgen.py" % self.tutorial, "daxgen.py")
            if self.tutorial == "diamond":
                # Executables used by the diamond workflow
                self.mkdir("bin")
                self.copy_template(
                    "diamond/transformation.py", "bin/preprocess", mode=0o755
                )
                self.copy_template(
                    "diamond/transformation.py", "bin/findrange", mode=0o755
                )
                self.copy_template(
                    "diamond/transformation.py", "bin/analyze", mode=0o755
                )
                # Diamond input file
                self.copy_template("diamond/f.a", "input/f.a")
            elif self.tutorial == "split":
                # Split workflow input file
                self.mkdir("bin")
                self.copy_template("split/pegasus.html", "input/pegasus.html")
            elif self.tutorial == "r-epa":
                # Executables used by the R-EPA workflow
                self.mkdir("bin")
                self.copy_template(
                    "r-epa/epa-wrapper.sh", "bin/epa-wrapper.sh", mode=0o755
                )
                self.copy_template("r-epa/setupvar.R", "bin/setupvar.R", mode=0o755)
                self.copy_template(
                    "r-epa/weighted.average.R", "bin/weighted.average.R", mode=0o755
                )
                self.copy_template(
                    "r-epa/cumulative.percentiles.R",
                    "bin/cumulative.percentiles.R",
                    mode=0o755,
                )
            elif self.tutorial == "population":
                self.copy_template("%s/Dockerfile" % self.tutorial, "Dockerfile")
                self.copy_template("%s/Singularity" % self.tutorial, "Singularity")
                self.copy_template(
                    "%s/tc.txt.containers" % self.tutorial, "tc.txt.containers"
                )
                self.copy_dir("%s/scripts" % self.tutorial, "scripts")
                self.copy_dir("%s/data" % self.tutorial, "input")
            elif self.tutorial == "mpi-hw":
                # copy the mpi wrapper, c code and mpi example
                # Executables used by the mpi-hw workflow
                self.mkdir("bin")
                self.copy_template(
                    "%s/pegasus-mpi-hw.c" % self.tutorial, "pegasus-mpi-hw.c"
                )
                self.copy_template("%s/Makefile" % self.tutorial, "Makefile")
                self.copy_template("%s/daxgen.py.template" % self.tutorial, "daxgen.py")
                self.copy_template(
                    "%s/mpi-hello-world-wrapper" % self.tutorial,
                    "bin/mpi-hello-world-wrapper",
                    mode=0o755,
                )
                self.copy_template("split/pegasus.html", "input/f.in")
        else:
            self.copy_template("tc.txt", "tc.txt")
            if self.daxgen == "python":
                self.copy_template("daxgen/daxgen.py", "daxgen.py")
            elif self.daxgen == "perl":
                self.copy_template("daxgen/daxgen.pl", "daxgen.pl")
            elif self.daxgen == "java":
                self.copy_template("daxgen/DAXGen.java", "DAXGen.java")
            elif self.daxgen == "r":
                self.copy_template("daxgen/daxgen.R", "daxgen.R")
            else:
                assert False

        # Files shared by every generated workflow.
        self.copy_template("sites.xml", "sites.xml")
        self.copy_template("plan_dax.sh", "plan_dax.sh", mode=0o755)
        self.copy_template("plan_cluster_dax.sh", "plan_cluster_dax.sh", mode=0o755)
        self.copy_template("generate_dax.sh", "generate_dax.sh", mode=0o755)
        self.copy_template("README.md", "README.md")
        self.copy_template("rc.txt", "rc.txt")
        self.copy_template("pegasus.properties", "pegasus.properties")

        if self.tutorial == "diamond":
            # PMC wrappers for cluster setups. NOTE: the original code had a
            # second, unreachable "wrangler-glite" branch here; it has been
            # removed (the first branch already handles it).
            if self.tutorial_setup == "wrangler-glite":
                self.copy_template(
                    "pmc-wrapper.wrangler", "bin/pmc-wrapper", mode=0o755
                )
            elif self.tutorial_setup == "titan-glite":
                self.copy_template("pmc-wrapper.titan", "bin/pmc-wrapper", mode=0o755)
            elif self.tutorial_setup == "summit-kub-bosco":
                self.copy_template("pmc-wrapper.summit", "bin/pmc-wrapper", mode=0o755)

        if self.generate_tutorial:
            sys.stdout.write(
                "Pegasus Tutorial setup for example workflow - %s for execution on %s in directory %s\n"
                % (self.tutorial, self.tutorial_setup, self.workflowdir)
            )


def usage():
    "Print command-line usage"
    print("Usage: %s WORKFLOW_DIR" % sys.argv[0])


def main(pegasus_share_dir):
    """Entry point: validate sys.argv, then configure and generate a workflow.

    Exits with status 1 on bad arguments or if WORKFLOW_DIR already exists.
    """
    if len(sys.argv) != 2:
        usage()
        sys.exit(1)

    if "-h" in sys.argv:
        usage()
        sys.exit(1)

    workflowdir = sys.argv[1]
    if os.path.exists(workflowdir):
        print("ERROR: WORKFLOW_DIR '%s' already exists" % workflowdir)
        sys.exit(1)

    workflowdir = os.path.abspath(workflowdir)
    sharedir = os.path.join(pegasus_share_dir, "init")
    w = Workflow(workflowdir, sharedir)
    w.configure()
    w.generate()
Java
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

using gagvr = Google.Ads.GoogleAds.V10.Resources;

namespace Google.Ads.GoogleAds.V10.Services
{
    public partial class BiddingDataExclusionOperation
    {
        /// <summary>
        /// <see cref="gagvr::BiddingDataExclusionName"/>-typed view over the <see cref="Remove"/> resource name
        /// property.
        /// </summary>
        public gagvr::BiddingDataExclusionName RemoveAsBiddingDataExclusionName
        {
            // allowUnparsed: true keeps resource names that do not match the
            // expected pattern rather than throwing; null/empty maps to null.
            get => string.IsNullOrEmpty(Remove) ? null : gagvr::BiddingDataExclusionName.Parse(Remove, allowUnparsed: true);
            // Setting null clears the underlying string property to "".
            set => Remove = value?.ToString() ?? "";
        }
    }

    public partial class MutateBiddingDataExclusionsResult
    {
        /// <summary>
        /// <see cref="gagvr::BiddingDataExclusionName"/>-typed view over the <see cref="ResourceName"/> resource name
        /// property.
        /// </summary>
        public gagvr::BiddingDataExclusionName ResourceNameAsBiddingDataExclusionName
        {
            // Same parse/format round-trip as RemoveAsBiddingDataExclusionName.
            get => string.IsNullOrEmpty(ResourceName) ? null : gagvr::BiddingDataExclusionName.Parse(ResourceName, allowUnparsed: true);
            set => ResourceName = value?.ToString() ?? "";
        }
    }
}
Java
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/> <meta http-equiv="X-UA-Compatible" content="IE=9"/> <meta name="generator" content="Doxygen 1.8.7"/> <title>FlatBuffers: Use in C++</title> <link href="tabs.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="jquery.js"></script> <script type="text/javascript" src="dynsections.js"></script> <link href="navtree.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="resize.js"></script> <script type="text/javascript" src="navtree.js"></script> <script type="text/javascript"> $(document).ready(initResizable); $(window).load(resizeHeight); </script> <link href="doxygen.css" rel="stylesheet" type="text/css" /> </head> <body> <div id="top"><!-- do not remove this div, it is closed by doxygen! --> <div id="titlearea"> <table cellspacing="0" cellpadding="0"> <tbody> <tr style="height: 56px;"> <td style="padding-left: 0.5em;"> <div id="projectname">FlatBuffers </div> </td> </tr> </tbody> </table> </div> <!-- end header part --> <!-- Generated by Doxygen 1.8.7 --> </div><!-- top --> <div id="side-nav" class="ui-resizable side-nav-resizable"> <div id="nav-tree"> <div id="nav-tree-contents"> <div id="nav-sync" class="sync"></div> </div> </div> <div id="splitbar" style="-moz-user-select:none;" class="ui-resizable-handle"> </div> </div> <script type="text/javascript"> $(document).ready(function(){initNavTree('md__cpp_usage.html','');}); </script> <div id="doc-content"> <div class="header"> <div class="headertitle"> <div class="title">Use in C++ </div> </div> </div><!--header--> <div class="contents"> <div class="textblock"><p>Assuming you have written a schema using the above language in say <code>mygame.fbs</code> (FlatBuffer Schema, though the extension doesn't matter), you've generated a C++ 
header called <code>mygame_generated.h</code> using the compiler (e.g. <code>flatc -c mygame.fbs</code>), you can now start using this in your program by including the header. As noted, this header relies on <code>flatbuffers/flatbuffers.h</code>, which should be in your include path.</p> <h3>Writing in C++</h3> <p>To start creating a buffer, create an instance of <code>FlatBufferBuilder</code> which will contain the buffer as it grows:</p> <div class="fragment"><div class="line">FlatBufferBuilder fbb;</div> </div><!-- fragment --><p>Before we serialize a Monster, we need to first serialize any objects that are contained there-in, i.e. we serialize the data tree using depth first, pre-order traversal. This is generally easy to do on any tree structures. For example:</p> <div class="fragment"><div class="line"><span class="keyword">auto</span> name = fbb.CreateString(<span class="stringliteral">&quot;MyMonster&quot;</span>);</div> <div class="line"></div> <div class="line"><span class="keywordtype">unsigned</span> <span class="keywordtype">char</span> inv[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };</div> <div class="line"><span class="keyword">auto</span> inventory = fbb.CreateVector(inv, 10);</div> </div><!-- fragment --><p><code>CreateString</code> and <code>CreateVector</code> serialize these two built-in datatypes, and return offsets into the serialized data indicating where they are stored, such that <code>Monster</code> below can refer to them.</p> <p><code>CreateString</code> can also take an <code>std::string</code>, or a <code>const char *</code> with an explicit length, and is suitable for holding UTF-8 and binary data if needed.</p> <p><code>CreateVector</code> can also take an <code>std::vector</code>. The offset it returns is typed, i.e. can only be used to set fields of the correct type below. 
To create a vector of struct objects (which will be stored as contiguous memory in the buffer, use <code>CreateVectorOfStructs</code> instead.</p> <p>To create a vector of nested objects (e.g. tables, strings or other vectors) collect their offsets in a temporary array/vector, then call <code>CreateVector</code> on that (see e.g. the array of strings example in <code>test.cpp</code> <code>CreateFlatBufferTest</code>).</p> <div class="fragment"><div class="line">Vec3 vec(1, 2, 3);</div> </div><!-- fragment --><p><code>Vec3</code> is the first example of code from our generated header. Structs (unlike tables) translate to simple structs in C++, so we can construct them in a familiar way.</p> <p>We have now serialized the non-scalar components of of the monster example, so we could create the monster something like this:</p> <div class="fragment"><div class="line"><span class="keyword">auto</span> mloc = CreateMonster(fbb, &amp;vec, 150, 80, name, inventory, Color_Red, 0, Any_NONE);</div> </div><!-- fragment --><p>Note that we're passing <code>150</code> for the <code>mana</code> field, which happens to be the default value: this means the field will not actually be written to the buffer, since we'll get that value anyway when we query it. This is a nice space savings, since it is very common for fields to be at their default. It means we also don't need to be scared to add fields only used in a minority of cases, since they won't bloat up the buffer sizes if they're not actually used.</p> <p>We do something similarly for the union field <code>test</code> by specifying a <code>0</code> offset and the <code>NONE</code> enum value (part of every union) to indicate we don't actually want to write this field. 
You can use <code>0</code> also as a default for other non-scalar types, such as strings, vectors and tables.</p> <p>Tables (like <code>Monster</code>) give you full flexibility on what fields you write (unlike <code>Vec3</code>, which always has all fields set because it is a <code>struct</code>). If you want even more control over this (i.e. skip fields even when they are not default), instead of the convenient <code>CreateMonster</code> call we can also build the object field-by-field manually:</p> <div class="fragment"><div class="line">MonsterBuilder mb(fbb);</div> <div class="line">mb.add_pos(&amp;vec);</div> <div class="line">mb.add_hp(80);</div> <div class="line">mb.add_name(name);</div> <div class="line">mb.add_inventory(inventory);</div> <div class="line"><span class="keyword">auto</span> mloc = mb.Finish();</div> </div><!-- fragment --><p>We start with a temporary helper class <code>MonsterBuilder</code> (which is defined in our generated code also), then call the various <code>add_</code> methods to set fields, and <code>Finish</code> to complete the object. This is pretty much the same code as you find inside <code>CreateMonster</code>, except we're leaving out a few fields. Fields may also be added in any order, though orderings with fields of the same size adjacent to each other most efficient in size, due to alignment. You should not nest these Builder classes (serialize your data in pre-order).</p> <p>Regardless of whether you used <code>CreateMonster</code> or <code>MonsterBuilder</code>, you now have an offset to the root of your data, and you can finish the buffer using:</p> <div class="fragment"><div class="line">FinishMonsterBuffer(fbb, mloc);</div> </div><!-- fragment --><p>The buffer is now ready to be stored somewhere, sent over the network, be compressed, or whatever you'd like to do with it. 
You can access the start of the buffer with <code>fbb.GetBufferPointer()</code>, and it's size from <code>fbb.GetSize()</code>.</p> <p><code>samples/sample_binary.cpp</code> is a complete code sample similar to the code above, that also includes the reading code below.</p> <h3>Reading in C++</h3> <p>If you've received a buffer from somewhere (disk, network, etc.) you can directly start traversing it using:</p> <div class="fragment"><div class="line"><span class="keyword">auto</span> monster = GetMonster(buffer_pointer);</div> </div><!-- fragment --><p><code>monster</code> is of type <code>Monster *</code>, and points to somewhere <em>inside</em> your buffer (root object pointers are not the same as <code>buffer_pointer</code> !). If you look in your generated header, you'll see it has convenient accessors for all fields, e.g.</p> <div class="fragment"><div class="line">assert(monster-&gt;hp() == 80);</div> <div class="line">assert(monster-&gt;mana() == 150); <span class="comment">// default</span></div> <div class="line">assert(strcmp(monster-&gt;name()-&gt;c_str(), <span class="stringliteral">&quot;MyMonster&quot;</span>) == 0);</div> </div><!-- fragment --><p>These should all be true. 
Note that we never stored a <code>mana</code> value, so it will return the default.</p> <p>To access sub-objects, in this case the <code>Vec3</code>:</p> <div class="fragment"><div class="line"><span class="keyword">auto</span> pos = monster-&gt;pos();</div> <div class="line">assert(pos);</div> <div class="line">assert(pos-&gt;z() == 3);</div> </div><!-- fragment --><p>If we had not set the <code>pos</code> field during serialization, it would be <code>NULL</code>.</p> <p>Similarly, we can access elements of the inventory array:</p> <div class="fragment"><div class="line"><span class="keyword">auto</span> inv = monster-&gt;inventory();</div> <div class="line">assert(inv);</div> <div class="line">assert(inv-&gt;Get(9) == 9);</div> </div><!-- fragment --><h3>Storing maps / dictionaries in a FlatBuffer</h3> <p>FlatBuffers doesn't support maps natively, but there is support to emulate their behavior with vectors and binary search, which means you can have fast lookups directly from a FlatBuffer without having to unpack your data into a <code>std::map</code> or similar.</p> <p>To use it:</p><ul> <li>Designate one of the fields in a table as the "key" field. You do this by setting the <code>key</code> attribute on this field, e.g. <code>name:string (key)</code>. You may only have one key field, and it must be of string or scalar type.</li> <li>Write out tables of this type as usual, collect their offsets in an array or vector.</li> <li>Instead of <code>CreateVector</code>, call <code>CreateVectorOfSortedTables</code>, which will first sort all offsets such that the tables they refer to are sorted by the key field, then serialize it.</li> <li>Now when you're accessing the FlatBuffer, you can use <code>Vector::LookupByKey</code> instead of just <code>Vector::Get</code> to access elements of the vector, e.g.: <code>myvector-&gt;LookupByKey("Fred")</code>, which returns a pointer to the corresponding table type, or <code>nullptr</code> if not found. 
<code>LookupByKey</code> performs a binary search, so should have a similar speed to <code>std::map</code>, though may be faster because of better caching. <code>LookupByKey</code> only works if the vector has been sorted, it will likely not find elements if it hasn't been sorted.</li> </ul> <h3>Direct memory access</h3> <p>As you can see from the above examples, all elements in a buffer are accessed through generated accessors. This is because everything is stored in little endian format on all platforms (the accessor performs a swap operation on big endian machines), and also because the layout of things is generally not known to the user.</p> <p>For structs, layout is deterministic and guaranteed to be the same across platforms (scalars are aligned to their own size, and structs themselves to their largest member), and you are allowed to access this memory directly by using <code>sizeof()</code> and <code>memcpy</code> on the pointer to a struct, or even an array of structs.</p> <p>To compute offsets to sub-elements of a struct, make sure they are structs themselves, as then you can use the pointers to figure out the offset without having to hardcode it. This is handy for use of arrays of structs with calls like <code>glVertexAttribPointer</code> in OpenGL or similar APIs.</p> <p>It is important to note that structs are still little endian on all machines, so only use tricks like this if you can guarantee you're not shipping on a big endian machine (an <code>assert(FLATBUFFERS_LITTLEENDIAN)</code> would be wise).</p> <h3>Access of untrusted buffers</h3> <p>The generated accessor functions access fields over offsets, which is very quick. These offsets are not verified at run-time, so a malformed buffer could cause a program to crash by accessing random memory.</p> <p>When you're processing large amounts of data from a source you know (e.g. 
your own generated data on disk), this is acceptable, but when reading data from the network that can potentially have been modified by an attacker, this is undesirable.</p> <p>For this reason, you can optionally use a buffer verifier before you access the data. This verifier will check all offsets, all sizes of fields, and null termination of strings to ensure that when a buffer is accessed, all reads will end up inside the buffer.</p> <p>Each root type will have a verification function generated for it, e.g. for <code>Monster</code>, you can call:</p> <div class="fragment"><div class="line"><span class="keywordtype">bool</span> ok = VerifyMonsterBuffer(Verifier(buf, len));</div> </div><!-- fragment --><p>if <code>ok</code> is true, the buffer is safe to read.</p> <p>Besides untrusted data, this function may be useful to call in debug mode, as extra insurance against data being corrupted somewhere along the way.</p> <p>While verifying a buffer isn't "free", it is typically faster than a full traversal (since any scalar data is not actually touched), and since it may cause the buffer to be brought into cache before reading, the actual overhead may be even lower than expected.</p> <p>In specialized cases where a denial of service attack is possible, the verifier has two additional constructor arguments that allow you to limit the nesting depth and total amount of tables the verifier may encounter before declaring the buffer malformed.</p> <h2>Text &amp; schema parsing</h2> <p>Using binary buffers with the generated header provides a super low overhead use of FlatBuffer data. 
There are, however, times when you want to use text formats, for example because it interacts better with source control, or you want to give your users easy access to data.</p> <p>Another reason might be that you already have a lot of data in JSON format, or a tool that generates JSON, and if you can write a schema for it, this will provide you an easy way to use that data directly.</p> <p>(see the schema documentation for some specifics on the JSON format accepted).</p> <p>There are two ways to use text formats:</p> <h3>Using the compiler as a conversion tool</h3> <p>This is the preferred path, as it doesn't require you to add any new code to your program, and is maximally efficient since you can ship with binary data. The disadvantage is that it is an extra step for your users/developers to perform, though you might be able to automate it. </p><pre class="fragment">flatc -b myschema.fbs mydata.json </pre><p>This will generate the binary file <code>mydata_wire.bin</code> which can be loaded as before.</p> <h3>Making your program capable of loading text directly</h3> <p>This gives you maximum flexibility. You could even opt to support both, i.e. check for both files, and regenerate the binary from text when required, otherwise just load the binary.</p> <p>This option is currently only available for C++, or Java through JNI.</p> <p>As mentioned in the section "Building" above, this technique requires you to link a few more files into your program, and you'll want to include <code>flatbuffers/idl.h</code>.</p> <p>Load text (either a schema or json) into an in-memory buffer (there is a convenient <code>LoadFile()</code> utility function in <code>flatbuffers/util.h</code> if you wish). 
Construct a parser:</p> <div class="fragment"><div class="line">flatbuffers::Parser parser;</div> </div><!-- fragment --><p>Now you can parse any number of text files in sequence:</p> <div class="fragment"><div class="line">parser.Parse(text_file.c_str());</div> </div><!-- fragment --><p>This works similarly to how the command-line compiler works: a sequence of files parsed by the same <code>Parser</code> object allow later files to reference definitions in earlier files. Typically this means you first load a schema file (which populates <code>Parser</code> with definitions), followed by one or more JSON files.</p> <p>As optional argument to <code>Parse</code>, you may specify a null-terminated list of include paths. If not specified, any include statements try to resolve from the current directory.</p> <p>If there were any parsing errors, <code>Parse</code> will return <code>false</code>, and <code>Parser::err</code> contains a human readable error string with a line number etc, which you should present to the creator of that file.</p> <p>After each JSON file, the <code>Parser::fbb</code> member variable is the <code>FlatBufferBuilder</code> that contains the binary buffer version of that file, that you can access as described above.</p> <p><code>samples/sample_text.cpp</code> is a code sample showing the above operations.</p> <h3>Threading</h3> <p>Reading a FlatBuffer does not touch any memory outside the original buffer, and is entirely read-only (all const), so is safe to access from multiple threads even without synchronisation primitives.</p> <p>Creating a FlatBuffer is not thread safe. All state related to building a FlatBuffer is contained in a FlatBufferBuilder instance, and no memory outside of it is touched. To make this thread safe, either do not share instances of FlatBufferBuilder between threads (recommended), or manually wrap it in synchronisation primites. 
There's no automatic way to accomplish this, by design, as we feel multithreaded construction of a single buffer will be rare, and synchronisation overhead would be costly. </p> </div></div><!-- contents --> </div><!-- doc-content --> <!-- Google Analytics --> <script> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); ga('create', 'UA-49880327-7', 'auto'); ga('send', 'pageview'); </script> </body> </html>
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head><!-- start favicons snippet, use https://realfavicongenerator.net/ --><link rel="apple-touch-icon" sizes="180x180" href="/assets/apple-touch-icon.png"><link rel="icon" type="image/png" sizes="32x32" href="/assets/favicon-32x32.png"><link rel="icon" type="image/png" sizes="16x16" href="/assets/favicon-16x16.png"><link rel="manifest" href="/assets/site.webmanifest"><link rel="mask-icon" href="/assets/safari-pinned-tab.svg" color="#fc4d50"><link rel="shortcut icon" href="/assets/favicon.ico"><meta name="msapplication-TileColor" content="#ffc40d"><meta name="msapplication-config" content="/assets/browserconfig.xml"><meta name="theme-color" content="#ffffff"><!-- end favicons snippet --> <title>com.google.android.exoplayer2.source.ads (ExoPlayer library)</title> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> <link rel="stylesheet" type="text/css" href="../../../../../../jquery/jquery-ui.css" title="Style"> <script type="text/javascript" src="../../../../../../script.js"></script> <script type="text/javascript" src="../../../../../../jquery/jszip/dist/jszip.min.js"></script> <script type="text/javascript" src="../../../../../../jquery/jszip-utils/dist/jszip-utils.min.js"></script> <!--[if IE]> <script type="text/javascript" src="../../../../../../jquery/jszip-utils/dist/jszip-utils-ie.min.js"></script> <![endif]--> <script type="text/javascript" src="../../../../../../jquery/jquery-1.10.2.js"></script> <script type="text/javascript" src="../../../../../../jquery/jquery-ui.js"></script> </head> <body> <h1 class="bar"><a href="../../../../../../com/google/android/exoplayer2/source/ads/package-summary.html" target="classFrame">com.google.android.exoplayer2.source.ads</a></h1> <div class="indexContainer"> <h2 
title="Interfaces">Interfaces</h2> <ul title="Interfaces"> <li><a href="AdsLoader.html" title="interface in com.google.android.exoplayer2.source.ads" target="classFrame"><span class="interfaceName">AdsLoader</span></a></li> <li><a href="AdsLoader.AdViewProvider.html" title="interface in com.google.android.exoplayer2.source.ads" target="classFrame"><span class="interfaceName">AdsLoader.AdViewProvider</span></a></li> <li><a href="AdsLoader.EventListener.html" title="interface in com.google.android.exoplayer2.source.ads" target="classFrame"><span class="interfaceName">AdsLoader.EventListener</span></a></li> </ul> <h2 title="Classes">Classes</h2> <ul title="Classes"> <li><a href="AdPlaybackState.html" title="class in com.google.android.exoplayer2.source.ads" target="classFrame">AdPlaybackState</a></li> <li><a href="AdPlaybackState.AdGroup.html" title="class in com.google.android.exoplayer2.source.ads" target="classFrame">AdPlaybackState.AdGroup</a></li> <li><a href="AdsLoader.OverlayInfo.html" title="class in com.google.android.exoplayer2.source.ads" target="classFrame">AdsLoader.OverlayInfo</a></li> <li><a href="AdsMediaSource.html" title="class in com.google.android.exoplayer2.source.ads" target="classFrame">AdsMediaSource</a></li> <li><a href="SinglePeriodAdTimeline.html" title="class in com.google.android.exoplayer2.source.ads" target="classFrame">SinglePeriodAdTimeline</a></li> </ul> <h2 title="Exceptions">Exceptions</h2> <ul title="Exceptions"> <li><a href="AdsMediaSource.AdLoadException.html" title="class in com.google.android.exoplayer2.source.ads" target="classFrame">AdsMediaSource.AdLoadException</a></li> </ul> <h2 title="Annotation Types">Annotation Types</h2> <ul title="Annotation Types"> <li><a href="AdPlaybackState.AdState.html" title="annotation in com.google.android.exoplayer2.source.ads" target="classFrame">AdPlaybackState.AdState</a></li> <li><a href="AdsLoader.OverlayInfo.Purpose.html" title="annotation in com.google.android.exoplayer2.source.ads" 
target="classFrame">AdsLoader.OverlayInfo.Purpose</a></li> <li><a href="AdsMediaSource.AdLoadException.Type.html" title="annotation in com.google.android.exoplayer2.source.ads" target="classFrame">AdsMediaSource.AdLoadException.Type</a></li> </ul> </div> </body> </html>
Java
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.route53.model.transform;

import java.util.ArrayList;

import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;

import com.amazonaws.services.route53.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;

/**
 * GetHealthCheckLastFailureReasonResult StAX Unmarshaller
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetHealthCheckLastFailureReasonResultStaxUnmarshaller implements
        Unmarshaller<GetHealthCheckLastFailureReasonResult, StaxUnmarshallerContext> {

    /**
     * Consumes StAX events until the result document is exhausted, collecting every
     * HealthCheckObservation element into the result's observation list.
     *
     * @param context StAX parsing context positioned at the response document.
     * @return The populated result object.
     * @throws Exception If the underlying event stream cannot be read.
     */
    public GetHealthCheckLastFailureReasonResult unmarshall(StaxUnmarshallerContext context) throws Exception {
        GetHealthCheckLastFailureReasonResult result = new GetHealthCheckLastFailureReasonResult();

        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;

        // At the start of the document the payload sits one level deeper.
        if (context.isStartOfDocument())
            targetDepth += 1;

        while (true) {
            XMLEvent event = context.nextEvent();

            if (event.isEndDocument())
                return result;

            if (event.isAttribute() || event.isStartElement()) {
                // Wrapper element: initialize the (possibly empty) list.
                if (context.testExpression("HealthCheckObservations", targetDepth)) {
                    result.withHealthCheckObservations(new ArrayList<HealthCheckObservation>());
                    continue;
                }

                // Member element: delegate to the observation unmarshaller.
                if (context.testExpression("HealthCheckObservations/HealthCheckObservation", targetDepth)) {
                    result.withHealthCheckObservations(HealthCheckObservationStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            }
            else if (event.isEndElement()) {
                // Climbing above the starting depth means the result element is complete.
                if (context.getCurrentDepth() < originalDepth)
                    return result;
            }
        }
    }

    private static GetHealthCheckLastFailureReasonResultStaxUnmarshaller instance;

    /**
     * @return The lazily created shared instance (the unmarshaller holds no state).
     */
    public static GetHealthCheckLastFailureReasonResultStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new GetHealthCheckLastFailureReasonResultStaxUnmarshaller();

        return instance;
    }
}
Java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.cache.store.jdbc; import java.nio.ByteBuffer; import java.sql.BatchUpdateException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import javax.cache.Cache; import javax.cache.CacheException; import javax.cache.integration.CacheLoaderException; import javax.cache.integration.CacheWriterException; import javax.sql.DataSource; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cache.CacheTypeFieldMetadata; import org.apache.ignite.cache.CacheTypeMetadata; import 
org.apache.ignite.cache.store.CacheStore; import org.apache.ignite.cache.store.CacheStoreSession; import org.apache.ignite.cache.store.jdbc.dialect.BasicJdbcDialect; import org.apache.ignite.cache.store.jdbc.dialect.DB2Dialect; import org.apache.ignite.cache.store.jdbc.dialect.H2Dialect; import org.apache.ignite.cache.store.jdbc.dialect.JdbcDialect; import org.apache.ignite.cache.store.jdbc.dialect.MySQLDialect; import org.apache.ignite.cache.store.jdbc.dialect.OracleDialect; import org.apache.ignite.cache.store.jdbc.dialect.SQLServerDialect; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.internal.util.tostring.GridToStringExclude; import org.apache.ignite.internal.util.typedef.C1; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiInClosure; import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.lifecycle.LifecycleAware; import org.apache.ignite.resources.CacheStoreSessionResource; import org.apache.ignite.resources.IgniteInstanceResource; import org.apache.ignite.resources.LoggerResource; import org.apache.ignite.transactions.Transaction; import org.jetbrains.annotations.Nullable; import static java.sql.Statement.EXECUTE_FAILED; import static java.sql.Statement.SUCCESS_NO_INFO; /** * Implementation of {@link CacheStore} backed by JDBC. * <p> * Store works with database via SQL dialect. 
Ignite ships with dialects for most popular databases: * <ul> * <li>{@link DB2Dialect} - dialect for IBM DB2 database.</li> * <li>{@link OracleDialect} - dialect for Oracle database.</li> * <li>{@link SQLServerDialect} - dialect for Microsoft SQL Server database.</li> * <li>{@link MySQLDialect} - dialect for Oracle MySQL database.</li> * <li>{@link H2Dialect} - dialect for H2 database.</li> * <li>{@link BasicJdbcDialect} - dialect for any database via plain JDBC.</li> * </ul> * <p> * <h2 class="header">Configuration</h2> * <ul> * <li>Data source (see {@link #setDataSource(DataSource)}</li> * <li>Dialect (see {@link #setDialect(JdbcDialect)}</li> * <li>Maximum batch size for writeAll and deleteAll operations. (see {@link #setBatchSize(int)})</li> * <li>Max workers thread count. These threads are responsible for load cache. (see {@link #setMaximumPoolSize(int)})</li> * <li>Parallel load cache minimum threshold. (see {@link #setParallelLoadCacheMinimumThreshold(int)})</li> * </ul> * <h2 class="header">Java Example</h2> * <pre name="code" class="java"> * ... * CacheConfiguration ccfg = new CacheConfiguration&lt;&gt;(); * * // Configure cache store. * ccfg.setCacheStoreFactory(new FactoryBuilder.SingletonFactory(ConfigurationSnippet.store())); * ccfg.setReadThrough(true); * ccfg.setWriteThrough(true); * * // Configure cache types metadata. * ccfg.setTypeMetadata(ConfigurationSnippet.typeMetadata()); * * cfg.setCacheConfiguration(ccfg); * ... * </pre> */ public abstract class CacheAbstractJdbcStore<K, V> implements CacheStore<K, V>, LifecycleAware { /** Max attempt write count. */ protected static final int MAX_ATTEMPT_WRITE_COUNT = 2; /** Default batch size for put and remove operations. */ protected static final int DFLT_BATCH_SIZE = 512; /** Default batch size for put and remove operations. */ protected static final int DFLT_PARALLEL_LOAD_CACHE_MINIMUM_THRESHOLD = 512; /** Connection attribute property name. 
*/ protected static final String ATTR_CONN_PROP = "JDBC_STORE_CONNECTION"; /** Empty column value. */ protected static final Object[] EMPTY_COLUMN_VALUE = new Object[] { null }; /** Auto-injected store session. */ @CacheStoreSessionResource private CacheStoreSession ses; /** Auto injected ignite instance. */ @IgniteInstanceResource private Ignite ignite; /** Auto-injected logger instance. */ @LoggerResource protected IgniteLogger log; /** Lock for metadata cache. */ @GridToStringExclude private final Lock cacheMappingsLock = new ReentrantLock(); /** Data source. */ protected DataSource dataSrc; /** Cache with entry mapping description. (cache name, (key id, mapping description)). */ protected volatile Map<String, Map<Object, EntryMapping>> cacheMappings = Collections.emptyMap(); /** Database dialect. */ protected JdbcDialect dialect; /** Max workers thread count. These threads are responsible for load cache. */ private int maxPoolSz = Runtime.getRuntime().availableProcessors(); /** Maximum batch size for writeAll and deleteAll operations. */ private int batchSz = DFLT_BATCH_SIZE; /** Parallel load cache minimum threshold. If {@code 0} then load sequentially. */ private int parallelLoadCacheMinThreshold = DFLT_PARALLEL_LOAD_CACHE_MINIMUM_THRESHOLD; /** * Get field value from object for use as query parameter. * * @param cacheName Cache name. * @param typeName Type name. * @param fieldName Field name. * @param obj Cache object. * @return Field value from object. * @throws CacheException in case of error. */ @Nullable protected abstract Object extractParameter(@Nullable String cacheName, String typeName, String fieldName, Object obj) throws CacheException; /** * Construct object from query result. * * @param <R> Type of result object. * @param cacheName Cache name. * @param typeName Type name. * @param fields Fields descriptors. * @param loadColIdxs Select query columns index. * @param rs ResultSet. * @return Constructed object. 
* @throws CacheLoaderException If failed to construct cache object. */ protected abstract <R> R buildObject(@Nullable String cacheName, String typeName, Collection<CacheTypeFieldMetadata> fields, Map<String, Integer> loadColIdxs, ResultSet rs) throws CacheLoaderException; /** * Extract key type id from key object. * * @param key Key object. * @return Key type id. * @throws CacheException If failed to get type key id from object. */ protected abstract Object keyTypeId(Object key) throws CacheException; /** * Extract key type id from key class name. * * @param type String description of key type. * @return Key type id. * @throws CacheException If failed to get type key id from object. */ protected abstract Object keyTypeId(String type) throws CacheException; /** * Prepare internal store specific builders for provided types metadata. * * @param cacheName Cache name to prepare builders for. * @param types Collection of types. * @throws CacheException If failed to prepare internal builders for types. */ protected abstract void prepareBuilders(@Nullable String cacheName, Collection<CacheTypeMetadata> types) throws CacheException; /** * Perform dialect resolution. * * @return The resolved dialect. * @throws CacheException Indicates problems accessing the metadata. 
*/ protected JdbcDialect resolveDialect() throws CacheException { Connection conn = null; String dbProductName = null; try { conn = openConnection(false); dbProductName = conn.getMetaData().getDatabaseProductName(); } catch (SQLException e) { throw new CacheException("Failed access to metadata for detect database dialect.", e); } finally { U.closeQuiet(conn); } if ("H2".equals(dbProductName)) return new H2Dialect(); if ("MySQL".equals(dbProductName)) return new MySQLDialect(); if (dbProductName.startsWith("Microsoft SQL Server")) return new SQLServerDialect(); if ("Oracle".equals(dbProductName)) return new OracleDialect(); if (dbProductName.startsWith("DB2/")) return new DB2Dialect(); U.warn(log, "Failed to resolve dialect (BasicJdbcDialect will be used): " + dbProductName); return new BasicJdbcDialect(); } /** {@inheritDoc} */ @Override public void start() throws IgniteException { if (dataSrc == null) throw new IgniteException("Failed to initialize cache store (data source is not provided)."); if (dialect == null) { dialect = resolveDialect(); if (log.isDebugEnabled() && dialect.getClass() != BasicJdbcDialect.class) log.debug("Resolved database dialect: " + U.getSimpleName(dialect.getClass())); } } /** {@inheritDoc} */ @Override public void stop() throws IgniteException { // No-op. } /** * Gets connection from a pool. * * @param autocommit {@code true} If connection should use autocommit mode. * @return Pooled connection. * @throws SQLException In case of error. */ protected Connection openConnection(boolean autocommit) throws SQLException { Connection conn = dataSrc.getConnection(); conn.setAutoCommit(autocommit); return conn; } /** * @return Connection. * @throws SQLException In case of error. 
*/ protected Connection connection() throws SQLException { CacheStoreSession ses = session(); if (ses.transaction() != null) { Map<String, Connection> prop = ses.properties(); Connection conn = prop.get(ATTR_CONN_PROP); if (conn == null) { conn = openConnection(false); // Store connection in session to used it for other operations in the same session. prop.put(ATTR_CONN_PROP, conn); } return conn; } // Transaction can be null in case of simple load operation. else return openConnection(true); } /** * Closes connection. * * @param conn Connection to close. */ protected void closeConnection(@Nullable Connection conn) { CacheStoreSession ses = session(); // Close connection right away if there is no transaction. if (ses.transaction() == null) U.closeQuiet(conn); } /** * Closes allocated resources depending on transaction status. * * @param conn Allocated connection. * @param st Created statement, */ protected void end(@Nullable Connection conn, @Nullable Statement st) { U.closeQuiet(st); closeConnection(conn); } /** {@inheritDoc} */ @Override public void sessionEnd(boolean commit) throws CacheWriterException { CacheStoreSession ses = session(); Transaction tx = ses.transaction(); if (tx != null) { Map<String, Connection> sesProps = ses.properties(); Connection conn = sesProps.get(ATTR_CONN_PROP); if (conn != null) { sesProps.remove(ATTR_CONN_PROP); try { if (commit) conn.commit(); else conn.rollback(); } catch (SQLException e) { throw new CacheWriterException( "Failed to end transaction [xid=" + tx.xid() + ", commit=" + commit + ']', e); } finally { U.closeQuiet(conn); } } if (log.isDebugEnabled()) log.debug("Transaction ended [xid=" + tx.xid() + ", commit=" + commit + ']'); } } /** * Retrieves the value of the designated column in the current row of this <code>ResultSet</code> object and * will convert to the requested Java data type. * * @param rs Result set. * @param colIdx Column index in result set. 
* @param type Class representing the Java data type to convert the designated column to. * @return Value in column. * @throws SQLException If a database access error occurs or this method is called. */ protected Object getColumnValue(ResultSet rs, int colIdx, Class<?> type) throws SQLException { Object val = rs.getObject(colIdx); if (val == null) return null; if (type == int.class) return rs.getInt(colIdx); if (type == long.class) return rs.getLong(colIdx); if (type == double.class) return rs.getDouble(colIdx); if (type == boolean.class || type == Boolean.class) return rs.getBoolean(colIdx); if (type == byte.class) return rs.getByte(colIdx); if (type == short.class) return rs.getShort(colIdx); if (type == float.class) return rs.getFloat(colIdx); if (type == Integer.class || type == Long.class || type == Double.class || type == Byte.class || type == Short.class || type == Float.class) { Number num = (Number)val; if (type == Integer.class) return num.intValue(); else if (type == Long.class) return num.longValue(); else if (type == Double.class) return num.doubleValue(); else if (type == Byte.class) return num.byteValue(); else if (type == Short.class) return num.shortValue(); else if (type == Float.class) return num.floatValue(); } if (type == UUID.class) { if (val instanceof UUID) return val; if (val instanceof byte[]) { ByteBuffer bb = ByteBuffer.wrap((byte[])val); long most = bb.getLong(); long least = bb.getLong(); return new UUID(most, least); } if (val instanceof String) return UUID.fromString((String)val); } return val; } /** * Construct load cache from range. * * @param em Type mapping description. * @param clo Closure that will be applied to loaded values. * @param lowerBound Lower bound for range. * @param upperBound Upper bound for range. * @return Callable for pool submit. 
*/ private Callable<Void> loadCacheRange(final EntryMapping em, final IgniteBiInClosure<K, V> clo, @Nullable final Object[] lowerBound, @Nullable final Object[] upperBound) { return new Callable<Void>() { @Override public Void call() throws Exception { Connection conn = null; PreparedStatement stmt = null; try { conn = openConnection(true); stmt = conn.prepareStatement(lowerBound == null && upperBound == null ? em.loadCacheQry : em.loadCacheRangeQuery(lowerBound != null, upperBound != null)); int ix = 1; if (lowerBound != null) for (int i = lowerBound.length; i > 0; i--) for (int j = 0; j < i; j++) stmt.setObject(ix++, lowerBound[j]); if (upperBound != null) for (int i = upperBound.length; i > 0; i--) for (int j = 0; j < i; j++) stmt.setObject(ix++, upperBound[j]); ResultSet rs = stmt.executeQuery(); while (rs.next()) { K key = buildObject(em.cacheName, em.keyType(), em.keyColumns(), em.loadColIdxs, rs); V val = buildObject(em.cacheName, em.valueType(), em.valueColumns(), em.loadColIdxs, rs); clo.apply(key, val); } } catch (SQLException e) { throw new IgniteCheckedException("Failed to load cache", e); } finally { U.closeQuiet(stmt); U.closeQuiet(conn); } return null; } }; } /** * Construct load cache in one select. * * @param m Type mapping description. * @param clo Closure for loaded values. * @return Callable for pool submit. */ private Callable<Void> loadCacheFull(EntryMapping m, IgniteBiInClosure<K, V> clo) { return loadCacheRange(m, clo, null, null); } /** * Object is a simple type. * * @param cls Class. * @return {@code True} if object is a simple type. */ protected static boolean simpleType(Class<?> cls) { return (Number.class.isAssignableFrom(cls) || String.class.isAssignableFrom(cls) || java.util.Date.class.isAssignableFrom(cls) || Boolean.class.isAssignableFrom(cls) || UUID.class.isAssignableFrom(cls)); } /** * @param cacheName Cache name to check mapping for. * @param clsName Class name. * @param fields Fields descriptors. 
* @throws CacheException If failed to check type metadata. */ private static void checkMapping(@Nullable String cacheName, String clsName, Collection<CacheTypeFieldMetadata> fields) throws CacheException { try { Class<?> cls = Class.forName(clsName); if (simpleType(cls)) { if (fields.size() != 1) throw new CacheException("More than one field for simple type [cache name=" + cacheName + ", type=" + clsName + " ]"); CacheTypeFieldMetadata field = F.first(fields); if (field.getDatabaseName() == null) throw new CacheException("Missing database name in mapping description [cache name=" + cacheName + ", type=" + clsName + " ]"); field.setJavaType(cls); } else for (CacheTypeFieldMetadata field : fields) { if (field.getDatabaseName() == null) throw new CacheException("Missing database name in mapping description [cache name=" + cacheName + ", type=" + clsName + " ]"); if (field.getJavaName() == null) throw new CacheException("Missing field name in mapping description [cache name=" + cacheName + ", type=" + clsName + " ]"); if (field.getJavaType() == null) throw new CacheException("Missing field type in mapping description [cache name=" + cacheName + ", type=" + clsName + " ]"); } } catch (ClassNotFoundException e) { throw new CacheException("Failed to find class: " + clsName, e); } } /** * @param cacheName Cache name to check mappings for. * @return Type mappings for specified cache name. * @throws CacheException If failed to initialize cache mappings. 
*/ private Map<Object, EntryMapping> cacheMappings(@Nullable String cacheName) throws CacheException { Map<Object, EntryMapping> entryMappings = cacheMappings.get(cacheName); if (entryMappings != null) return entryMappings; cacheMappingsLock.lock(); try { entryMappings = cacheMappings.get(cacheName); if (entryMappings != null) return entryMappings; CacheConfiguration ccfg = ignite().cache(cacheName).getConfiguration(CacheConfiguration.class); Collection<CacheTypeMetadata> types = ccfg.getTypeMetadata(); entryMappings = U.newHashMap(types.size()); for (CacheTypeMetadata type : types) { Object keyTypeId = keyTypeId(type.getKeyType()); if (entryMappings.containsKey(keyTypeId)) throw new CacheException("Key type must be unique in type metadata [cache name=" + cacheName + ", key type=" + type.getKeyType() + "]"); checkMapping(cacheName, type.getKeyType(), type.getKeyFields()); checkMapping(cacheName, type.getValueType(), type.getValueFields()); entryMappings.put(keyTypeId(type.getKeyType()), new EntryMapping(cacheName, dialect, type)); } Map<String, Map<Object, EntryMapping>> mappings = new HashMap<>(cacheMappings); mappings.put(cacheName, entryMappings); prepareBuilders(cacheName, types); cacheMappings = mappings; return entryMappings; } finally { cacheMappingsLock.unlock(); } } /** * @param cacheName Cache name. * @param keyTypeId Key type id. * @param key Key object. * @return Entry mapping. * @throws CacheException If mapping for key was not found. */ private EntryMapping entryMapping(String cacheName, Object keyTypeId, Object key) throws CacheException { EntryMapping em = cacheMappings(cacheName).get(keyTypeId); if (em == null) { String maskedCacheName = U.maskName(cacheName); throw new CacheException("Failed to find mapping description [key=" + key + ", cache=" + maskedCacheName + "]. 
Please configure CacheTypeMetadata to associate '" + maskedCacheName + "' with JdbcPojoStore."); } return em; } /** {@inheritDoc} */ @Override public void loadCache(final IgniteBiInClosure<K, V> clo, @Nullable Object... args) throws CacheLoaderException { ExecutorService pool = null; String cacheName = session().cacheName(); try { pool = Executors.newFixedThreadPool(maxPoolSz); Collection<Future<?>> futs = new ArrayList<>(); if (args != null && args.length > 0) { if (args.length % 2 != 0) throw new CacheLoaderException("Expected even number of arguments, but found: " + args.length); if (log.isDebugEnabled()) log.debug("Start loading entries from db using user queries from arguments"); for (int i = 0; i < args.length; i += 2) { String keyType = args[i].toString(); String selQry = args[i + 1].toString(); EntryMapping em = entryMapping(cacheName, keyTypeId(keyType), keyType); futs.add(pool.submit(new LoadCacheCustomQueryWorker<>(em, selQry, clo))); } } else { Collection<EntryMapping> entryMappings = cacheMappings(session().cacheName()).values(); for (EntryMapping em : entryMappings) { if (parallelLoadCacheMinThreshold > 0) { log.debug("Multithread loading entries from db [cache name=" + cacheName + ", key type=" + em.keyType() + " ]"); Connection conn = null; try { conn = connection(); PreparedStatement stmt = conn.prepareStatement(em.loadCacheSelRangeQry); stmt.setInt(1, parallelLoadCacheMinThreshold); ResultSet rs = stmt.executeQuery(); if (rs.next()) { int keyCnt = em.keyCols.size(); Object[] upperBound = new Object[keyCnt]; for (int i = 0; i < keyCnt; i++) upperBound[i] = rs.getObject(i + 1); futs.add(pool.submit(loadCacheRange(em, clo, null, upperBound))); while (rs.next()) { Object[] lowerBound = upperBound; upperBound = new Object[keyCnt]; for (int i = 0; i < keyCnt; i++) upperBound[i] = rs.getObject(i + 1); futs.add(pool.submit(loadCacheRange(em, clo, lowerBound, upperBound))); } futs.add(pool.submit(loadCacheRange(em, clo, upperBound, null))); } else 
futs.add(pool.submit(loadCacheFull(em, clo))); } catch (SQLException ignored) { futs.add(pool.submit(loadCacheFull(em, clo))); } finally { U.closeQuiet(conn); } } else { if (log.isDebugEnabled()) log.debug("Single thread loading entries from db [cache name=" + cacheName + ", key type=" + em.keyType() + " ]"); futs.add(pool.submit(loadCacheFull(em, clo))); } } } for (Future<?> fut : futs) U.get(fut); if (log.isDebugEnabled()) log.debug("Cache loaded from db: " + cacheName); } catch (IgniteCheckedException e) { throw new CacheLoaderException("Failed to load cache: " + cacheName, e.getCause()); } finally { U.shutdownNow(getClass(), pool, log); } } /** {@inheritDoc} */ @Nullable @Override public V load(K key) throws CacheLoaderException { assert key != null; EntryMapping em = entryMapping(session().cacheName(), keyTypeId(key), key); if (log.isDebugEnabled()) log.debug("Load value from db [table= " + em.fullTableName() + ", key=" + key + "]"); Connection conn = null; PreparedStatement stmt = null; try { conn = connection(); stmt = conn.prepareStatement(em.loadQrySingle); fillKeyParameters(stmt, em, key); ResultSet rs = stmt.executeQuery(); if (rs.next()) return buildObject(em.cacheName, em.valueType(), em.valueColumns(), em.loadColIdxs, rs); } catch (SQLException e) { throw new CacheLoaderException("Failed to load object [table=" + em.fullTableName() + ", key=" + key + "]", e); } finally { end(conn, stmt); } return null; } /** {@inheritDoc} */ @Override public Map<K, V> loadAll(Iterable<? 
extends K> keys) throws CacheLoaderException { assert keys != null; Connection conn = null; try { conn = connection(); String cacheName = session().cacheName(); Map<Object, LoadWorker<K, V>> workers = U.newHashMap(cacheMappings(cacheName).size()); Map<K, V> res = new HashMap<>(); for (K key : keys) { Object keyTypeId = keyTypeId(key); EntryMapping em = entryMapping(cacheName, keyTypeId, key); LoadWorker<K, V> worker = workers.get(keyTypeId); if (worker == null) workers.put(keyTypeId, worker = new LoadWorker<>(conn, em)); worker.keys.add(key); if (worker.keys.size() == em.maxKeysPerStmt) res.putAll(workers.remove(keyTypeId).call()); } for (LoadWorker<K, V> worker : workers.values()) res.putAll(worker.call()); return res; } catch (Exception e) { throw new CacheWriterException("Failed to load entries from database", e); } finally { closeConnection(conn); } } /** * @param insStmt Insert statement. * @param updStmt Update statement. * @param em Entry mapping. * @param entry Cache entry. * @throws CacheWriterException If failed to update record in database. */ private void writeUpsert(PreparedStatement insStmt, PreparedStatement updStmt, EntryMapping em, Cache.Entry<? extends K, ? 
extends V> entry) throws CacheWriterException { try { CacheWriterException we = null; for (int attempt = 0; attempt < MAX_ATTEMPT_WRITE_COUNT; attempt++) { int paramIdx = fillValueParameters(updStmt, 1, em, entry.getValue()); fillKeyParameters(updStmt, paramIdx, em, entry.getKey()); if (updStmt.executeUpdate() == 0) { paramIdx = fillKeyParameters(insStmt, em, entry.getKey()); fillValueParameters(insStmt, paramIdx, em, entry.getValue()); try { insStmt.executeUpdate(); if (attempt > 0) U.warn(log, "Entry was inserted in database on second try [table=" + em.fullTableName() + ", entry=" + entry + "]"); } catch (SQLException e) { String sqlState = e.getSQLState(); SQLException nested = e.getNextException(); while (sqlState == null && nested != null) { sqlState = nested.getSQLState(); nested = nested.getNextException(); } // The error with code 23505 or 23000 is thrown when trying to insert a row that // would violate a unique index or primary key. if ("23505".equals(sqlState) || "23000".equals(sqlState)) { if (we == null) we = new CacheWriterException("Failed insert entry in database, violate a unique" + " index or primary key [table=" + em.fullTableName() + ", entry=" + entry + "]"); we.addSuppressed(e); U.warn(log, "Failed insert entry in database, violate a unique index or primary key" + " [table=" + em.fullTableName() + ", entry=" + entry + "]"); continue; } throw new CacheWriterException("Failed insert entry in database [table=" + em.fullTableName() + ", entry=" + entry, e); } } if (attempt > 0) U.warn(log, "Entry was updated in database on second try [table=" + em.fullTableName() + ", entry=" + entry + "]"); return; } throw we; } catch (SQLException e) { throw new CacheWriterException("Failed update entry in database [table=" + em.fullTableName() + ", entry=" + entry + "]", e); } } /** {@inheritDoc} */ @Override public void write(Cache.Entry<? extends K, ? 
extends V> entry) throws CacheWriterException { assert entry != null; K key = entry.getKey(); EntryMapping em = entryMapping(session().cacheName(), keyTypeId(key), key); if (log.isDebugEnabled()) log.debug("Start write entry to database [table=" + em.fullTableName() + ", entry=" + entry + "]"); Connection conn = null; try { conn = connection(); if (dialect.hasMerge()) { PreparedStatement stmt = null; try { stmt = conn.prepareStatement(em.mergeQry); int i = fillKeyParameters(stmt, em, key); fillValueParameters(stmt, i, em, entry.getValue()); int updCnt = stmt.executeUpdate(); if (updCnt != 1) U.warn(log, "Unexpected number of updated entries [table=" + em.fullTableName() + ", entry=" + entry + "expected=1, actual=" + updCnt + "]"); } finally { U.closeQuiet(stmt); } } else { PreparedStatement insStmt = null; PreparedStatement updStmt = null; try { insStmt = conn.prepareStatement(em.insQry); updStmt = conn.prepareStatement(em.updQry); writeUpsert(insStmt, updStmt, em, entry); } finally { U.closeQuiet(insStmt); U.closeQuiet(updStmt); } } } catch (SQLException e) { throw new CacheWriterException("Failed to write entry to database [table=" + em.fullTableName() + ", entry=" + entry + "]", e); } finally { closeConnection(conn); } } /** {@inheritDoc} */ @Override public void writeAll(final Collection<Cache.Entry<? extends K, ? extends V>> entries) throws CacheWriterException { assert entries != null; Connection conn = null; try { conn = connection(); String cacheName = session().cacheName(); Object currKeyTypeId = null; if (dialect.hasMerge()) { PreparedStatement mergeStmt = null; try { EntryMapping em = null; LazyValue<Object[]> lazyEntries = new LazyValue<Object[]>() { @Override public Object[] create() { return entries.toArray(); } }; int fromIdx = 0, prepared = 0; for (Cache.Entry<? extends K, ? 
extends V> entry : entries) { K key = entry.getKey(); Object keyTypeId = keyTypeId(key); em = entryMapping(cacheName, keyTypeId, key); if (currKeyTypeId == null || !currKeyTypeId.equals(keyTypeId)) { if (mergeStmt != null) { if (log.isDebugEnabled()) log.debug("Write entries to db [cache name=" + cacheName + ", key type=" + em.keyType() + ", count=" + prepared + "]"); executeBatch(em, mergeStmt, "writeAll", fromIdx, prepared, lazyEntries); U.closeQuiet(mergeStmt); } mergeStmt = conn.prepareStatement(em.mergeQry); currKeyTypeId = keyTypeId; fromIdx += prepared; prepared = 0; } int i = fillKeyParameters(mergeStmt, em, key); fillValueParameters(mergeStmt, i, em, entry.getValue()); mergeStmt.addBatch(); if (++prepared % batchSz == 0) { if (log.isDebugEnabled()) log.debug("Write entries to db [cache name=" + cacheName + ", key type=" + em.keyType() + ", count=" + prepared + "]"); executeBatch(em, mergeStmt, "writeAll", fromIdx, prepared, lazyEntries); fromIdx += prepared; prepared = 0; } } if (mergeStmt != null && prepared % batchSz != 0) { if (log.isDebugEnabled()) log.debug("Write entries to db [cache name=" + cacheName + ", key type=" + em.keyType() + ", count=" + prepared + "]"); executeBatch(em, mergeStmt, "writeAll", fromIdx, prepared, lazyEntries); } } finally { U.closeQuiet(mergeStmt); } } else { log.debug("Write entries to db one by one using update and insert statements [cache name=" + cacheName + ", count=" + entries.size() + "]"); PreparedStatement insStmt = null; PreparedStatement updStmt = null; try { for (Cache.Entry<? extends K, ? 
extends V> entry : entries) { K key = entry.getKey(); Object keyTypeId = keyTypeId(key); EntryMapping em = entryMapping(cacheName, keyTypeId, key); if (currKeyTypeId == null || !currKeyTypeId.equals(keyTypeId)) { U.closeQuiet(insStmt); insStmt = conn.prepareStatement(em.insQry); U.closeQuiet(updStmt); updStmt = conn.prepareStatement(em.updQry); currKeyTypeId = keyTypeId; } writeUpsert(insStmt, updStmt, em, entry); } } finally { U.closeQuiet(insStmt); U.closeQuiet(updStmt); } } } catch (SQLException e) { throw new CacheWriterException("Failed to write entries in database", e); } finally { closeConnection(conn); } } /** {@inheritDoc} */ @Override public void delete(Object key) throws CacheWriterException { assert key != null; EntryMapping em = entryMapping(session().cacheName(), keyTypeId(key), key); if (log.isDebugEnabled()) log.debug("Remove value from db [table=" + em.fullTableName() + ", key=" + key + "]"); Connection conn = null; PreparedStatement stmt = null; try { conn = connection(); stmt = conn.prepareStatement(em.remQry); fillKeyParameters(stmt, em, key); int delCnt = stmt.executeUpdate(); if (delCnt != 1) U.warn(log, "Unexpected number of deleted entries [table=" + em.fullTableName() + ", key=" + key + ", expected=1, actual=" + delCnt + "]"); } catch (SQLException e) { throw new CacheWriterException("Failed to remove value from database [table=" + em.fullTableName() + ", key=" + key + "]", e); } finally { end(conn, stmt); } } /** * @param em Entry mapping. * @param stmt Statement. * @param desc Statement description for error message. * @param fromIdx Objects in batch start from index. * @param prepared Expected objects in batch. * @param lazyObjs All objects used in batch statement as array. * @throws SQLException If failed to execute batch statement. 
*/ private void executeBatch(EntryMapping em, Statement stmt, String desc, int fromIdx, int prepared, LazyValue<Object[]> lazyObjs) throws SQLException { try { int[] rowCounts = stmt.executeBatch(); int numOfRowCnt = rowCounts.length; if (numOfRowCnt != prepared) U.warn(log, "Unexpected number of updated rows [table=" + em.fullTableName() + ", expected=" + prepared + ", actual=" + numOfRowCnt + "]"); for (int i = 0; i < numOfRowCnt; i++) { int cnt = rowCounts[i]; if (cnt != 1 && cnt != SUCCESS_NO_INFO) { Object[] objs = lazyObjs.value(); U.warn(log, "Batch " + desc + " returned unexpected updated row count [table=" + em.fullTableName() + ", entry=" + objs[fromIdx + i] + ", expected=1, actual=" + cnt + "]"); } } } catch (BatchUpdateException be) { int[] rowCounts = be.getUpdateCounts(); for (int i = 0; i < rowCounts.length; i++) { if (rowCounts[i] == EXECUTE_FAILED) { Object[] objs = lazyObjs.value(); U.warn(log, "Batch " + desc + " failed on execution [table=" + em.fullTableName() + ", entry=" + objs[fromIdx + i] + "]"); } } throw be; } } /** {@inheritDoc} */ @Override public void deleteAll(final Collection<?> keys) throws CacheWriterException { assert keys != null; Connection conn = null; try { conn = connection(); LazyValue<Object[]> lazyKeys = new LazyValue<Object[]>() { @Override public Object[] create() { return keys.toArray(); } }; String cacheName = session().cacheName(); Object currKeyTypeId = null; EntryMapping em = null; PreparedStatement delStmt = null; int fromIdx = 0, prepared = 0; for (Object key : keys) { Object keyTypeId = keyTypeId(key); em = entryMapping(cacheName, keyTypeId, key); if (delStmt == null) { delStmt = conn.prepareStatement(em.remQry); currKeyTypeId = keyTypeId; } if (!currKeyTypeId.equals(keyTypeId)) { if (log.isDebugEnabled()) log.debug("Delete entries from db [cache name=" + cacheName + ", key type=" + em.keyType() + ", count=" + prepared + "]"); executeBatch(em, delStmt, "deleteAll", fromIdx, prepared, lazyKeys); fromIdx += 
prepared; prepared = 0; currKeyTypeId = keyTypeId; } fillKeyParameters(delStmt, em, key); delStmt.addBatch(); if (++prepared % batchSz == 0) { if (log.isDebugEnabled()) log.debug("Delete entries from db [cache name=" + cacheName + ", key type=" + em.keyType() + ", count=" + prepared + "]"); executeBatch(em, delStmt, "deleteAll", fromIdx, prepared, lazyKeys); fromIdx += prepared; prepared = 0; } } if (delStmt != null && prepared % batchSz != 0) { if (log.isDebugEnabled()) log.debug("Delete entries from db [cache name=" + cacheName + ", key type=" + em.keyType() + ", count=" + prepared + "]"); executeBatch(em, delStmt, "deleteAll", fromIdx, prepared, lazyKeys); } } catch (SQLException e) { throw new CacheWriterException("Failed to remove values from database", e); } finally { closeConnection(conn); } } /** * Sets the value of the designated parameter using the given object. * * @param stmt Prepare statement. * @param i Index for parameters. * @param field Field descriptor. * @param fieldVal Field value. * @throws CacheException If failed to set statement parameter. */ protected void fillParameter(PreparedStatement stmt, int i, CacheTypeFieldMetadata field, @Nullable Object fieldVal) throws CacheException { try { if (fieldVal != null) { if (field.getJavaType() == UUID.class) { switch (field.getDatabaseType()) { case Types.BINARY: fieldVal = U.uuidToBytes((UUID)fieldVal); break; case Types.CHAR: case Types.VARCHAR: fieldVal = fieldVal.toString(); break; } } stmt.setObject(i, fieldVal); } else stmt.setNull(i, field.getDatabaseType()); } catch (SQLException e) { throw new CacheException("Failed to set statement parameter name: " + field.getDatabaseName(), e); } } /** * @param stmt Prepare statement. * @param idx Start index for parameters. * @param em Entry mapping. * @param key Key object. * @return Next index for parameters. * @throws CacheException If failed to set statement parameters. 
*/ protected int fillKeyParameters(PreparedStatement stmt, int idx, EntryMapping em, Object key) throws CacheException { for (CacheTypeFieldMetadata field : em.keyColumns()) { Object fieldVal = extractParameter(em.cacheName, em.keyType(), field.getJavaName(), key); fillParameter(stmt, idx++, field, fieldVal); } return idx; } /** * @param stmt Prepare statement. * @param m Type mapping description. * @param key Key object. * @return Next index for parameters. * @throws CacheException If failed to set statement parameters. */ protected int fillKeyParameters(PreparedStatement stmt, EntryMapping m, Object key) throws CacheException { return fillKeyParameters(stmt, 1, m, key); } /** * @param stmt Prepare statement. * @param idx Start index for parameters. * @param em Type mapping description. * @param val Value object. * @return Next index for parameters. * @throws CacheException If failed to set statement parameters. */ protected int fillValueParameters(PreparedStatement stmt, int idx, EntryMapping em, Object val) throws CacheWriterException { for (CacheTypeFieldMetadata field : em.uniqValFields) { Object fieldVal = extractParameter(em.cacheName, em.valueType(), field.getJavaName(), val); fillParameter(stmt, idx++, field, fieldVal); } return idx; } /** * @return Data source. */ public DataSource getDataSource() { return dataSrc; } /** * @param dataSrc Data source. */ public void setDataSource(DataSource dataSrc) { this.dataSrc = dataSrc; } /** * Get database dialect. * * @return Database dialect. */ public JdbcDialect getDialect() { return dialect; } /** * Set database dialect. * * @param dialect Database dialect. */ public void setDialect(JdbcDialect dialect) { this.dialect = dialect; } /** * Get Max workers thread count. These threads are responsible for execute query. * * @return Max workers thread count. */ public int getMaximumPoolSize() { return maxPoolSz; } /** * Set Max workers thread count. These threads are responsible for execute query. 
* * @param maxPoolSz Max workers thread count. */ public void setMaximumPoolSize(int maxPoolSz) { this.maxPoolSz = maxPoolSz; } /** * Get maximum batch size for delete and delete operations. * * @return Maximum batch size. */ public int getBatchSize() { return batchSz; } /** * Set maximum batch size for write and delete operations. * * @param batchSz Maximum batch size. */ public void setBatchSize(int batchSz) { this.batchSz = batchSz; } /** * Parallel load cache minimum row count threshold. * * @return If {@code 0} then load sequentially. */ public int getParallelLoadCacheMinimumThreshold() { return parallelLoadCacheMinThreshold; } /** * Parallel load cache minimum row count threshold. * * @param parallelLoadCacheMinThreshold Minimum row count threshold. If {@code 0} then load sequentially. */ public void setParallelLoadCacheMinimumThreshold(int parallelLoadCacheMinThreshold) { this.parallelLoadCacheMinThreshold = parallelLoadCacheMinThreshold; } /** * @return Ignite instance. */ protected Ignite ignite() { return ignite; } /** * @return Store session. */ protected CacheStoreSession session() { return ses; } /** * Entry mapping description. */ protected static class EntryMapping { /** Cache name. */ private final String cacheName; /** Database dialect. */ private final JdbcDialect dialect; /** Select border for range queries. */ private final String loadCacheSelRangeQry; /** Select all items query. */ private final String loadCacheQry; /** Select item query. */ private final String loadQrySingle; /** Select items query. */ private final String loadQry; /** Merge item(s) query. */ private final String mergeQry; /** Update item query. */ private final String insQry; /** Update item query. */ private final String updQry; /** Remove item(s) query. */ private final String remQry; /** Max key count for load query per statement. */ private final int maxKeysPerStmt; /** Database key columns. */ private final Collection<String> keyCols; /** Database unique value columns. 
*/ private final Collection<String> cols; /** Select query columns index. */ private final Map<String, Integer> loadColIdxs; /** Unique value fields. */ private final Collection<CacheTypeFieldMetadata> uniqValFields; /** Type metadata. */ private final CacheTypeMetadata typeMeta; /** Full table name. */ private final String fullTblName; /** * @param cacheName Cache name. * @param dialect JDBC dialect. * @param typeMeta Type metadata. */ public EntryMapping(@Nullable String cacheName, JdbcDialect dialect, CacheTypeMetadata typeMeta) { this.cacheName = cacheName; this.dialect = dialect; this.typeMeta = typeMeta; Collection<CacheTypeFieldMetadata> keyFields = typeMeta.getKeyFields(); Collection<CacheTypeFieldMetadata> valFields = typeMeta.getValueFields(); keyCols = databaseColumns(keyFields); uniqValFields = F.view(valFields, new IgnitePredicate<CacheTypeFieldMetadata>() { @Override public boolean apply(CacheTypeFieldMetadata col) { return !keyCols.contains(col.getDatabaseName()); } }); String schema = typeMeta.getDatabaseSchema(); String tblName = typeMeta.getDatabaseTable(); fullTblName = F.isEmpty(schema) ? tblName : schema + "." 
+ tblName; Collection<String> uniqValCols = databaseColumns(uniqValFields); cols = F.concat(false, keyCols, uniqValCols); loadColIdxs = U.newHashMap(cols.size()); int idx = 1; for (String col : cols) loadColIdxs.put(col, idx++); loadCacheQry = dialect.loadCacheQuery(fullTblName, cols); loadCacheSelRangeQry = dialect.loadCacheSelectRangeQuery(fullTblName, keyCols); loadQrySingle = dialect.loadQuery(fullTblName, keyCols, cols, 1); maxKeysPerStmt = dialect.getMaxParameterCount() / keyCols.size(); loadQry = dialect.loadQuery(fullTblName, keyCols, cols, maxKeysPerStmt); insQry = dialect.insertQuery(fullTblName, keyCols, uniqValCols); updQry = dialect.updateQuery(fullTblName, keyCols, uniqValCols); mergeQry = dialect.mergeQuery(fullTblName, keyCols, uniqValCols); remQry = dialect.removeQuery(fullTblName, keyCols); } /** * Extract database column names from {@link CacheTypeFieldMetadata}. * * @param dsc collection of {@link CacheTypeFieldMetadata}. * @return Collection with database column names. */ private static Collection<String> databaseColumns(Collection<CacheTypeFieldMetadata> dsc) { return F.transform(dsc, new C1<CacheTypeFieldMetadata, String>() { /** {@inheritDoc} */ @Override public String apply(CacheTypeFieldMetadata col) { return col.getDatabaseName(); } }); } /** * Construct query for select values with key count less or equal {@code maxKeysPerStmt} * * @param keyCnt Key count. * @return Load query statement text. */ protected String loadQuery(int keyCnt) { assert keyCnt <= maxKeysPerStmt; if (keyCnt == maxKeysPerStmt) return loadQry; if (keyCnt == 1) return loadQrySingle; return dialect.loadQuery(fullTblName, keyCols, cols, keyCnt); } /** * Construct query for select values in range. * * @param appendLowerBound Need add lower bound for range. * @param appendUpperBound Need add upper bound for range. * @return Query with range. 
*/ protected String loadCacheRangeQuery(boolean appendLowerBound, boolean appendUpperBound) { return dialect.loadCacheRangeQuery(fullTblName, keyCols, cols, appendLowerBound, appendUpperBound); } /** * @return Key type. */ protected String keyType() { return typeMeta.getKeyType(); } /** * @return Value type. */ protected String valueType() { return typeMeta.getValueType(); } /** * Gets key columns. * * @return Key columns. */ protected Collection<CacheTypeFieldMetadata> keyColumns() { return typeMeta.getKeyFields(); } /** * Gets value columns. * * @return Value columns. */ protected Collection<CacheTypeFieldMetadata> valueColumns() { return typeMeta.getValueFields(); } /** * Get full table name. * * @return &lt;schema&gt;.&lt;table name&gt */ protected String fullTableName() { return fullTblName; } } /** * Worker for load cache using custom user query. * * @param <K1> Key type. * @param <V1> Value type. */ private class LoadCacheCustomQueryWorker<K1, V1> implements Callable<Void> { /** Entry mapping description. */ private final EntryMapping em; /** User query. */ private final String qry; /** Closure for loaded values. */ private final IgniteBiInClosure<K1, V1> clo; /** * @param em Entry mapping description. * @param qry User query. * @param clo Closure for loaded values. 
*/ private LoadCacheCustomQueryWorker(EntryMapping em, String qry, IgniteBiInClosure<K1, V1> clo) { this.em = em; this.qry = qry; this.clo = clo; } /** {@inheritDoc} */ @Override public Void call() throws Exception { if (log.isDebugEnabled()) log.debug("Load cache using custom query [cache name= " + em.cacheName + ", key type=" + em.keyType() + ", query=" + qry + "]"); Connection conn = null; PreparedStatement stmt = null; try { conn = openConnection(true); stmt = conn.prepareStatement(qry); ResultSet rs = stmt.executeQuery(); ResultSetMetaData meta = rs.getMetaData(); Map<String, Integer> colIdxs = U.newHashMap(meta.getColumnCount()); for (int i = 1; i <= meta.getColumnCount(); i++) colIdxs.put(meta.getColumnLabel(i), i); while (rs.next()) { K1 key = buildObject(em.cacheName, em.keyType(), em.keyColumns(), colIdxs, rs); V1 val = buildObject(em.cacheName, em.valueType(), em.valueColumns(), colIdxs, rs); clo.apply(key, val); } return null; } catch (SQLException e) { throw new CacheLoaderException("Failed to execute custom query for load cache", e); } finally { U.closeQuiet(stmt); U.closeQuiet(conn); } } } /** * Lazy initialization of value. * * @param <T> Cached object type */ private abstract static class LazyValue<T> { /** Cached value. */ private T val; /** * @return Construct value. */ protected abstract T create(); /** * @return Value. */ public T value() { if (val == null) val = create(); return val; } } /** * Worker for load by keys. * * @param <K1> Key type. * @param <V1> Value type. */ private class LoadWorker<K1, V1> implements Callable<Map<K1, V1>> { /** Connection. */ private final Connection conn; /** Keys for load. */ private final Collection<K1> keys; /** Entry mapping description. */ private final EntryMapping em; /** * @param conn Connection. * @param em Entry mapping description. 
*/ private LoadWorker(Connection conn, EntryMapping em) { this.conn = conn; this.em = em; keys = new ArrayList<>(em.maxKeysPerStmt); } /** {@inheritDoc} */ @Override public Map<K1, V1> call() throws Exception { if (log.isDebugEnabled()) log.debug("Load values from db [table= " + em.fullTableName() + ", key count=" + keys.size() + "]"); PreparedStatement stmt = null; try { stmt = conn.prepareStatement(em.loadQuery(keys.size())); int idx = 1; for (Object key : keys) for (CacheTypeFieldMetadata field : em.keyColumns()) { Object fieldVal = extractParameter(em.cacheName, em.keyType(), field.getJavaName(), key); fillParameter(stmt, idx++, field, fieldVal); } ResultSet rs = stmt.executeQuery(); Map<K1, V1> entries = U.newHashMap(keys.size()); while (rs.next()) { K1 key = buildObject(em.cacheName, em.keyType(), em.keyColumns(), em.loadColIdxs, rs); V1 val = buildObject(em.cacheName, em.valueType(), em.valueColumns(), em.loadColIdxs, rs); entries.put(key, val); } return entries; } finally { U.closeQuiet(stmt); } } } }
Java
# Seed script: loads government reference data from raw SQL dumps and then
# resynchronizes each table's Postgres ID sequence so subsequent inserts do
# not collide with the seeded primary keys.
connection = ActiveRecord::Base.connection

puts 'adding govt data'

# Load the raw SQL dump for each table (join table included).
%w[states districts zip_codes districts_zip_codes legislators].each do |table|
  puts "loading #{table}"
  connection.execute(IO.read("db/seed_data/#{table}.sql"))
end

# Restart each table's ID sequence just past the highest seeded ID.
# (The join table has no surrogate key, so it is excluded here.)
%w[states districts zip_codes legislators].each do |table|
  puts "updating table IDs for #{table}"
  result = connection.execute("SELECT id FROM #{table} ORDER BY id DESC LIMIT 1")

  # Guard against an empty table: `result.first` would be nil and the
  # original `result.first['id']` call would raise NoMethodError.
  max_id = result.first ? result.first['id'].to_i : 0

  connection.execute(
    "ALTER SEQUENCE #{table}_id_seq RESTART WITH #{max_id + 1}"
  )
end
Java
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_111) on Wed Jan 04 22:31:31 EST 2017 --> <title>Uses of Class org.drip.sample.stochasticvolatility.HestonAMSTPayoffTransform</title> <meta name="date" content="2017-01-04"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.drip.sample.stochasticvolatility.HestonAMSTPayoffTransform"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/drip/sample/stochasticvolatility/HestonAMSTPayoffTransform.html" title="class in org.drip.sample.stochasticvolatility">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/drip/sample/stochasticvolatility/class-use/HestonAMSTPayoffTransform.html" target="_top">Frames</a></li> <li><a 
href="HestonAMSTPayoffTransform.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.drip.sample.stochasticvolatility.HestonAMSTPayoffTransform" class="title">Uses of Class<br>org.drip.sample.stochasticvolatility.HestonAMSTPayoffTransform</h2> </div> <div class="classUseContainer">No usage of org.drip.sample.stochasticvolatility.HestonAMSTPayoffTransform</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/drip/sample/stochasticvolatility/HestonAMSTPayoffTransform.html" title="class in org.drip.sample.stochasticvolatility">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/drip/sample/stochasticvolatility/class-use/HestonAMSTPayoffTransform.html" 
target="_top">Frames</a></li> <li><a href="HestonAMSTPayoffTransform.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> </html>
Java
<?php
/**
 * Activity
 *
 * PHP version 5
 *
 * @category Class
 * @package ultracart\v2
 * @author Swagger Codegen team
 * @link https://github.com/swagger-api/swagger-codegen
 */

/**
 * UltraCart Rest API V2
 *
 * UltraCart REST API Version 2
 *
 * OpenAPI spec version: 2.0.0
 * Contact: support@ultracart.com
 * Generated by: https://github.com/swagger-api/swagger-codegen.git
 * Swagger Codegen version: 2.4.15-SNAPSHOT
 */

/**
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen
 * Do not edit the class manually.
 */

namespace ultracart\v2\models;

use \ArrayAccess;
use \ultracart\v2\ObjectSerializer;

/**
 * Activity Class Doc Comment
 *
 * Simple value holder for an activity record (action/metric/subject/ts/type/uuid)
 * backed by an internal associative container and exposed via ArrayAccess.
 *
 * @category Class
 * @package ultracart\v2
 * @author Swagger Codegen team
 * @link https://github.com/swagger-api/swagger-codegen
 */
class Activity implements ModelInterface, ArrayAccess
{
    const DISCRIMINATOR = null;

    /**
     * The original name of the model.
     *
     * @var string
     */
    protected static $swaggerModelName = 'Activity';

    /**
     * Local property name => swagger type, used for (de)serialization.
     *
     * @var string[]
     */
    protected static $swaggerTypes = [
        'action' => 'string',
        'metric' => 'string',
        'subject' => 'string',
        'ts' => 'int',
        'type' => 'string',
        'uuid' => 'string'
    ];

    /**
     * Local property name => swagger format, used for (de)serialization.
     *
     * @var string[]
     */
    protected static $swaggerFormats = [
        'action' => null,
        'metric' => null,
        'subject' => null,
        'ts' => 'int64',
        'type' => null,
        'uuid' => null
    ];

    /**
     * Local property name => original (wire) name.
     *
     * @var string[]
     */
    protected static $attributeMap = [
        'action' => 'action',
        'metric' => 'metric',
        'subject' => 'subject',
        'ts' => 'ts',
        'type' => 'type',
        'uuid' => 'uuid'
    ];

    /**
     * Local property name => setter method, used when deserializing responses.
     *
     * @var string[]
     */
    protected static $setters = [
        'action' => 'setAction',
        'metric' => 'setMetric',
        'subject' => 'setSubject',
        'ts' => 'setTs',
        'type' => 'setType',
        'uuid' => 'setUuid'
    ];

    /**
     * Local property name => getter method, used when serializing requests.
     *
     * @var string[]
     */
    protected static $getters = [
        'action' => 'getAction',
        'metric' => 'getMetric',
        'subject' => 'getSubject',
        'ts' => 'getTs',
        'type' => 'getType',
        'uuid' => 'getUuid'
    ];

    /**
     * Array of property to type mappings. Used for (de)serialization
     *
     * @return array
     */
    public static function swaggerTypes()
    {
        return self::$swaggerTypes;
    }

    /**
     * Array of property to format mappings. Used for (de)serialization
     *
     * @return array
     */
    public static function swaggerFormats()
    {
        return self::$swaggerFormats;
    }

    /**
     * Array of attributes where the key is the local name,
     * and the value is the original name
     *
     * @return array
     */
    public static function attributeMap()
    {
        return self::$attributeMap;
    }

    /**
     * Array of attributes to setter functions (for deserialization of responses)
     *
     * @return array
     */
    public static function setters()
    {
        return self::$setters;
    }

    /**
     * Array of attributes to getter functions (for serialization of requests)
     *
     * @return array
     */
    public static function getters()
    {
        return self::$getters;
    }

    /**
     * The original name of the model.
     *
     * @return string
     */
    public function getModelName()
    {
        return self::$swaggerModelName;
    }

    /**
     * Property values keyed by local property name.
     *
     * @var mixed[]
     */
    protected $container = [];

    /**
     * Constructor
     *
     * @param mixed[] $data Associated array of property values
     *                      initializing the model
     */
    public function __construct(array $data = null)
    {
        // Seed every known property from $data; absent keys default to null
        // (isset() on a null $data array is safely false for every key).
        foreach (array_keys(self::$attributeMap) as $property) {
            $this->container[$property] = isset($data[$property]) ? $data[$property] : null;
        }
    }

    /**
     * Show all the invalid properties with reasons.
     *
     * @return array invalid properties with reasons
     */
    public function listInvalidProperties()
    {
        // This model defines no validation constraints.
        return [];
    }

    /**
     * Validate all the properties in the model
     * return true if all passed
     *
     * @return bool True if all properties are valid
     */
    public function valid()
    {
        return count($this->listInvalidProperties()) === 0;
    }

    /**
     * Gets action
     *
     * @return string
     */
    public function getAction()
    {
        return $this->container['action'];
    }

    /**
     * Sets action
     *
     * @param string $action action
     *
     * @return $this
     */
    public function setAction($action)
    {
        $this->container['action'] = $action;
        return $this;
    }

    /**
     * Gets metric
     *
     * @return string
     */
    public function getMetric()
    {
        return $this->container['metric'];
    }

    /**
     * Sets metric
     *
     * @param string $metric metric
     *
     * @return $this
     */
    public function setMetric($metric)
    {
        $this->container['metric'] = $metric;
        return $this;
    }

    /**
     * Gets subject
     *
     * @return string
     */
    public function getSubject()
    {
        return $this->container['subject'];
    }

    /**
     * Sets subject
     *
     * @param string $subject subject
     *
     * @return $this
     */
    public function setSubject($subject)
    {
        $this->container['subject'] = $subject;
        return $this;
    }

    /**
     * Gets ts
     *
     * @return int
     */
    public function getTs()
    {
        return $this->container['ts'];
    }

    /**
     * Sets ts
     *
     * @param int $ts ts
     *
     * @return $this
     */
    public function setTs($ts)
    {
        $this->container['ts'] = $ts;
        return $this;
    }

    /**
     * Gets type
     *
     * @return string
     */
    public function getType()
    {
        return $this->container['type'];
    }

    /**
     * Sets type
     *
     * @param string $type type
     *
     * @return $this
     */
    public function setType($type)
    {
        $this->container['type'] = $type;
        return $this;
    }

    /**
     * Gets uuid
     *
     * @return string
     */
    public function getUuid()
    {
        return $this->container['uuid'];
    }

    /**
     * Sets uuid
     *
     * @param string $uuid uuid
     *
     * @return $this
     */
    public function setUuid($uuid)
    {
        $this->container['uuid'] = $uuid;
        return $this;
    }

    /**
     * Returns true if offset exists. False otherwise.
     *
     * @param integer $offset Offset
     *
     * @return boolean
     */
    public function offsetExists($offset)
    {
        return isset($this->container[$offset]);
    }

    /**
     * Gets offset.
     *
     * @param integer $offset Offset
     *
     * @return mixed
     */
    public function offsetGet($offset)
    {
        return isset($this->container[$offset]) ? $this->container[$offset] : null;
    }

    /**
     * Sets value based on offset.
     *
     * @param integer $offset Offset
     * @param mixed   $value  Value to be set
     *
     * @return void
     */
    public function offsetSet($offset, $value)
    {
        if (is_null($offset)) {
            // A null offset appends, mirroring plain PHP array semantics.
            $this->container[] = $value;
        } else {
            $this->container[$offset] = $value;
        }
    }

    /**
     * Unsets offset.
     *
     * @param integer $offset Offset
     *
     * @return void
     */
    public function offsetUnset($offset)
    {
        unset($this->container[$offset]);
    }

    /**
     * Gets the string presentation of the object
     *
     * @return string
     */
    public function __toString()
    {
        if (defined('JSON_PRETTY_PRINT')) {
            // Pretty-print when available (PHP >= 5.4).
            return json_encode(
                ObjectSerializer::sanitizeForSerialization($this),
                JSON_PRETTY_PRINT
            );
        }

        return json_encode(ObjectSerializer::sanitizeForSerialization($this));
    }
}
Java
# Evelyna oligantha Poepp. & Endl. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
Java
using System;
using System.Linq.Expressions;

namespace storagecore.EntityFrameworkCore.Query
{
    /// <summary>
    /// Accumulates a boolean predicate over <typeparamref name="TEntity"/>,
    /// AND-combining every predicate added via <see cref="AddExpression"/>.
    /// </summary>
    public class Filter<TEntity>
    {
        /// <summary>
        /// Creates a filter seeded with <paramref name="expression"/> (may be null,
        /// in which case the first <see cref="AddExpression"/> call sets it).
        /// </summary>
        public Filter(Expression<Func<TEntity, bool>> expression)
        {
            Expression = expression;
        }

        /// <summary>The combined predicate; null until an expression has been set.</summary>
        public Expression<Func<TEntity, bool>> Expression { get; private set; }

        /// <summary>
        /// ANDs <paramref name="newExpression"/> into the current <see cref="Expression"/>.
        /// </summary>
        /// <param name="newExpression">Predicate to combine; must not be null.</param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when <paramref name="newExpression"/> is null.
        /// </exception>
        public void AddExpression(Expression<Func<TEntity, bool>> newExpression)
        {
            if (newExpression == null)
                throw new ArgumentNullException(nameof(newExpression), $"{nameof(newExpression)} is null.");

            if (Expression == null)
            {
                // BUG FIX: the original assigned here and then fell through to the
                // combining code, producing "expr && expr" (the new expression ANDed
                // with itself — redundant and double-evaluating the predicate).
                // The first expression must simply be stored as-is.
                Expression = newExpression;
                return;
            }

            // Rebind both lambda bodies onto one shared parameter before combining;
            // the two lambdas otherwise carry distinct parameter instances that
            // would not unify inside a single expression tree.
            var parameter = System.Linq.Expressions.Expression.Parameter(typeof(TEntity));

            var leftVisitor = new ReplaceExpressionVisitor(newExpression.Parameters[0], parameter);
            var left = leftVisitor.Visit(newExpression.Body);

            var rightVisitor = new ReplaceExpressionVisitor(Expression.Parameters[0], parameter);
            var right = rightVisitor.Visit(Expression.Body);

            Expression = System.Linq.Expressions.Expression.Lambda<Func<TEntity, bool>>(
                System.Linq.Expressions.Expression.AndAlso(left, right), parameter);
        }
    }
}
Java
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} {-# OPTIONS_GHC -Wall #-} -- TODO: Complex Numbers {-| Embeds Fortran's type system in Haskell via the 'D' GADT. == Note: Phantom Types and GADTs Lots of the data types in this module are parameterised by phantom types. These are types which appear at the type-level, but not at the value level. They are there to make things more type-safe. In addition, a lot of the data types are GADTs. In a phantom-type-indexed GADT, the phantom type often restricts which GADT constructors a particular value may be an instance of. This is very useful for restricting value-level terms based on type-level information. -} module Language.Fortran.Model.Types where import Data.Int (Int16, Int32, Int64, Int8) import Data.List (intersperse) import Data.Monoid (Endo (..)) import Data.Typeable (Typeable) import Data.Word (Word8) import Data.Singletons.TypeLits import Data.Vinyl hiding (Field) import Data.Vinyl.Functor import Language.Expression.Pretty import Language.Fortran.Model.Singletons -------------------------------------------------------------------------------- -- * Fortran Types {-| This is the main embedding of Fortran types. A value of type @D a@ represents the Fortran type which corresponds to the Haskell type @a@. @a@ is a phantom type parameter. There is at most one instance of @D a@ for each @a@. This means that a value of type @D a@ acts as a kind of proof that it possible to have a Fortran type corresponding to the Haskell type @a@ -- and that when you match on @D a@ knowing the particular @a@ you have, you know which constructor you will get. 
This is a nice property because it means that GHC (with @-fwarn-incomplete-patterns@) will not warn when you match on an impossible case. It eliminates situations where you'd otherwise write @error "impossible: ..."@. * @'DPrim' p :: D ('PrimS' a)@ is for primitive types. It contains a value @p@ of type @'Prim' p k a@ for some @p@, @k@, @a@. When matching on something of type @D ('PrimS' a)@, you know it can only contain a primitive type. * @'DArray' i v :: D ('Array' i v)@ is for arrays. It contains instances of @'Index' i@ and @'ArrValue' a@. @'Index' i@ is a proof that @i@ can be used as an index, and @'ArrValue' a@ is a proof that @a@ can be stored in arrays. * @'DData' s xs :: D ('Record' name fs)@ is for user-defined data types. The type has a name, represented at the type level by the type parameter @name@ of kind 'Symbol'. The constructor contains @s :: 'SSymbol' name@, which acts as a sort of value-level representation of the name. 'SSymbol' is from the @singletons@ library. It also contains @xs :: 'Rec' ('Field' D) fs@. @fs@ is a type-level list of pairs, pairing field names with field types. @'Field' D '(fname, b)@ is a value-level pair of @'SSymbol' fname@ and @D b@. The vinyl record is a list of fields, one for each pair in @fs@. 
-}
data D a where
  DPrim :: Prim p k a -> D (PrimS a)
  -- ^ A primitive type, witnessed by its 'Prim' value.
  DArray :: Index i -> ArrValue a -> D (Array i a)
  -- ^ An array: a valid index type plus a valid element type.
  DData :: SSymbol name -> Rec (Field D) fs -> D (Record name fs)
  -- ^ A user-defined record: a type-level name plus one 'Field' per member.

--------------------------------------------------------------------------------
-- * Semantic Types

-- Fortran logicals are fixed-width integers at runtime; each BoolN newtype
-- pairs a logical with its storage width so the widths stay distinct types.
newtype Bool8 = Bool8 { getBool8 :: Int8 } deriving (Show, Num, Eq, Typeable)
newtype Bool16 = Bool16 { getBool16 :: Int16 } deriving (Show, Num, Eq, Typeable)
newtype Bool32 = Bool32 { getBool32 :: Int32 } deriving (Show, Num, Eq, Typeable)
newtype Bool64 = Bool64 { getBool64 :: Int64 } deriving (Show, Num, Eq, Typeable)

-- | An 8-bit Fortran character, stored as a 'Word8'.
newtype Char8 = Char8 { getChar8 :: Word8 } deriving (Show, Num, Eq, Typeable)

{-| This newtype wrapper is used in 'DPrim' for semantic primitive types. This
means that when matching on something of type @'D' ('PrimS' a)@, we know it
can't be an array or a record.
-}
newtype PrimS a = PrimS { getPrimS :: a } deriving (Show, Eq, Typeable)

--------------------------------------------------------------------------------
-- * Primitive Types

{-| Lists the allowed primitive Fortran types. For example, @'PInt8' :: 'Prim'
'P8 ''BTInt' 'Int8'@ represents 8-bit integers. 'Prim' has three phantom type
parameters: precision, base type and semantic Haskell type. Precision is the
number of bits used to store values of that type. The base type represents the
corresponding Fortran base type, e.g. @integer@ or @real@. Constructors are
only provided for those Fortran types which are semantically valid, so for
example no constructor is provided for a 16-bit real. A value of type
@'Prim' p k a@ can be seen as a proof that there is some Fortran primitive
type with those parameters.
-}
data Prim p k a where
  PInt8   :: Prim 'P8  'BTInt     Int8
  PInt16  :: Prim 'P16 'BTInt     Int16
  PInt32  :: Prim 'P32 'BTInt     Int32
  PInt64  :: Prim 'P64 'BTInt     Int64
  PBool8  :: Prim 'P8  'BTLogical Bool8
  PBool16 :: Prim 'P16 'BTLogical Bool16
  PBool32 :: Prim 'P32 'BTLogical Bool32
  PBool64 :: Prim 'P64 'BTLogical Bool64
  PFloat  :: Prim 'P32 'BTReal    Float
  PDouble :: Prim 'P64 'BTReal    Double
  PChar   :: Prim 'P8  'BTChar    Char8

--------------------------------------------------------------------------------
-- * Arrays

-- | Specifies which types can be used as array indices. Only integer
-- primitives qualify (note the 'BTInt constraint on the wrapped 'Prim').
data Index a where
  Index :: Prim p 'BTInt a -> Index (PrimS a)

-- | Specifies which types can be stored in arrays. Currently arrays of arrays
-- are not supported: only primitives and records of array-storable fields.
data ArrValue a where
  ArrPrim :: Prim p k a -> ArrValue (PrimS a)
  ArrData :: SSymbol name -> Rec (Field ArrValue) fs -> ArrValue (Record name fs)

-- | An array with a phantom index type. Mostly used at the type-level to
-- constrain instances of @'D' (Array i a)@ etc.
newtype Array i a = Array [a]

--------------------------------------------------------------------------------
-- * Records

-- | A field over a pair of name and value type.
data Field f field where
  Field :: SSymbol name -> f a -> Field f '(name, a)

-- | A type of records with the given @name@ and @fields@. Mostly used at the
-- type level to constrain instances of @'D' (Record name fields)@ etc.
data Record name fields where
  Record :: SSymbol name -> Rec (Field Identity) fields -> Record name fields

--------------------------------------------------------------------------------
-- * Combinators

-- | Any Fortran index type is a valid Fortran type.
dIndex :: Index i -> D i
dIndex (Index p) = DPrim p

-- | Anything that can be stored in Fortran arrays is a valid Fortran type.
dArrValue :: ArrValue a -> D a
dArrValue (ArrPrim p) = DPrim p
-- Records are array-storable field-wise, so recurse over each field.
dArrValue (ArrData nameSym fieldArrValues) =
  DData nameSym (rmap (overField' dArrValue) fieldArrValues)

-- | Given a field with known contents, we can change the functor and value
-- type.
overField :: (f a -> g b) -> Field f '(name, a) -> Field g '(name, b)
overField f (Field n x) = Field n (f x)

-- | Given a field with unknown contents, we can change the functor but not the
-- value type (the transformation must be parametric in @a@).
overField' :: (forall a. f a -> g a) -> Field f nv -> Field g nv
overField' f (Field n x) = Field n (f x)

-- | Effectful version of 'overField'': apply a natural transformation into a
-- functor @t@ and rebuild the field inside it.
traverseField' :: (Functor t) => (forall a. f a -> t (g a)) -> Field f nv -> t (Field g nv)
traverseField' f (Field n x) = Field n <$> f x

-- | Combine two fields over the same name-value pair but (potentially)
-- different functors.
zipFieldsWith :: (forall a. f a -> g a -> h a) -> Field f nv -> Field g nv -> Field h nv
zipFieldsWith f (Field _ x) (Field n y) = Field n (f x y)

-- | Three-way version of 'zipFieldsWith'.
zip3FieldsWith
  :: (forall a. f a -> g a -> h a -> i a)
  -> Field f nv
  -> Field g nv
  -> Field h nv
  -> Field i nv
zip3FieldsWith f (Field _ x) (Field _ y) (Field n z) = Field n (f x y z)

--------------------------------------------------------------------------------
--  Pretty Printing

instance Pretty1 (Prim p k) where
  prettys1Prec p = \case
    PInt8   -> showString "integer8"
    PInt16  -> showString "integer16"
    PInt32  -> showString "integer32"
    PInt64  -> showString "integer64"
    PFloat  -> showString "real"
    -- "double precision" is two words, so parenthesize in tight contexts.
    PDouble -> showParen (p > 8) $ showString "double precision"
    PBool8  -> showString "logical8"
    PBool16 -> showString "logical16"
    PBool32 -> showString "logical32"
    PBool64 -> showString "logical64"
    PChar   -> showString "character"

-- Array-storable values print exactly as the Fortran type they embed.
instance Pretty1 ArrValue where
  prettys1Prec p = prettys1Prec p . dArrValue

-- A field prints as "<type> <name>", Fortran declaration order.
instance (Pretty1 f) => Pretty1 (Field f) where
  prettys1Prec _ = \case
    Field fname x ->
      prettys1Prec 0 x . showString " " .
      withKnownSymbol fname (showString (symbolVal fname))

-- | e.g.
-- "type custom_type { character a, integer array b }"
instance Pretty1 D where
  prettys1Prec p = \case
    DPrim px -> prettys1Prec p px
    DArray _ pv -> prettys1Prec p pv . showString " array"
    DData rname fields ->
      showParen (p > 8) $
      showString "type " .
      withKnownSymbol rname (showString (symbolVal rname)) .
      showString "{ " .
      -- Render each field to a difference-list (Endo ShowS), join with ", ",
      -- then splice the concatenation back into the surrounding ShowS chain.
      appEndo
      ( mconcat .
        intersperse (Endo $ showString ", ") .
        recordToList .
        rmap (Const . Endo . prettys1Prec 0) $
        fields) .
      showString " }"
Java