target
stringlengths 20
113k
| src_fm
stringlengths 11
86.3k
| src_fm_fc
stringlengths 21
86.4k
| src_fm_fc_co
stringlengths 30
86.4k
| src_fm_fc_ms
stringlengths 42
86.8k
| src_fm_fc_ms_ff
stringlengths 43
86.8k
|
|---|---|---|---|---|---|
/** Verifies that getOutputResourceFields reports exactly the field names declared in the step meta. */
@Test
public void testGetOutputResourceFields() throws Exception {
  // Arrange: two mocked XML fields with known names, wired into the mocked meta.
  XMLField fieldA = mock( XMLField.class );
  XMLField fieldB = mock( XMLField.class );
  when( fieldA.getFieldName() ).thenReturn( "field1" );
  when( fieldB.getFieldName() ).thenReturn( "field2" );
  XMLField[] outputFields = new XMLField[] { fieldA, fieldB };
  when( meta.getOutputFields() ).thenReturn( outputFields );

  // Act
  Set<String> outputResourceFields = analyzer.getOutputResourceFields( meta );

  // Assert: same cardinality and every declared field name is present.
  assertEquals( outputFields.length, outputResourceFields.size() );
  for ( XMLField outputField : outputFields ) {
    assertTrue( outputResourceFields.contains( outputField.getFieldName() ) );
  }
}
|
/**
 * Collects the names of all fields this XML Output step writes to its external resource.
 *
 * @param meta the step metadata holding the configured output fields
 * @return a set of the configured output field names (duplicates collapse into one entry)
 */
@Override
public Set<String> getOutputResourceFields( XMLOutputMeta meta ) {
  Set<String> fieldNames = new HashSet<>();
  for ( XMLField outputField : meta.getOutputFields() ) {
    fieldNames.add( outputField.getFieldName() );
  }
  return fieldNames;
}
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public Set<String> getOutputResourceFields( XMLOutputMeta meta ) { Set<String> fields = new HashSet<>(); XMLField[] outputFields = meta.getOutputFields(); for ( int i = 0; i < outputFields.length; i++ ) { XMLField outputField = outputFields[ i ]; fields.add( outputField.getFieldName() ); } return fields; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public Set<String> getOutputResourceFields( XMLOutputMeta meta ) { Set<String> fields = new HashSet<>(); XMLField[] outputFields = meta.getOutputFields(); for ( int i = 0; i < outputFields.length; i++ ) { XMLField outputField = outputFields[ i ]; fields.add( outputField.getFieldName() ); } return fields; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public Set<String> getOutputResourceFields( XMLOutputMeta meta ) { Set<String> fields = new HashSet<>(); XMLField[] outputFields = meta.getOutputFields(); for ( int i = 0; i < outputFields.length; i++ ) { XMLField outputField = outputFields[ i ]; fields.add( outputField.getFieldName() ); } return fields; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public Set<String> getOutputResourceFields( XMLOutputMeta meta ) { Set<String> fields = new HashSet<>(); XMLField[] outputFields = meta.getOutputFields(); for ( int i = 0; i < outputFields.length; i++ ) { XMLField outputField = outputFields[ i ]; fields.add( outputField.getFieldName() ); } return fields; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
@Test public void testCreateResourceNode() throws Exception { IExternalResourceInfo res = mock( IExternalResourceInfo.class ); when( res.getName() ).thenReturn( "file: IMetaverseNode resourceNode = analyzer.createResourceNode( res ); assertNotNull( resourceNode ); assertEquals( DictionaryConst.NODE_TYPE_FILE, resourceNode.getType() ); }
|
/**
 * Creates a metaverse node representing the external resource by delegating to
 * {@code createFileNode} with the resource's name and this analyzer's descriptor.
 *
 * @param resource the external resource to represent
 * @return the file node built for the resource
 * @throws MetaverseException if node creation fails
 */
@Override public IMetaverseNode createResourceNode( IExternalResourceInfo resource ) throws MetaverseException { return createFileNode( resource.getName(), getDescriptor() ); }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public IMetaverseNode createResourceNode( IExternalResourceInfo resource ) throws MetaverseException { return createFileNode( resource.getName(), getDescriptor() ); } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public IMetaverseNode createResourceNode( IExternalResourceInfo resource ) throws MetaverseException { return createFileNode( resource.getName(), getDescriptor() ); } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public IMetaverseNode createResourceNode( IExternalResourceInfo resource ) throws MetaverseException { return createFileNode( resource.getName(), getDescriptor() ); } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public IMetaverseNode createResourceNode( IExternalResourceInfo resource ) throws MetaverseException { return createFileNode( resource.getName(), getDescriptor() ); } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
/** An output-only step has no input resource node type, so the analyzer must return null. */
@Test public void testGetResourceInputNodeType() throws Exception { assertNull( analyzer.getResourceInputNodeType() ); }
|
/**
 * Returns null: this step only writes to its resource, so there is no
 * input-side resource node type to report.
 */
@Override public String getResourceInputNodeType() { return null; }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public String getResourceInputNodeType() { return null; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public String getResourceInputNodeType() { return null; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public String getResourceInputNodeType() { return null; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public String getResourceInputNodeType() { return null; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
/** The output resource node type must be the file-field dictionary constant. */
@Test public void testGetResourceOutputNodeType() throws Exception { assertEquals( DictionaryConst.NODE_TYPE_FILE_FIELD, analyzer.getResourceOutputNodeType() ); }
|
/**
 * Returns the node type used for fields written to the output resource:
 * the file-field dictionary constant.
 */
@Override public String getResourceOutputNodeType() { return DictionaryConst.NODE_TYPE_FILE_FIELD; }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public String getResourceOutputNodeType() { return DictionaryConst.NODE_TYPE_FILE_FIELD; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public String getResourceOutputNodeType() { return DictionaryConst.NODE_TYPE_FILE_FIELD; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public String getResourceOutputNodeType() { return DictionaryConst.NODE_TYPE_FILE_FIELD; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public String getResourceOutputNodeType() { return DictionaryConst.NODE_TYPE_FILE_FIELD; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
/** The XML Output step is an output step, so isOutput() must be true. */
@Test public void testIsOutput() throws Exception { assertTrue( analyzer.isOutput() ); }
|
/** Always true: this analyzer covers a step that writes an external resource. */
@Override public boolean isOutput() { return true; }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public boolean isOutput() { return true; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public boolean isOutput() { return true; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public boolean isOutput() { return true; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public boolean isOutput() { return true; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
/**
 * Verifies insertJobEntryDatabase inserts one row into R_JOBENTRY_DATABASE when no
 * existing link is found (the stubbed lookup returns a null-integer row), and that the
 * captured row carries the three integer columns ID_JOB=234, ID_JOBENTRY=345,
 * ID_DATABASE=456 in that order.
 */
@Test public void testInsertJobEntryDatabase() throws KettleException { doReturn( getNullIntegerRow() ).when( repo.connectionDelegate ).getOneRow( anyString(), anyString(), any( ObjectId.class ) ); ArgumentCaptor<String> argumentTableName = ArgumentCaptor.forClass( String.class ); ArgumentCaptor<RowMetaAndData> argumentTableData = ArgumentCaptor.forClass( RowMetaAndData.class ); doNothing().when( repo.connectionDelegate ).insertTableRow( argumentTableName.capture(), argumentTableData.capture() ); repo.insertJobEntryDatabase( new LongObjectId( 234 ), new LongObjectId( 345 ), new LongObjectId( 456 ) ); RowMetaAndData insertRecord = argumentTableData.getValue(); assertEquals( KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE, argumentTableName.getValue() ); assertEquals( 3, insertRecord.size() ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 0 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, insertRecord.getValueMeta( 0 ).getName() ); assertEquals( Long.valueOf( 234 ), insertRecord.getInteger( 0 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 1 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY, insertRecord.getValueMeta( 1 ).getName() ); assertEquals( Long.valueOf( 345 ), insertRecord.getInteger( 1 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 2 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, insertRecord.getValueMeta( 2 ).getName() ); assertEquals( Long.valueOf( 456 ), insertRecord.getInteger( 2 ) ); }
|
/**
 * Links a job entry to a database connection in R_JOBENTRY_DATABASE.
 * The row is inserted only when no link is recorded yet for the job entry
 * (the lookup's first integer column is null); otherwise this is a no-op.
 *
 * @param id_job      id of the owning job
 * @param id_jobentry id of the job entry being linked
 * @param id_database id of the database connection
 * @throws KettleException if the lookup or insert fails
 */
public synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ) throws KettleException {
  RowMetaAndData existing = getJobEntryDatabase( id_jobentry );
  if ( existing.getInteger( 0 ) != null ) {
    // A link already exists for this job entry; nothing to do.
    return;
  }
  RowMetaAndData row = new RowMetaAndData();
  row.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB ), id_job );
  row.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY ), id_jobentry );
  row.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE ), id_database );
  connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE, row );
}
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ) throws KettleException { RowMetaAndData check = getJobEntryDatabase( id_jobentry ); if ( check.getInteger( 0 ) == null ) { RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB ), id_job ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY ), id_jobentry ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE ), id_database ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE, table ); } } }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ) throws KettleException { RowMetaAndData check = getJobEntryDatabase( id_jobentry ); if ( check.getInteger( 0 ) == null ) { RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB ), id_job ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY ), id_jobentry ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE ), id_database ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE, table ); } } KettleDatabaseRepository(); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ) throws KettleException { RowMetaAndData check = getJobEntryDatabase( id_jobentry ); if ( check.getInteger( 0 ) == null ) { RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB ), id_job ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY ), id_jobentry ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE ), id_database ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE, table ); } } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ) throws KettleException { RowMetaAndData check = getJobEntryDatabase( id_jobentry ); if ( check.getInteger( 0 ) == null ) { RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB ), id_job ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY ), id_jobentry ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE ), id_database ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE, table ); } } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); public KettleDatabaseRepositoryTransDelegate transDelegate; public KettleDatabaseRepositoryJobDelegate jobDelegate; public KettleDatabaseRepositoryDatabaseDelegate databaseDelegate; public KettleDatabaseRepositorySlaveServerDelegate slaveServerDelegate; public KettleDatabaseRepositoryClusterSchemaDelegate clusterSchemaDelegate; public KettleDatabaseRepositoryPartitionSchemaDelegate partitionSchemaDelegate; public KettleDatabaseRepositoryDirectoryDelegate directoryDelegate; public KettleDatabaseRepositoryConnectionDelegate connectionDelegate; public KettleDatabaseRepositoryUserDelegate userDelegate; public KettleDatabaseRepositoryConditionDelegate conditionDelegate; public KettleDatabaseRepositoryValueDelegate valueDelegate; public KettleDatabaseRepositoryNotePadDelegate notePadDelegate; public KettleDatabaseRepositoryStepDelegate stepDelegate; public KettleDatabaseRepositoryJobEntryDelegate jobEntryDelegate; public KettleDatabaseRepositoryMetaStoreDelegate metaStoreDelegate; public KettleDatabaseRepositoryMetaStore metaStore; }
|
/** Verifies the XML Output analyzer never classifies the step as an input step. */
@Test
public void testIsInput() throws Exception {
  final boolean reportedAsInput = analyzer.isInput();
  assertFalse( reportedAsInput );
}
|
/**
 * XML Output writes external resources rather than reading them, so this
 * analyzer never treats the step as an input.
 *
 * @return always {@code false}
 */
@Override
public boolean isInput() {
  return false;
}
|
// NOTE(review): the snippets below look like generated extraction residue — each type
// header is missing the `class` keyword, so they are not compilable as-is. They appear
// to mirror XMLOutputStepAnalyzer.isInput() at increasing levels of signature detail;
// kept byte-identical pending confirmation of how this dump is consumed.
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public boolean isInput() { return false; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public boolean isInput() { return false; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public boolean isInput() { return false; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override public boolean isInput() { return false; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
/** The XML Output analyzer reports no "used" fields, so the result must be null. */
@Test
public void testGetUsedFields() throws Exception {
  Set<StepField> usedFields = analyzer.getUsedFields( meta );
  assertNull( usedFields );
}
|
/**
 * The XML Output step does not consume incoming fields in the metaverse
 * sense, so no used-field set is reported.
 *
 * @param meta the step metadata (not consulted)
 * @return always {@code null} — callers (see testGetUsedFields) rely on this
 */
@Override
protected Set<StepField> getUsedFields( XMLOutputMeta meta ) {
  return null;
}
|
// NOTE(review): generated extraction residue — type headers are missing the `class`
// keyword and are not compilable. They mirror XMLOutputStepAnalyzer.getUsedFields()
// with progressively fuller signature listings; kept byte-identical.
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override protected Set<StepField> getUsedFields( XMLOutputMeta meta ) { return null; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override protected Set<StepField> getUsedFields( XMLOutputMeta meta ) { return null; } }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override protected Set<StepField> getUsedFields( XMLOutputMeta meta ) { return null; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
XMLOutputStepAnalyzer extends ExternalResourceStepAnalyzer<XMLOutputMeta> { @Override protected Set<StepField> getUsedFields( XMLOutputMeta meta ) { return null; } @Override Set<Class<? extends BaseStepMeta>> getSupportedSteps(); @Override IMetaverseNode createResourceNode( IExternalResourceInfo resource ); @Override String getResourceInputNodeType(); @Override String getResourceOutputNodeType(); @Override boolean isOutput(); @Override boolean isInput(); @Override Set<String> getOutputResourceFields( XMLOutputMeta meta ); }
|
/** Loads step metadata from a test XML node and checks every parsed value. */
@Test
public void testLoadAndGetXml() throws Exception {
  XMLOutputMeta loaded = new XMLOutputMeta();
  Node stepNode = getTestNode();
  DatabaseMeta databaseMeta = mock( DatabaseMeta.class );
  IMetaStore store = mock( IMetaStore.class );
  loaded.loadXML( stepNode, Collections.singletonList( databaseMeta ), store );
  assertXmlOutputMeta( loaded );
}
|
/**
 * Populates this meta from the step's XML node. The database list and
 * metastore are part of the StepMetaInterface contract but are not needed
 * here — every setting lives in the node itself.
 *
 * @param stepnode  XML node holding the step definition
 * @param databases shared database connections (unused)
 * @param metaStore metastore (unused)
 * @throws KettleXMLException if the node cannot be parsed
 */
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  readData( stepnode );
}
|
// NOTE(review): generated extraction residue — duplicated XMLOutputMeta signature
// listings with the `class` keyword missing from the header; not compilable as-is.
// Kept byte-identical pending confirmation of how this dump is consumed.
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode ); } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode ); } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode ); } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode ); } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
// Round-trip test: reads XMLOutputMeta from a mocked Repository (readRep), verifies every
// parsed attribute, then resets the mocks, saves it back (saveRep) and verifies each value
// is written under the same repository attribute key.
// NOTE(review): keys such as "file_extention" are intentionally misspelled — they match the
// historical repository schema and must not be "corrected" here or in the implementation.
@SuppressWarnings( "ConstantConditions" ) @Test public void testReadRep() throws Exception { XMLOutputMeta xmlOutputMeta = new XMLOutputMeta(); Repository rep = mock( Repository.class ); IMetaStore metastore = mock( IMetaStore.class ); DatabaseMeta dbMeta = mock( DatabaseMeta.class ); String encoding = "UTF-8"; String namespace = ""; String mainElement = "rows"; String repeatElement = "row"; String fileName = "repFileName"; StringObjectId oid = new StringObjectId( "oid" ); String fileExtension = "repxml"; boolean servletOutput = true; boolean newFile = true; long split = 100L; boolean addStepNbr = false; boolean addDate = false; boolean addTime = true; boolean specifyFormat = true; boolean omitNull = false; String dateTimeFormat = "yyyyMMdd"; boolean addToResult = true; boolean zipped = true; String contentType = "Element"; String fieldName = "aField"; String fieldElement = "field"; String fieldType = "String"; long fieldLength = 20L; long fieldPrecision = 0L; when( rep.getStepAttributeString( oid, "encoding" ) ).thenReturn( encoding ); when( rep.getStepAttributeString( oid, "name_space" ) ).thenReturn( namespace ); when( rep.getStepAttributeString( oid, "xml_main_element" ) ).thenReturn( mainElement ); when( rep.getStepAttributeString( oid, "xml_repeat_element" ) ).thenReturn( repeatElement ); when( rep.getStepAttributeString( oid, "file_name" ) ).thenReturn( fileName ); when( rep.getStepAttributeString( oid, "file_extention" ) ).thenReturn( fileExtension ); when( rep.getStepAttributeBoolean( oid, "file_servlet_output" ) ).thenReturn( servletOutput ); when( rep.getStepAttributeBoolean( oid, "do_not_open_newfile_init" ) ).thenReturn( newFile ); when( rep.getStepAttributeInteger( oid, "file_split" ) ).thenReturn( split ); when( rep.getStepAttributeBoolean( oid, "file_add_stepnr" ) ).thenReturn( addStepNbr ); when( rep.getStepAttributeBoolean( oid, "file_add_date" ) ).thenReturn( addDate ); when( rep.getStepAttributeBoolean( oid, "file_add_time" ) ).thenReturn( 
addTime ); when( rep.getStepAttributeBoolean( oid, "SpecifyFormat" ) ).thenReturn( specifyFormat ); when( rep.getStepAttributeBoolean( oid, "omit_null_values" ) ).thenReturn( omitNull ); when( rep.getStepAttributeString( oid, "date_time_format" ) ).thenReturn( dateTimeFormat ); when( rep.getStepAttributeBoolean( oid, "add_to_result_filenames" ) ).thenReturn( addToResult ); when( rep.getStepAttributeBoolean( oid, "file_zipped" ) ).thenReturn( zipped ); when( rep.countNrStepAttributes( oid, "field_name" ) ).thenReturn( 1 ); when( rep.getStepAttributeString( oid, 0, "field_content_type" ) ).thenReturn( contentType ); when( rep.getStepAttributeString( oid, 0, "field_name" ) ).thenReturn( fieldName ); when( rep.getStepAttributeString( oid, 0, "field_element" ) ).thenReturn( fieldElement ); when( rep.getStepAttributeString( oid, 0, "field_type" ) ).thenReturn( fieldType ); when( rep.getStepAttributeString( oid, 0, "field_format" ) ).thenReturn( null ); when( rep.getStepAttributeString( oid, 0, "field_currency" ) ).thenReturn( null ); when( rep.getStepAttributeString( oid, 0, "field_decimal" ) ).thenReturn( null ); when( rep.getStepAttributeString( oid, 0, "field_group" ) ).thenReturn( null ); when( rep.getStepAttributeString( oid, 0, "field_nullif" ) ).thenReturn( null ); when( rep.getStepAttributeInteger( oid, 0, "field_length" ) ).thenReturn( fieldLength ); when( rep.getStepAttributeInteger( oid, 0, "field_precision" ) ).thenReturn( fieldPrecision ); xmlOutputMeta.readRep( rep, metastore, oid, Collections.singletonList( dbMeta ) ); assertEquals( fileName, xmlOutputMeta.getFileName() ); assertTrue( xmlOutputMeta.isDoNotOpenNewFileInit() ); assertTrue( xmlOutputMeta.isServletOutput() ); assertEquals( fileExtension, xmlOutputMeta.getExtension() ); assertFalse( xmlOutputMeta.isStepNrInFilename() ); assertFalse( xmlOutputMeta.isDateInFilename() ); assertTrue( xmlOutputMeta.isTimeInFilename() ); assertTrue( xmlOutputMeta.isSpecifyFormat() ); assertEquals( dateTimeFormat, 
xmlOutputMeta.getDateTimeFormat() ); assertTrue( xmlOutputMeta.isAddToResultFiles() ); assertTrue( xmlOutputMeta.isZipped() ); assertEquals( encoding, xmlOutputMeta.getEncoding() ); assertTrue( StringUtil.isEmpty( xmlOutputMeta.getNameSpace() ) ); assertEquals( mainElement, xmlOutputMeta.getMainElement() ); assertEquals( repeatElement, xmlOutputMeta.getRepeatElement() ); assertEquals( split, xmlOutputMeta.getSplitEvery() ); assertFalse( xmlOutputMeta.isOmitNullValues() ); XMLField[] outputFields = xmlOutputMeta.getOutputFields(); assertEquals( 1, outputFields.length ); assertEquals( fieldName, outputFields[0].getFieldName() ); assertEquals( XMLField.ContentType.Element, outputFields[0].getContentType() ); assertEquals( fieldElement, outputFields[0].getElementName() ); assertEquals( fieldLength, outputFields[0].getLength() ); assertEquals( fieldPrecision, outputFields[0].getPrecision() ); Mockito.reset( rep, metastore ); StringObjectId transid = new StringObjectId( "transid" ); xmlOutputMeta.saveRep( rep, metastore, transid, oid ); verify( rep ).saveStepAttribute( transid, oid, "encoding", encoding ); verify( rep ).saveStepAttribute( transid, oid, "name_space", namespace ); verify( rep ).saveStepAttribute( transid, oid, "xml_main_element", mainElement ); verify( rep ).saveStepAttribute( transid, oid, "xml_repeat_element", repeatElement ); verify( rep ).saveStepAttribute( transid, oid, "file_name", fileName ); verify( rep ).saveStepAttribute( transid, oid, "file_extention", fileExtension ); verify( rep ).saveStepAttribute( transid, oid, "file_servlet_output", servletOutput ); verify( rep ).saveStepAttribute( transid, oid, "do_not_open_newfile_init", newFile ); verify( rep ).saveStepAttribute( transid, oid, "file_split", split ); verify( rep ).saveStepAttribute( transid, oid, "file_add_stepnr", addStepNbr ); verify( rep ).saveStepAttribute( transid, oid, "file_add_date", addDate ); verify( rep ).saveStepAttribute( transid, oid, "file_add_time", addTime ); verify( rep 
).saveStepAttribute( transid, oid, "SpecifyFormat", specifyFormat ); verify( rep ).saveStepAttribute( transid, oid, "omit_null_values", omitNull ); verify( rep ).saveStepAttribute( transid, oid, "date_time_format", dateTimeFormat ); verify( rep ).saveStepAttribute( transid, oid, "add_to_result_filenames", addToResult ); verify( rep ).saveStepAttribute( transid, oid, "file_zipped", zipped ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_content_type", contentType ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_name", fieldName ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_element", fieldElement ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_type", fieldType ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_format", null ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_currency", null ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_decimal", null ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_group", null ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_nullif", null ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_length", fieldLength ); verify( rep ).saveStepAttribute( transid, oid, 0, "field_precision", fieldPrecision ); Mockito.verifyNoMoreInteractions( rep, metastore ); }
|
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( rep.getStepAttributeString( id_step, i, "field_type" ) ); 
outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( 
rep.getStepAttributeString( id_step, i, "field_type" ) ); outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( 
rep.getStepAttributeString( id_step, i, "field_type" ) ); outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( 
rep.getStepAttributeString( id_step, i, "field_type" ) ); outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String 
buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( 
rep.getStepAttributeString( id_step, i, "field_type" ) ); outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String 
buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
@Test public void testGetNewline() throws Exception { XMLOutputMeta xmlOutputMeta = new XMLOutputMeta(); assertEquals( "\r\n", xmlOutputMeta.getNewLine( "DOS" ) ); assertEquals( "\n", xmlOutputMeta.getNewLine( "UNIX" ) ); assertEquals( System.getProperty( "line.separator" ), xmlOutputMeta.getNewLine( null ) ); }
|
public String getNewLine( String fformat ) { String nl = System.getProperty( "line.separator" ); if ( fformat != null ) { if ( fformat.equalsIgnoreCase( "DOS" ) ) { nl = "\r\n"; } else if ( fformat.equalsIgnoreCase( "UNIX" ) ) { nl = "\n"; } } return nl; }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String getNewLine( String fformat ) { String nl = System.getProperty( "line.separator" ); if ( fformat != null ) { if ( fformat.equalsIgnoreCase( "DOS" ) ) { nl = "\r\n"; } else if ( fformat.equalsIgnoreCase( "UNIX" ) ) { nl = "\n"; } } return nl; } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String getNewLine( String fformat ) { String nl = System.getProperty( "line.separator" ); if ( fformat != null ) { if ( fformat.equalsIgnoreCase( "DOS" ) ) { nl = "\r\n"; } else if ( fformat.equalsIgnoreCase( "UNIX" ) ) { nl = "\n"; } } return nl; } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String getNewLine( String fformat ) { String nl = System.getProperty( "line.separator" ); if ( fformat != null ) { if ( fformat.equalsIgnoreCase( "DOS" ) ) { nl = "\r\n"; } else if ( fformat.equalsIgnoreCase( "UNIX" ) ) { nl = "\n"; } } return nl; } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String getNewLine( String fformat ) { String nl = System.getProperty( "line.separator" ); if ( fformat != null ) { if ( fformat.equalsIgnoreCase( "DOS" ) ) { nl = "\r\n"; } else if ( fformat.equalsIgnoreCase( "UNIX" ) ) { nl = "\n"; } } return nl; } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
@Test public void testClone() throws Exception { XMLOutputMeta xmlOutputMeta = new XMLOutputMeta(); Node stepnode = getTestNode(); DatabaseMeta dbMeta = mock( DatabaseMeta.class ); IMetaStore metaStore = mock( IMetaStore.class ); xmlOutputMeta.loadXML( stepnode, Collections.singletonList( dbMeta ), metaStore ); XMLOutputMeta cloned = (XMLOutputMeta) xmlOutputMeta.clone(); assertNotSame( cloned, xmlOutputMeta ); assertXmlOutputMeta( cloned ); }
|
public Object clone() { XMLOutputMeta retval = (XMLOutputMeta) super.clone(); int nrfields = outputFields.length; retval.allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { retval.outputFields[i] = (XMLField) outputFields[i].clone(); } return retval; }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public Object clone() { XMLOutputMeta retval = (XMLOutputMeta) super.clone(); int nrfields = outputFields.length; retval.allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { retval.outputFields[i] = (XMLField) outputFields[i].clone(); } return retval; } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public Object clone() { XMLOutputMeta retval = (XMLOutputMeta) super.clone(); int nrfields = outputFields.length; retval.allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { retval.outputFields[i] = (XMLField) outputFields[i].clone(); } return retval; } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public Object clone() { XMLOutputMeta retval = (XMLOutputMeta) super.clone(); int nrfields = outputFields.length; retval.allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { retval.outputFields[i] = (XMLField) outputFields[i].clone(); } return retval; } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public Object clone() { XMLOutputMeta retval = (XMLOutputMeta) super.clone(); int nrfields = outputFields.length; retval.allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { retval.outputFields[i] = (XMLField) outputFields[i].clone(); } return retval; } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
@Test
public void testSetDefault() throws Exception {
  // Verify every default value installed by setDefault().
  XMLOutputMeta meta = new XMLOutputMeta();
  meta.setDefault();
  // File-naming defaults.
  assertEquals( "file", meta.getFileName() );
  assertEquals( "xml", meta.getExtension() );
  assertFalse( meta.isStepNrInFilename() );
  assertFalse( meta.isDoNotOpenNewFileInit() );
  assertFalse( meta.isDateInFilename() );
  assertFalse( meta.isTimeInFilename() );
  assertFalse( meta.isAddToResultFiles() );
  assertFalse( meta.isZipped() );
  assertEquals( 0, meta.getSplitEvery() );
  // Content defaults.
  assertEquals( Const.XML_ENCODING, meta.getEncoding() );
  assertEquals( "", meta.getNameSpace() );
  assertNull( meta.getDateTimeFormat() );
  assertFalse( meta.isSpecifyFormat() );
  assertFalse( meta.isOmitNullValues() );
  assertEquals( "Rows", meta.getMainElement() );
  assertEquals( "Row", meta.getRepeatElement() );
}
|
/**
 * Resets this step's metadata to its default configuration: output file
 * "file.xml", XML encoding from {@code Const.XML_ENCODING}, root element
 * "Rows", row element "Row", all boolean options off, and no output fields.
 */
public void setDefault() {
  // File-naming defaults.
  fileName = "file";
  extension = "xml";
  stepNrInFilename = false;
  doNotOpenNewFileInit = false;
  dateInFilename = false;
  timeInFilename = false;
  addToResultFilenames = false;
  zipped = false;
  splitEvery = 0;
  // Content defaults.
  encoding = Const.XML_ENCODING;
  nameSpace = "";
  date_time_format = null;
  SpecifyFormat = false;
  omitNullValues = false;
  mainElement = "Rows";
  repeatElement = "Row";
  // Start with an empty output-field list.
  allocate( 0 );
}
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void setDefault() { fileName = "file"; extension = "xml"; stepNrInFilename = false; doNotOpenNewFileInit = false; dateInFilename = false; timeInFilename = false; addToResultFilenames = false; zipped = false; splitEvery = 0; encoding = Const.XML_ENCODING; nameSpace = ""; date_time_format = null; SpecifyFormat = false; omitNullValues = false; mainElement = "Rows"; repeatElement = "Row"; int nrfields = 0; allocate( nrfields ); } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void setDefault() { fileName = "file"; extension = "xml"; stepNrInFilename = false; doNotOpenNewFileInit = false; dateInFilename = false; timeInFilename = false; addToResultFilenames = false; zipped = false; splitEvery = 0; encoding = Const.XML_ENCODING; nameSpace = ""; date_time_format = null; SpecifyFormat = false; omitNullValues = false; mainElement = "Rows"; repeatElement = "Row"; int nrfields = 0; allocate( nrfields ); } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void setDefault() { fileName = "file"; extension = "xml"; stepNrInFilename = false; doNotOpenNewFileInit = false; dateInFilename = false; timeInFilename = false; addToResultFilenames = false; zipped = false; splitEvery = 0; encoding = Const.XML_ENCODING; nameSpace = ""; date_time_format = null; SpecifyFormat = false; omitNullValues = false; mainElement = "Rows"; repeatElement = "Row"; int nrfields = 0; allocate( nrfields ); } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void setDefault() { fileName = "file"; extension = "xml"; stepNrInFilename = false; doNotOpenNewFileInit = false; dateInFilename = false; timeInFilename = false; addToResultFilenames = false; zipped = false; splitEvery = 0; encoding = Const.XML_ENCODING; nameSpace = ""; date_time_format = null; SpecifyFormat = false; omitNullValues = false; mainElement = "Rows"; repeatElement = "Row"; int nrfields = 0; allocate( nrfields ); } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
@Test
public void testGetFiles() throws Exception {
  // With step-nr and split enabled, getFiles() previews 3 copies x 3 splits
  // plus a trailing "..." marker.
  XMLOutputMeta meta = new XMLOutputMeta();
  meta.setDefault();
  meta.setStepNrInFilename( true );
  meta.setSplitEvery( 100 );
  meta.setSpecifyFormat( true );
  meta.setDateTimeFormat( "99" );
  String[] previewedFiles = meta.getFiles( new Variables() );
  assertEquals( 10, previewedFiles.length );
  assertArrayEquals( new String[] {
    "file99_0_00001.xml", "file99_0_00002.xml", "file99_0_00003.xml",
    "file99_1_00001.xml", "file99_1_00002.xml", "file99_1_00003.xml",
    "file99_2_00001.xml", "file99_2_00002.xml", "file99_2_00003.xml",
    "..." }, previewedFiles );
}
|
/**
 * Builds a preview list of the file names this step would produce.
 * Three sample step copies and/or three sample splits are generated when the
 * corresponding option is active; when more than one name results, a final
 * "..." entry signals that the list is truncated.
 *
 * @param space variable space used to resolve the file name
 * @return sample output file names, possibly ending with "..."
 */
public String[] getFiles( VariableSpace space ) {
  // Preview three copies/splits when the respective option is enabled.
  final int copies = stepNrInFilename ? 3 : 1;
  final int splits = ( splitEvery != 0 ) ? 3 : 1;
  int total = copies * splits;
  if ( total > 1 ) {
    total++; // reserve one slot for the "..." truncation marker
  }
  String[] result = new String[ total ];
  int idx = 0;
  for ( int copy = 0; copy < copies; copy++ ) {
    for ( int split = 0; split < splits; split++ ) {
      result[ idx++ ] = buildFilename( space, copy, split, false );
    }
  }
  if ( idx < total ) {
    result[ idx ] = "...";
  }
  return result;
}
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String[] getFiles( VariableSpace space ) { int copies = 1; int splits = 1; if ( stepNrInFilename ) { copies = 3; } if ( splitEvery != 0 ) { splits = 3; } int nr = copies * splits; if ( nr > 1 ) { nr++; } String[] retval = new String[nr]; int i = 0; for ( int copy = 0; copy < copies; copy++ ) { for ( int split = 0; split < splits; split++ ) { retval[i] = buildFilename( space, copy, split, false ); i++; } } if ( i < nr ) { retval[i] = "..."; } return retval; } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String[] getFiles( VariableSpace space ) { int copies = 1; int splits = 1; if ( stepNrInFilename ) { copies = 3; } if ( splitEvery != 0 ) { splits = 3; } int nr = copies * splits; if ( nr > 1 ) { nr++; } String[] retval = new String[nr]; int i = 0; for ( int copy = 0; copy < copies; copy++ ) { for ( int split = 0; split < splits; split++ ) { retval[i] = buildFilename( space, copy, split, false ); i++; } } if ( i < nr ) { retval[i] = "..."; } return retval; } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String[] getFiles( VariableSpace space ) { int copies = 1; int splits = 1; if ( stepNrInFilename ) { copies = 3; } if ( splitEvery != 0 ) { splits = 3; } int nr = copies * splits; if ( nr > 1 ) { nr++; } String[] retval = new String[nr]; int i = 0; for ( int copy = 0; copy < copies; copy++ ) { for ( int split = 0; split < splits; split++ ) { retval[i] = buildFilename( space, copy, split, false ); i++; } } if ( i < nr ) { retval[i] = "..."; } return retval; } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String[] getFiles( VariableSpace space ) { int copies = 1; int splits = 1; if ( stepNrInFilename ) { copies = 3; } if ( splitEvery != 0 ) { splits = 3; } int nr = copies * splits; if ( nr > 1 ) { nr++; } String[] retval = new String[nr]; int i = 0; for ( int copy = 0; copy < copies; copy++ ) { for ( int split = 0; split < splits; split++ ) { retval[i] = buildFilename( space, copy, split, false ); i++; } } if ( i < nr ) { retval[i] = "..."; } return retval; } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
@Test
public void testGetFields() throws Exception {
  // getFields() must push each output field's length/precision onto the
  // matching value meta found in the row.
  XMLOutputMeta meta = new XMLOutputMeta();
  meta.setDefault();
  XMLField field = new XMLField();
  field.setFieldName( "aField" );
  field.setLength( 10 );
  field.setPrecision( 3 );
  meta.setOutputFields( new XMLField[] { field } );
  RowMetaInterface rowMeta = mock( RowMetaInterface.class );
  RowMetaInterface infoRowMeta = mock( RowMetaInterface.class );
  StepMeta nextStep = mock( StepMeta.class );
  Repository repository = mock( Repository.class );
  IMetaStore metaStore = mock( IMetaStore.class );
  ValueMetaInterface valueMeta = mock( ValueMetaInterface.class );
  when( rowMeta.searchValueMeta( "aField" ) ).thenReturn( valueMeta );
  meta.getFields( rowMeta, "", new RowMetaInterface[] { infoRowMeta }, nextStep, new Variables(), repository,
    metaStore );
  verify( valueMeta ).setLength( 10, 3 );
}
|
/**
 * Applies the configured output-field lengths and precisions to the
 * corresponding value metadata entries in the supplied row. Fields that have
 * no match in the row are silently skipped; the row structure itself is not
 * changed.
 */
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) {
  for ( XMLField field : outputFields ) {
    ValueMetaInterface valueMeta = row.searchValueMeta( field.getFieldName() );
    if ( valueMeta != null ) {
      valueMeta.setLength( field.getLength(), field.getPrecision() );
    }
  }
}
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; ValueMetaInterface v = row.searchValueMeta( field.getFieldName() ); if ( v != null ) { v.setLength( field.getLength(), field.getPrecision() ); } } } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; ValueMetaInterface v = row.searchValueMeta( field.getFieldName() ); if ( v != null ) { v.setLength( field.getLength(), field.getPrecision() ); } } } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; ValueMetaInterface v = row.searchValueMeta( field.getFieldName() ); if ( v != null ) { v.setLength( field.getLength(), field.getPrecision() ); } } } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; ValueMetaInterface v = row.searchValueMeta( field.getFieldName() ); if ( v != null ) { v.setLength( field.getLength(), field.getPrecision() ); } } } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
/**
 * Verifies that a failure while reading the step XML is wrapped in a
 * {@link KettleXMLException} whose cause carries the original message.
 */
@Test
public void testLoadXmlException() throws Exception {
  XMLOutputMeta xmlOutputMeta = new XMLOutputMeta();
  DatabaseMeta dbMeta = mock( DatabaseMeta.class );
  IMetaStore metaStore = mock( IMetaStore.class );
  Node stepNode = mock( Node.class );
  when( stepNode.getChildNodes() ).thenThrow( new RuntimeException( "some words" ) );
  try {
    xmlOutputMeta.loadXML( stepNode, Collections.singletonList( dbMeta ), metaStore );
    // Bug fix: the original test passed silently when no exception was thrown.
    fail( "Expected KettleXMLException to be thrown" );
  } catch ( KettleXMLException e ) {
    assertEquals( "some words", e.getCause().getMessage() );
  }
}
|
/**
 * Loads this step's configuration from the given XML step node by delegating
 * to {@code readData}. The {@code databases} and {@code metaStore} arguments
 * are not used by this implementation.
 *
 * @param stepnode  XML node holding the step definition
 * @param databases available database connections (unused here)
 * @param metaStore metastore (unused here)
 * @throws KettleXMLException if the step XML cannot be read
 */
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  readData( stepnode );
}
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode ); } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode ); } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode ); } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode ); } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
@Test public void testInsertTransformationPartitionSchema() throws KettleException { ArgumentCaptor<String> argumentTableName = ArgumentCaptor.forClass( String.class ); ArgumentCaptor<RowMetaAndData> argumentTableData = ArgumentCaptor.forClass( RowMetaAndData.class ); doNothing().when( repo.connectionDelegate ).insertTableRow( argumentTableName.capture(), argumentTableData.capture() ); doReturn( new LongObjectId( 456 ) ).when( repo.connectionDelegate ).getNextTransformationPartitionSchemaID(); ObjectId result = repo.insertTransformationPartitionSchema( new LongObjectId( 147 ), new LongObjectId( 258 ) ); RowMetaAndData insertRecord = argumentTableData.getValue(); assertEquals( KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA, argumentTableName.getValue() ); assertEquals( 3, insertRecord.size() ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 0 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, insertRecord.getValueMeta( 0 ).getName() ); assertEquals( Long.valueOf( 456 ), insertRecord.getInteger( 0 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 1 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION, insertRecord.getValueMeta( 1 ).getName() ); assertEquals( Long.valueOf( 147 ), insertRecord.getInteger( 1 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 2 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, insertRecord.getValueMeta( 2 ).getName() ); assertEquals( Long.valueOf( 258 ), insertRecord.getInteger( 2 ) ); assertEquals( new LongObjectId( 456 ), result ); }
|
public synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation, ObjectId id_partition_schema ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationPartitionSchemaID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA ), id_partition_schema ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA, table ); return id; }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation, ObjectId id_partition_schema ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationPartitionSchemaID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA ), id_partition_schema ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA, table ); return id; } }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation, ObjectId id_partition_schema ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationPartitionSchemaID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA ), id_partition_schema ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA, table ); return id; } KettleDatabaseRepository(); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation, ObjectId id_partition_schema ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationPartitionSchemaID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA ), id_partition_schema ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation, ObjectId id_partition_schema ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationPartitionSchemaID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA ), id_partition_schema ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); public KettleDatabaseRepositoryTransDelegate transDelegate; public KettleDatabaseRepositoryJobDelegate jobDelegate; public KettleDatabaseRepositoryDatabaseDelegate databaseDelegate; public KettleDatabaseRepositorySlaveServerDelegate slaveServerDelegate; public KettleDatabaseRepositoryClusterSchemaDelegate clusterSchemaDelegate; public KettleDatabaseRepositoryPartitionSchemaDelegate partitionSchemaDelegate; public KettleDatabaseRepositoryDirectoryDelegate directoryDelegate; public KettleDatabaseRepositoryConnectionDelegate connectionDelegate; public KettleDatabaseRepositoryUserDelegate userDelegate; public KettleDatabaseRepositoryConditionDelegate conditionDelegate; public KettleDatabaseRepositoryValueDelegate valueDelegate; public KettleDatabaseRepositoryNotePadDelegate notePadDelegate; public KettleDatabaseRepositoryStepDelegate stepDelegate; public KettleDatabaseRepositoryJobEntryDelegate jobEntryDelegate; public KettleDatabaseRepositoryMetaStoreDelegate metaStoreDelegate; public KettleDatabaseRepositoryMetaStore metaStore; }
|
@Test public void testReadRepException() throws Exception { XMLOutputMeta xmlOutputMeta = new XMLOutputMeta(); Repository rep = mock( Repository.class ); IMetaStore metastore = mock( IMetaStore.class ); DatabaseMeta dbMeta = mock( DatabaseMeta.class ); ObjectId oid = new StringObjectId( "oid" ); when( rep.getStepAttributeString( oid, "encoding" ) ).thenThrow( new RuntimeException( "encoding exception" ) ); try { xmlOutputMeta.readRep( rep, metastore, oid, Collections.singletonList( dbMeta ) ); } catch ( KettleException e ) { assertEquals( "encoding exception", e.getCause().getMessage() ); } }
|
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( rep.getStepAttributeString( id_step, i, "field_type" ) ); 
outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( 
rep.getStepAttributeString( id_step, i, "field_type" ) ); outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( 
rep.getStepAttributeString( id_step, i, "field_type" ) ); outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( 
rep.getStepAttributeString( id_step, i, "field_type" ) ); outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String 
buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { encoding = rep.getStepAttributeString( id_step, "encoding" ); nameSpace = rep.getStepAttributeString( id_step, "name_space" ); mainElement = rep.getStepAttributeString( id_step, "xml_main_element" ); repeatElement = rep.getStepAttributeString( id_step, "xml_repeat_element" ); fileName = rep.getStepAttributeString( id_step, "file_name" ); extension = rep.getStepAttributeString( id_step, "file_extention" ); servletOutput = rep.getStepAttributeBoolean( id_step, "file_servlet_output" ); doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" ); splitEvery = (int) rep.getStepAttributeInteger( id_step, "file_split" ); stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" ); dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" ); timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" ); SpecifyFormat = rep.getStepAttributeBoolean( id_step, "SpecifyFormat" ); omitNullValues = rep.getStepAttributeBoolean( id_step, "omit_null_values" ); date_time_format = rep.getStepAttributeString( id_step, "date_time_format" ); addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" ); zipped = rep.getStepAttributeBoolean( id_step, "file_zipped" ); int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { outputFields[i] = new XMLField(); outputFields[i].setContentType( ContentType.valueOf( Const.NVL( rep.getStepAttributeString( id_step, i, "field_content_type" ), ContentType.Element.name() ) ) ); outputFields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); outputFields[i].setElementName( rep.getStepAttributeString( id_step, i, "field_element" ) ); outputFields[i].setType( 
rep.getStepAttributeString( id_step, i, "field_type" ) ); outputFields[i].setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) ); outputFields[i].setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) ); outputFields[i].setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) ); outputFields[i].setGroupingSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) ); outputFields[i].setNullString( rep.getStepAttributeString( id_step, i, "field_nullif" ) ); outputFields[i].setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) ); outputFields[i].setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String 
buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
@Test public void testGetRequiredFields() throws Exception { XMLOutputMeta xmlOutputMeta = new XMLOutputMeta(); xmlOutputMeta.setDefault(); XMLField xmlField = new XMLField(); xmlField.setFieldName( "aField" ); xmlField.setType( 1 ); xmlField.setLength( 10 ); xmlField.setPrecision( 3 ); XMLField xmlField2 = new XMLField(); xmlField2.setFieldName( "bField" ); xmlField2.setType( 3 ); xmlField2.setLength( 4 ); xmlField2.setPrecision( 5 ); xmlOutputMeta.setOutputFields( new XMLField[] { xmlField, xmlField2 } ); RowMetaInterface requiredFields = xmlOutputMeta.getRequiredFields( new Variables() ); List<ValueMetaInterface> valueMetaList = requiredFields.getValueMetaList(); assertEquals( 2, valueMetaList.size() ); assertEquals( "aField", valueMetaList.get( 0 ).getName() ); assertEquals( 1, valueMetaList.get( 0 ).getType() ); assertEquals( 10, valueMetaList.get( 0 ).getLength() ); assertEquals( 3, valueMetaList.get( 0 ).getPrecision() ); assertEquals( "bField", valueMetaList.get( 1 ).getName() ); assertEquals( 3, valueMetaList.get( 1 ).getType() ); assertEquals( 4, valueMetaList.get( 1 ).getLength() ); assertEquals( 5, valueMetaList.get( 1 ).getPrecision() ); }
|
public RowMetaInterface getRequiredFields( VariableSpace space ) throws KettleException { RowMeta row = new RowMeta(); for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; row.addValueMeta( new ValueMeta( field.getFieldName(), field.getType(), field.getLength(), field.getPrecision() ) ); } return row; }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public RowMetaInterface getRequiredFields( VariableSpace space ) throws KettleException { RowMeta row = new RowMeta(); for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; row.addValueMeta( new ValueMeta( field.getFieldName(), field.getType(), field.getLength(), field.getPrecision() ) ); } return row; } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public RowMetaInterface getRequiredFields( VariableSpace space ) throws KettleException { RowMeta row = new RowMeta(); for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; row.addValueMeta( new ValueMeta( field.getFieldName(), field.getType(), field.getLength(), field.getPrecision() ) ); } return row; } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public RowMetaInterface getRequiredFields( VariableSpace space ) throws KettleException { RowMeta row = new RowMeta(); for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; row.addValueMeta( new ValueMeta( field.getFieldName(), field.getType(), field.getLength(), field.getPrecision() ) ); } return row; } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public RowMetaInterface getRequiredFields( VariableSpace space ) throws KettleException { RowMeta row = new RowMeta(); for ( int i = 0; i < outputFields.length; i++ ) { XMLField field = outputFields[i]; row.addValueMeta( new ValueMeta( field.getFieldName(), field.getType(), field.getLength(), field.getPrecision() ) ); } return row; } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
@Test public void testExportResources() throws Exception { XMLOutputMeta xmlOutputMeta = new XMLOutputMeta(); xmlOutputMeta.setDefault(); ResourceNamingInterface resourceNamingInterface = mock( ResourceNamingInterface.class ); Variables space = new Variables(); when( resourceNamingInterface.nameResource( any( FileObject.class ), eq( space ), eq( true ) ) ).thenReturn( "exportFile" ); xmlOutputMeta.exportResources( space, null, resourceNamingInterface, null, null ); assertEquals( "exportFile", xmlOutputMeta.getFileName() ); }
|
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException { try { if ( !Utils.isEmpty( fileName ) ) { FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName ), space ); fileName = resourceNamingInterface.nameResource( fileObject, space, true ); } return null; } catch ( Exception e ) { throw new KettleException( e ); } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException { try { if ( !Utils.isEmpty( fileName ) ) { FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName ), space ); fileName = resourceNamingInterface.nameResource( fileObject, space, true ); } return null; } catch ( Exception e ) { throw new KettleException( e ); } } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException { try { if ( !Utils.isEmpty( fileName ) ) { FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName ), space ); fileName = resourceNamingInterface.nameResource( fileObject, space, true ); } return null; } catch ( Exception e ) { throw new KettleException( e ); } } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException { try { if ( !Utils.isEmpty( fileName ) ) { FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName ), space ); fileName = resourceNamingInterface.nameResource( fileObject, space, true ); } return null; } catch ( Exception e ) { throw new KettleException( e ); } } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException { try { if ( !Utils.isEmpty( fileName ) ) { FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName ), space ); fileName = resourceNamingInterface.nameResource( fileObject, space, true ); } return null; } catch ( Exception e ) { throw new KettleException( e ); } } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
@Test public void testCheck() throws Exception { XMLOutputMeta xmlOutputMeta = new XMLOutputMeta(); xmlOutputMeta.setDefault(); TransMeta transMeta = mock( TransMeta.class ); StepMeta stepInfo = mock( StepMeta.class ); RowMetaInterface prev = mock( RowMetaInterface.class ); Repository repos = mock( Repository.class ); IMetaStore metastore = mock( IMetaStore.class ); RowMetaInterface info = mock( RowMetaInterface.class ); ArrayList<CheckResultInterface> remarks = new ArrayList<>(); xmlOutputMeta.check( remarks, transMeta, stepInfo, prev, new String[] { "input" }, new String[] { "output" }, info, new Variables(), repos, metastore ); assertEquals( 2, remarks.size() ); assertEquals( "Step is receiving info from other steps.", remarks.get( 0 ).getText() ); assertEquals( "File specifications are not checked.", remarks.get( 1 ).getText() ); XMLField xmlField = new XMLField(); xmlField.setFieldName( "aField" ); xmlField.setType( 1 ); xmlField.setLength( 10 ); xmlField.setPrecision( 3 ); xmlOutputMeta.setOutputFields( new XMLField[] { xmlField } ); when( prev.size() ).thenReturn( 1 ); remarks.clear(); xmlOutputMeta.check( remarks, transMeta, stepInfo, prev, new String[] { "input" }, new String[] { "output" }, info, new Variables(), repos, metastore ); assertEquals( 4, remarks.size() ); assertEquals( "Step is connected to previous one, receiving 1 fields", remarks.get( 0 ).getText() ); assertEquals( "All output fields are found in the input stream.", remarks.get( 1 ).getText() ); assertEquals( "Step is receiving info from other steps.", remarks.get( 2 ).getText() ); assertEquals( "File specifications are not checked.", remarks.get( 3 ).getText() ); }
|
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { CheckResult cr; if ( prev != null && prev.size() > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsReceived", "" + prev.size() ), stepinfo ); remarks.add( cr ); String error_message = ""; boolean error_found = false; for ( int i = 0; i < outputFields.length; i++ ) { int idx = prev.indexOfValue( outputFields[i].getFieldName() ); if ( idx < 0 ) { error_message += "\t\t" + outputFields[i].getFieldName() + Const.CR; error_found = true; } } if ( error_found ) { error_message = BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsNotFound", error_message ); cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.AllFieldsFound" ), stepinfo ); remarks.add( cr ); } } if ( input.length > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputOk" ), stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputError" ), stepinfo ); remarks.add( cr ); } cr = new CheckResult( CheckResultInterface.TYPE_RESULT_COMMENT, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FilesNotChecked" ), stepinfo ); remarks.add( cr ); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { CheckResult cr; if ( prev != null && prev.size() > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsReceived", "" + prev.size() ), stepinfo ); remarks.add( cr ); String error_message = ""; boolean error_found = false; for ( int i = 0; i < outputFields.length; i++ ) { int idx = prev.indexOfValue( outputFields[i].getFieldName() ); if ( idx < 0 ) { error_message += "\t\t" + outputFields[i].getFieldName() + Const.CR; error_found = true; } } if ( error_found ) { error_message = BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsNotFound", error_message ); cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.AllFieldsFound" ), stepinfo ); remarks.add( cr ); } } if ( input.length > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputOk" ), stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputError" ), stepinfo ); remarks.add( cr ); } cr = new CheckResult( CheckResultInterface.TYPE_RESULT_COMMENT, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FilesNotChecked" ), stepinfo ); remarks.add( cr ); } }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { CheckResult cr; if ( prev != null && prev.size() > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsReceived", "" + prev.size() ), stepinfo ); remarks.add( cr ); String error_message = ""; boolean error_found = false; for ( int i = 0; i < outputFields.length; i++ ) { int idx = prev.indexOfValue( outputFields[i].getFieldName() ); if ( idx < 0 ) { error_message += "\t\t" + outputFields[i].getFieldName() + Const.CR; error_found = true; } } if ( error_found ) { error_message = BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsNotFound", error_message ); cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.AllFieldsFound" ), stepinfo ); remarks.add( cr ); } } if ( input.length > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputOk" ), stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputError" ), stepinfo ); remarks.add( cr ); } cr = new CheckResult( CheckResultInterface.TYPE_RESULT_COMMENT, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FilesNotChecked" ), stepinfo ); remarks.add( cr ); } XMLOutputMeta(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { CheckResult cr; if ( prev != null && prev.size() > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsReceived", "" + prev.size() ), stepinfo ); remarks.add( cr ); String error_message = ""; boolean error_found = false; for ( int i = 0; i < outputFields.length; i++ ) { int idx = prev.indexOfValue( outputFields[i].getFieldName() ); if ( idx < 0 ) { error_message += "\t\t" + outputFields[i].getFieldName() + Const.CR; error_found = true; } } if ( error_found ) { error_message = BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsNotFound", error_message ); cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.AllFieldsFound" ), stepinfo ); remarks.add( cr ); } } if ( input.length > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputOk" ), stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputError" ), stepinfo ); remarks.add( cr ); } cr = new CheckResult( CheckResultInterface.TYPE_RESULT_COMMENT, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FilesNotChecked" ), stepinfo ); remarks.add( cr ); } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void 
setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
XMLOutputMeta extends BaseStepMeta implements StepMetaInterface { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { CheckResult cr; if ( prev != null && prev.size() > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsReceived", "" + prev.size() ), stepinfo ); remarks.add( cr ); String error_message = ""; boolean error_found = false; for ( int i = 0; i < outputFields.length; i++ ) { int idx = prev.indexOfValue( outputFields[i].getFieldName() ); if ( idx < 0 ) { error_message += "\t\t" + outputFields[i].getFieldName() + Const.CR; error_found = true; } } if ( error_found ) { error_message = BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FieldsNotFound", error_message ); cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.AllFieldsFound" ), stepinfo ); remarks.add( cr ); } } if ( input.length > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputOk" ), stepinfo ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.ExpectedInputError" ), stepinfo ); remarks.add( cr ); } cr = new CheckResult( CheckResultInterface.TYPE_RESULT_COMMENT, BaseMessages.getString( PKG, "XMLOutputMeta.CheckResult.FilesNotChecked" ), stepinfo ); remarks.add( cr ); } XMLOutputMeta(); boolean isDateInFilename(); void setDateInFilename( boolean dateInFilename ); String getExtension(); void setExtension( String extension ); boolean isDoNotOpenNewFileInit(); void 
setDoNotOpenNewFileInit( boolean doNotOpenNewFileInit ); String getFileName(); void setFileName( String fileName ); int getSplitEvery(); void setSplitEvery( int splitEvery ); boolean isStepNrInFilename(); void setStepNrInFilename( boolean stepNrInFilename ); boolean isTimeInFilename(); void setTimeInFilename( boolean timeInFilename ); boolean isAddToResultFiles(); void setAddToResultFiles( boolean addtoresultfilenamesin ); boolean isSpecifyFormat(); void setSpecifyFormat( boolean SpecifyFormat ); String getDateTimeFormat(); void setDateTimeFormat( String date_time_format ); boolean isZipped(); void setZipped( boolean zipped ); XMLField[] getOutputFields(); void setOutputFields( XMLField[] outputFields ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrfields ); Object clone(); String getNewLine( String fformat ); void setDefault(); String[] getFiles( VariableSpace space ); String buildFilename( VariableSpace space, int stepnr, int splitnr, boolean ziparchive ); void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getRequiredFields( VariableSpace space ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ); StepDataInterface getStepData(); String getEncoding(); void setEncoding( String encoding ); String getMainElement(); void setMainElement( String mainElement ); String getRepeatElement(); void setRepeatElement( String repeatElement ); String getNameSpace(); void setNameSpace( String nameSpace ); void setOmitNullValues( boolean omitNullValues ); boolean isOmitNullValues(); boolean isServletOutput(); void setServletOutput( boolean servletOutput ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); boolean passDataToServletOutput(); }
|
@Test public void exceptionIsThrowsForNonExistingFiles() throws Exception { GoogleAnalyticsApiFacade.createFor( "application-name", "account", path ); }
|
public static GoogleAnalyticsApiFacade createFor( String application, String oauthServiceAccount, String oauthKeyFile ) throws GeneralSecurityException, IOException, KettleFileException { return new GoogleAnalyticsApiFacade( GoogleNetHttpTransport.newTrustedTransport(), JacksonFactory.getDefaultInstance(), application, oauthServiceAccount, new File( KettleVFS.getFileObject( oauthKeyFile ).getURL().getPath() ) ); }
|
GoogleAnalyticsApiFacade { public static GoogleAnalyticsApiFacade createFor( String application, String oauthServiceAccount, String oauthKeyFile ) throws GeneralSecurityException, IOException, KettleFileException { return new GoogleAnalyticsApiFacade( GoogleNetHttpTransport.newTrustedTransport(), JacksonFactory.getDefaultInstance(), application, oauthServiceAccount, new File( KettleVFS.getFileObject( oauthKeyFile ).getURL().getPath() ) ); } }
|
GoogleAnalyticsApiFacade { public static GoogleAnalyticsApiFacade createFor( String application, String oauthServiceAccount, String oauthKeyFile ) throws GeneralSecurityException, IOException, KettleFileException { return new GoogleAnalyticsApiFacade( GoogleNetHttpTransport.newTrustedTransport(), JacksonFactory.getDefaultInstance(), application, oauthServiceAccount, new File( KettleVFS.getFileObject( oauthKeyFile ).getURL().getPath() ) ); } GoogleAnalyticsApiFacade( HttpTransport httpTransport, JsonFactory jsonFactory, String application,
String oathServiceEmail, File keyFile ); }
|
GoogleAnalyticsApiFacade { public static GoogleAnalyticsApiFacade createFor( String application, String oauthServiceAccount, String oauthKeyFile ) throws GeneralSecurityException, IOException, KettleFileException { return new GoogleAnalyticsApiFacade( GoogleNetHttpTransport.newTrustedTransport(), JacksonFactory.getDefaultInstance(), application, oauthServiceAccount, new File( KettleVFS.getFileObject( oauthKeyFile ).getURL().getPath() ) ); } GoogleAnalyticsApiFacade( HttpTransport httpTransport, JsonFactory jsonFactory, String application,
String oathServiceEmail, File keyFile ); static GoogleAnalyticsApiFacade createFor(
String application, String oauthServiceAccount, String oauthKeyFile ); void close(); Analytics getAnalytics(); }
|
GoogleAnalyticsApiFacade { public static GoogleAnalyticsApiFacade createFor( String application, String oauthServiceAccount, String oauthKeyFile ) throws GeneralSecurityException, IOException, KettleFileException { return new GoogleAnalyticsApiFacade( GoogleNetHttpTransport.newTrustedTransport(), JacksonFactory.getDefaultInstance(), application, oauthServiceAccount, new File( KettleVFS.getFileObject( oauthKeyFile ).getURL().getPath() ) ); } GoogleAnalyticsApiFacade( HttpTransport httpTransport, JsonFactory jsonFactory, String application,
String oathServiceEmail, File keyFile ); static GoogleAnalyticsApiFacade createFor(
String application, String oauthServiceAccount, String oauthKeyFile ); void close(); Analytics getAnalytics(); }
|
@Test public void testGetFields() throws Exception { dialog.getFields(); verify( query ).setMaxResults( 1 ); verify( table, times( 19 ) ).getItem( anyInt() ); verify( tableItem, times( 7 ) ).setText( 1, GaInputStepMeta.FIELD_TYPE_DIMENSION ); verify( tableItem, times( 7 ) ).setText( 1, GaInputStepMeta.FIELD_TYPE_METRIC ); verify( tableItem, times( 3 ) ).setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); verify( tableItem, times( 2 ) ).setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); for ( ColumnHeaders header : headers ) { verify( tableItem, times( 1 ) ).setText( 2, header.getName() ); verify( tableItem, times( 1 ) ).setText( 3, header.getName() ); } verify( tableItem, times( 1 ) ).setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); verify( tableItem, times( 1 ) ).setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); verify( tableItem, times( 1 ) ).setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); verify( tableItem, times( 1 ) ).setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); verify( tableItem, times( 1 ) ).setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); verify( tableItem, times( 1 ) ).setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); verify( tableItem, times( 1 ) ).setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); verify( tableItem, times( 1 ) ).setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); verify( tableItem, times( 1 ) ).setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); verify( tableItem, times( 1 ) ).setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); verify( tableItem, times( 1 ) ).setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_DATE ) ); verify( tableItem, times( 5 ) ).setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); verify( tableItem, times( 6 ) ).setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); verify( tableItem, times( 7 ) ).setText( 4, 
ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); }
|
void getFields() { Analytics.Data.Ga.Get query = getPreviewQuery(); if ( query == null ) { return; } query.setMaxResults( 1 ); try { GaData dataFeed = query.execute(); if ( dataFeed == null || dataFeed.getRows() == null || dataFeed.getRows().size() < 1 ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( "Query yields empty feed" ); mb.setMessage( "The feed did not give any results. Please specify a query that returns data." ); mb.open(); return; } int i = 0; List<GaData.ColumnHeaders> colHeaders = dataFeed.getColumnHeaders(); getTableView().table.setItemCount( colHeaders.size() + dataFeed.getProfileInfo().size() ); for ( GaData.ColumnHeaders colHeader : colHeaders ) { String name = colHeader.getName(); String dataType = colHeader.getDataType(); String columnType = colHeader.getColumnType(); TableItem item = getTableView().table.getItem( i ); if ( columnType.equals( "DIMENSION" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_DIMENSION ); item.setText( 2, name ); item.setText( 3, name ); if ( name.equalsIgnoreCase( "ga:date" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_DATE ) ); item.setText( 5, "yyyyMMdd" ); } else if ( name.equalsIgnoreCase( "ga:daysSinceLastVisit" ) || name.equalsIgnoreCase( "ga:visitLength" ) || name.equalsIgnoreCase( "ga:visitCount" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else if ( name.equalsIgnoreCase( "ga:latitude" ) || name.equalsIgnoreCase( "ga:longitude" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } else if ( columnType.equals( "METRIC" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_METRIC ); item.setText( 2, name ); item.setText( 3, name ); if ( dataType.compareToIgnoreCase( "currency" ) == 0 || 
dataType.compareToIgnoreCase( "float" ) == 0 || dataType.compareToIgnoreCase( "percent" ) == 0 || dataType.compareToIgnoreCase( "us_currency" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else if ( dataType.compareToIgnoreCase( "time" ) == 0 || dataType.compareToIgnoreCase( "integer" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } } TableItem item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); 
item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); getTableView().removeEmptyRows(); getTableView().setRowNums(); getTableView().optWidth( true ); getInput().setChanged(); } catch ( IOException ioe ) { Exception exceptionToDisplay = ioe; if ( ioe instanceof GoogleJsonResponseException ) { GoogleJsonResponseException gjre = (GoogleJsonResponseException) ioe; if ( gjre.getDetails() != null && gjre.getDetails().getMessage() != null ) { exceptionToDisplay = new IOException( gjre.getDetails().getMessage(), gjre ); } } new ErrorDialog( shell, BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogTitle" ), BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogMessage" ), exceptionToDisplay ); } }
|
GaInputStepDialog extends BaseStepDialog implements StepDialogInterface { void getFields() { Analytics.Data.Ga.Get query = getPreviewQuery(); if ( query == null ) { return; } query.setMaxResults( 1 ); try { GaData dataFeed = query.execute(); if ( dataFeed == null || dataFeed.getRows() == null || dataFeed.getRows().size() < 1 ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( "Query yields empty feed" ); mb.setMessage( "The feed did not give any results. Please specify a query that returns data." ); mb.open(); return; } int i = 0; List<GaData.ColumnHeaders> colHeaders = dataFeed.getColumnHeaders(); getTableView().table.setItemCount( colHeaders.size() + dataFeed.getProfileInfo().size() ); for ( GaData.ColumnHeaders colHeader : colHeaders ) { String name = colHeader.getName(); String dataType = colHeader.getDataType(); String columnType = colHeader.getColumnType(); TableItem item = getTableView().table.getItem( i ); if ( columnType.equals( "DIMENSION" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_DIMENSION ); item.setText( 2, name ); item.setText( 3, name ); if ( name.equalsIgnoreCase( "ga:date" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_DATE ) ); item.setText( 5, "yyyyMMdd" ); } else if ( name.equalsIgnoreCase( "ga:daysSinceLastVisit" ) || name.equalsIgnoreCase( "ga:visitLength" ) || name.equalsIgnoreCase( "ga:visitCount" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else if ( name.equalsIgnoreCase( "ga:latitude" ) || name.equalsIgnoreCase( "ga:longitude" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } else if ( columnType.equals( "METRIC" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_METRIC ); item.setText( 2, name ); item.setText( 3, name ); 
if ( dataType.compareToIgnoreCase( "currency" ) == 0 || dataType.compareToIgnoreCase( "float" ) == 0 || dataType.compareToIgnoreCase( "percent" ) == 0 || dataType.compareToIgnoreCase( "us_currency" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else if ( dataType.compareToIgnoreCase( "time" ) == 0 || dataType.compareToIgnoreCase( "integer" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } } TableItem item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 4, 
ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); getTableView().removeEmptyRows(); getTableView().setRowNums(); getTableView().optWidth( true ); getInput().setChanged(); } catch ( IOException ioe ) { Exception exceptionToDisplay = ioe; if ( ioe instanceof GoogleJsonResponseException ) { GoogleJsonResponseException gjre = (GoogleJsonResponseException) ioe; if ( gjre.getDetails() != null && gjre.getDetails().getMessage() != null ) { exceptionToDisplay = new IOException( gjre.getDetails().getMessage(), gjre ); } } new ErrorDialog( shell, BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogTitle" ), BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogMessage" ), exceptionToDisplay ); } } }
|
GaInputStepDialog extends BaseStepDialog implements StepDialogInterface { void getFields() { Analytics.Data.Ga.Get query = getPreviewQuery(); if ( query == null ) { return; } query.setMaxResults( 1 ); try { GaData dataFeed = query.execute(); if ( dataFeed == null || dataFeed.getRows() == null || dataFeed.getRows().size() < 1 ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( "Query yields empty feed" ); mb.setMessage( "The feed did not give any results. Please specify a query that returns data." ); mb.open(); return; } int i = 0; List<GaData.ColumnHeaders> colHeaders = dataFeed.getColumnHeaders(); getTableView().table.setItemCount( colHeaders.size() + dataFeed.getProfileInfo().size() ); for ( GaData.ColumnHeaders colHeader : colHeaders ) { String name = colHeader.getName(); String dataType = colHeader.getDataType(); String columnType = colHeader.getColumnType(); TableItem item = getTableView().table.getItem( i ); if ( columnType.equals( "DIMENSION" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_DIMENSION ); item.setText( 2, name ); item.setText( 3, name ); if ( name.equalsIgnoreCase( "ga:date" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_DATE ) ); item.setText( 5, "yyyyMMdd" ); } else if ( name.equalsIgnoreCase( "ga:daysSinceLastVisit" ) || name.equalsIgnoreCase( "ga:visitLength" ) || name.equalsIgnoreCase( "ga:visitCount" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else if ( name.equalsIgnoreCase( "ga:latitude" ) || name.equalsIgnoreCase( "ga:longitude" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } else if ( columnType.equals( "METRIC" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_METRIC ); item.setText( 2, name ); item.setText( 3, name ); 
if ( dataType.compareToIgnoreCase( "currency" ) == 0 || dataType.compareToIgnoreCase( "float" ) == 0 || dataType.compareToIgnoreCase( "percent" ) == 0 || dataType.compareToIgnoreCase( "us_currency" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else if ( dataType.compareToIgnoreCase( "time" ) == 0 || dataType.compareToIgnoreCase( "integer" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } } TableItem item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 4, 
ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); getTableView().removeEmptyRows(); getTableView().setRowNums(); getTableView().optWidth( true ); getInput().setChanged(); } catch ( IOException ioe ) { Exception exceptionToDisplay = ioe; if ( ioe instanceof GoogleJsonResponseException ) { GoogleJsonResponseException gjre = (GoogleJsonResponseException) ioe; if ( gjre.getDetails() != null && gjre.getDetails().getMessage() != null ) { exceptionToDisplay = new IOException( gjre.getDetails().getMessage(), gjre ); } } new ErrorDialog( shell, BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogTitle" ), BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogMessage" ), exceptionToDisplay ); } } GaInputStepDialog( Shell parent, Object in, TransMeta transMeta, String sname ); }
|
GaInputStepDialog extends BaseStepDialog implements StepDialogInterface { void getFields() { Analytics.Data.Ga.Get query = getPreviewQuery(); if ( query == null ) { return; } query.setMaxResults( 1 ); try { GaData dataFeed = query.execute(); if ( dataFeed == null || dataFeed.getRows() == null || dataFeed.getRows().size() < 1 ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( "Query yields empty feed" ); mb.setMessage( "The feed did not give any results. Please specify a query that returns data." ); mb.open(); return; } int i = 0; List<GaData.ColumnHeaders> colHeaders = dataFeed.getColumnHeaders(); getTableView().table.setItemCount( colHeaders.size() + dataFeed.getProfileInfo().size() ); for ( GaData.ColumnHeaders colHeader : colHeaders ) { String name = colHeader.getName(); String dataType = colHeader.getDataType(); String columnType = colHeader.getColumnType(); TableItem item = getTableView().table.getItem( i ); if ( columnType.equals( "DIMENSION" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_DIMENSION ); item.setText( 2, name ); item.setText( 3, name ); if ( name.equalsIgnoreCase( "ga:date" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_DATE ) ); item.setText( 5, "yyyyMMdd" ); } else if ( name.equalsIgnoreCase( "ga:daysSinceLastVisit" ) || name.equalsIgnoreCase( "ga:visitLength" ) || name.equalsIgnoreCase( "ga:visitCount" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else if ( name.equalsIgnoreCase( "ga:latitude" ) || name.equalsIgnoreCase( "ga:longitude" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } else if ( columnType.equals( "METRIC" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_METRIC ); item.setText( 2, name ); item.setText( 3, name ); 
if ( dataType.compareToIgnoreCase( "currency" ) == 0 || dataType.compareToIgnoreCase( "float" ) == 0 || dataType.compareToIgnoreCase( "percent" ) == 0 || dataType.compareToIgnoreCase( "us_currency" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else if ( dataType.compareToIgnoreCase( "time" ) == 0 || dataType.compareToIgnoreCase( "integer" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } } TableItem item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 4, 
ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); getTableView().removeEmptyRows(); getTableView().setRowNums(); getTableView().optWidth( true ); getInput().setChanged(); } catch ( IOException ioe ) { Exception exceptionToDisplay = ioe; if ( ioe instanceof GoogleJsonResponseException ) { GoogleJsonResponseException gjre = (GoogleJsonResponseException) ioe; if ( gjre.getDetails() != null && gjre.getDetails().getMessage() != null ) { exceptionToDisplay = new IOException( gjre.getDetails().getMessage(), gjre ); } } new ErrorDialog( shell, BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogTitle" ), BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogMessage" ), exceptionToDisplay ); } } GaInputStepDialog( Shell parent, Object in, TransMeta transMeta, String sname ); @Override String open(); void readGaProfiles(); void readGaSegments(); void getData(); }
|
GaInputStepDialog extends BaseStepDialog implements StepDialogInterface { void getFields() { Analytics.Data.Ga.Get query = getPreviewQuery(); if ( query == null ) { return; } query.setMaxResults( 1 ); try { GaData dataFeed = query.execute(); if ( dataFeed == null || dataFeed.getRows() == null || dataFeed.getRows().size() < 1 ) { MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR ); mb.setText( "Query yields empty feed" ); mb.setMessage( "The feed did not give any results. Please specify a query that returns data." ); mb.open(); return; } int i = 0; List<GaData.ColumnHeaders> colHeaders = dataFeed.getColumnHeaders(); getTableView().table.setItemCount( colHeaders.size() + dataFeed.getProfileInfo().size() ); for ( GaData.ColumnHeaders colHeader : colHeaders ) { String name = colHeader.getName(); String dataType = colHeader.getDataType(); String columnType = colHeader.getColumnType(); TableItem item = getTableView().table.getItem( i ); if ( columnType.equals( "DIMENSION" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_DIMENSION ); item.setText( 2, name ); item.setText( 3, name ); if ( name.equalsIgnoreCase( "ga:date" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_DATE ) ); item.setText( 5, "yyyyMMdd" ); } else if ( name.equalsIgnoreCase( "ga:daysSinceLastVisit" ) || name.equalsIgnoreCase( "ga:visitLength" ) || name.equalsIgnoreCase( "ga:visitCount" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else if ( name.equalsIgnoreCase( "ga:latitude" ) || name.equalsIgnoreCase( "ga:longitude" ) ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } else if ( columnType.equals( "METRIC" ) ) { item.setText( 1, GaInputStepMeta.FIELD_TYPE_METRIC ); item.setText( 2, name ); item.setText( 3, name ); 
if ( dataType.compareToIgnoreCase( "currency" ) == 0 || dataType.compareToIgnoreCase( "float" ) == 0 || dataType.compareToIgnoreCase( "percent" ) == 0 || dataType.compareToIgnoreCase( "us_currency" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_NUMBER ) ); item.setText( 5, "#.#;-#.#" ); } else if ( dataType.compareToIgnoreCase( "time" ) == 0 || dataType.compareToIgnoreCase( "integer" ) == 0 ) { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_INTEGER ) ); item.setText( 5, "#;-#" ); } else { item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); } i++; } } TableItem item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_PROFILE_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_WEBPROP_ID ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY ); item.setText( 2, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 3, GaInputStepMeta.PROPERTY_DATA_SOURCE_ACCOUNT_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_ID ); item.setText( 4, 
ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); i++; item = getTableView().table.getItem( i ); item.setText( 1, GaInputStepMeta.FIELD_TYPE_DATA_SOURCE_FIELD ); item.setText( 2, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 3, GaInputStepMeta.FIELD_DATA_SOURCE_TABLE_NAME ); item.setText( 4, ValueMetaBase.getTypeDesc( ValueMetaInterface.TYPE_STRING ) ); item.setText( 5, "" ); getTableView().removeEmptyRows(); getTableView().setRowNums(); getTableView().optWidth( true ); getInput().setChanged(); } catch ( IOException ioe ) { Exception exceptionToDisplay = ioe; if ( ioe instanceof GoogleJsonResponseException ) { GoogleJsonResponseException gjre = (GoogleJsonResponseException) ioe; if ( gjre.getDetails() != null && gjre.getDetails().getMessage() != null ) { exceptionToDisplay = new IOException( gjre.getDetails().getMessage(), gjre ); } } new ErrorDialog( shell, BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogTitle" ), BaseMessages.getString( PKG, "GoogleAnalyticsDialog.RequestError.DialogMessage" ), exceptionToDisplay ); } } GaInputStepDialog( Shell parent, Object in, TransMeta transMeta, String sname ); @Override String open(); void readGaProfiles(); void readGaSegments(); void getData(); }
|
@Test public void testSetCalendarStartNull() throws KettleException { SalesforceConnection connection = new SalesforceConnection( logInterface, url, username, password ); GregorianCalendar endDate = new GregorianCalendar( 2000, 2, 10 ); try { connection.setCalendar( recordsFilter, null, endDate ); fail(); } catch ( KettleException expected ) { } }
|
public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String 
getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String 
getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
@Test
public void testSetCalendarEndNull() throws KettleException {
  // A null end date must be rejected before any range arithmetic runs.
  SalesforceConnection conn = new SalesforceConnection( logInterface, url, username, password );
  GregorianCalendar windowStart = new GregorianCalendar( 2000, 2, 10 );
  try {
    conn.setCalendar( recordsFilter, windowStart, null );
    fail();
  } catch ( KettleException expected ) {
    // expected: EmptyStartDateOrEndDate validation failure
  }
}
|
/**
 * Stores the records filter mode and the [startDate, endDate) query window,
 * validating that both bounds are present, strictly ordered, and at most
 * 30 whole days apart.
 *
 * @param recordsFilter filter mode constant recorded as-is (semantics defined by caller)
 * @param startDate inclusive lower bound of the window; must not be null
 * @param endDate upper bound of the window; must not be null and must be after startDate
 * @throws KettleException if either bound is null, start is not strictly
 *         before end, or the window exceeds 30 days
 */
public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException {
  // Assign state first so the fields reflect the attempted configuration
  // (matches the original assignment-before-validation behavior).
  this.recordsFilter = recordsFilter;
  this.startDate = startDate;
  this.endDate = endDate;

  if ( this.startDate == null || this.endDate == null ) {
    throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) );
  }

  // Reject an empty or inverted window: start must be strictly before end.
  boolean startNotBeforeEnd = this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0;
  if ( startNotBeforeEnd ) {
    throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) );
  }

  // Whole days between the bounds (truncating division, as in the original).
  long spanMillis = this.endDate.getTime().getTime() - this.startDate.getTime().getTime();
  long spanDays = spanMillis / ( 24 * 60 * 60 * 1000 );
  if ( spanDays > 30 ) {
    throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) );
  }
}
|
// NOTE(review): extraction artifact — a class shell wrapping only setCalendar(); this exact
// snippet is duplicated several times in this file. Kept byte-identical.
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } }
|
// NOTE(review): extraction artifact — class shell with setCalendar() plus a bodiless
// constructor signature; duplicated elsewhere in this file. Kept byte-identical.
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String 
getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String 
getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
@Test
public void testSetCalendarStartDateTooOlder() throws KettleException {
  // Start after end: the window is inverted and must be rejected.
  SalesforceConnection conn = new SalesforceConnection( logInterface, url, username, password );
  GregorianCalendar windowStart = new GregorianCalendar( 2000, 3, 20 );
  GregorianCalendar windowEnd = new GregorianCalendar( 2000, 2, 10 );
  try {
    conn.setCalendar( recordsFilter, windowStart, windowEnd );
    fail();
  } catch ( KettleException expected ) {
    // expected: WrongDates validation failure
  }
}
|
// Stores the filter mode and date window, then validates: both bounds non-null,
// start strictly before end, and the window no more than 30 whole days
// (truncating millisecond division). Duplicate of the setCalendar snippet above.
public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } }
|
// NOTE(review): extraction artifact — duplicate class shell wrapping only setCalendar().
// Kept byte-identical.
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String 
getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String 
getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
@Test
public void testSetCalendarDatesTooFarApart() throws KettleException {
  // Feb 1 .. Mar 11, 2000 is more than 30 days apart, so the window must be rejected.
  SalesforceConnection conn = new SalesforceConnection( logInterface, url, username, password );
  GregorianCalendar windowStart = new GregorianCalendar( 2000, 1, 1 );
  GregorianCalendar windowEnd = new GregorianCalendar( 2000, 2, 11 );
  try {
    conn.setCalendar( recordsFilter, windowStart, windowEnd );
    fail();
  } catch ( KettleException expected ) {
    // expected: StartDateTooOlder validation failure
  }
}
|
// Duplicate of setCalendar: records filter mode and date window, then validates
// non-null bounds, strict ordering, and a maximum span of 30 whole days.
public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } }
|
// NOTE(review): extraction artifact — duplicate class shell wrapping only setCalendar().
// Kept byte-identical.
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } }
|
// NOTE(review): extraction artifact — duplicate class shell with setCalendar() plus a
// bodiless constructor signature. Kept byte-identical.
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String 
getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String 
getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
// Verifies insertClusterSlave builds the R_CLUSTER_SLAVE row correctly: stubs the
// delegate so insertTableRow is a no-op (captured via ArgumentCaptor) and the next
// ID is 357, then asserts the captured row has three INTEGER columns — the new
// association ID (357), the schema ID (159), and the slave ID (864) — and that the
// new ID is returned. Mockito stubbing/capture order is significant; code unchanged.
@Test public void testInsertClusterSlave() throws KettleException { ArgumentCaptor<String> argumentTableName = ArgumentCaptor.forClass( String.class ); ArgumentCaptor<RowMetaAndData> argumentTableData = ArgumentCaptor.forClass( RowMetaAndData.class ); doNothing().when( repo.connectionDelegate ).insertTableRow( argumentTableName.capture(), argumentTableData.capture() ); doReturn( new LongObjectId( 357 ) ).when( repo.connectionDelegate ).getNextClusterSlaveID(); SlaveServer testSlave = new SlaveServer( "slave1", "fakelocal", "9081", "fakeuser", "fakepass" ); testSlave.setObjectId( new LongObjectId( 864 ) ); ClusterSchema testSchema = new ClusterSchema( "schema1", Arrays.asList( testSlave ) ); testSchema.setObjectId( new LongObjectId( 159 ) ); ObjectId result = repo.insertClusterSlave( testSchema, testSlave ); RowMetaAndData insertRecord = argumentTableData.getValue(); assertEquals( KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE, argumentTableName.getValue() ); assertEquals( 3, insertRecord.size() ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 0 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, insertRecord.getValueMeta( 0 ).getName() ); assertEquals( Long.valueOf( 357 ), insertRecord.getInteger( 0 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 1 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER, insertRecord.getValueMeta( 1 ).getName() ); assertEquals( Long.valueOf( 159 ), insertRecord.getInteger( 1 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 2 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE, insertRecord.getValueMeta( 2 ).getName() ); assertEquals( Long.valueOf( 864 ), insertRecord.getInteger( 2 ) ); assertEquals( new LongObjectId( 357 ), result ); }
|
/**
 * Links a slave server to a cluster schema by inserting an association row
 * into the R_CLUSTER_SLAVE table.
 *
 * @param clusterSchema the cluster schema being linked; its object id is stored
 * @param slaveServer the slave server being linked; its object id is stored
 * @return the newly generated id of the cluster-slave association record
 * @throws KettleException if reserving the id or inserting the row fails
 */
public synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ) throws KettleException {
  // Reserve the next id for the association record up front so it can be stored in the row.
  ObjectId clusterSlaveId = connectionDelegate.getNextClusterSlaveID();

  // Assemble the three integer columns of the association row:
  // link id, cluster schema id, slave server id.
  RowMetaAndData row = new RowMetaAndData();
  row.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE ), clusterSlaveId );
  row.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER ), clusterSchema.getObjectId() );
  row.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE ), slaveServer.getObjectId() );

  connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE, row );
  return clusterSlaveId;
}
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase {

  /**
   * Records a cluster-schema / slave-server association in R_CLUSTER_SLAVE and
   * returns the id generated for the new link record.
   */
  public synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ) throws KettleException {
    ObjectId newId = connectionDelegate.getNextClusterSlaveID();
    RowMetaAndData linkRow = new RowMetaAndData();
    // Columns, in order: generated link id, cluster schema id, slave server id.
    linkRow.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE ), newId );
    linkRow.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER ), clusterSchema.getObjectId() );
    linkRow.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE ), slaveServer.getObjectId() );
    connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE, linkRow );
    return newId;
  }
}
|
// NOTE(review): dataset-style listing — class header without the `class` keyword and a
// body-less constructor declaration; code kept verbatim, comments only added.
KettleDatabaseRepository extends KettleDatabaseRepositoryBase {
  // Inserts the cluster-schema / slave-server association row into R_CLUSTER_SLAVE
  // and returns the newly generated association id.
  public synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ) throws KettleException { ObjectId id = connectionDelegate.getNextClusterSlaveID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER ), clusterSchema .getObjectId() ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE ), slaveServer .getObjectId() ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE, table ); return id; }
  // Constructor declaration only (no body in this extracted view).
  KettleDatabaseRepository();
}
|
// NOTE(review): auto-generated declaration dump — the full KettleDatabaseRepository
// member listing with every method body elided except insertClusterSlave(). Kept
// verbatim; restyling a generated listing adds no value. Comment only added.
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ) throws KettleException { ObjectId id = connectionDelegate.getNextClusterSlaveID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER ), clusterSchema .getObjectId() ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE ), slaveServer .getObjectId() ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); }
|
// NOTE(review): auto-generated declaration dump — same KettleDatabaseRepository listing
// as above but also including the public delegate fields at the end. Kept verbatim;
// restyling a generated listing adds no value. Comment only added.
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ) throws KettleException { ObjectId id = connectionDelegate.getNextClusterSlaveID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER ), clusterSchema .getObjectId() ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE ), slaveServer .getObjectId() ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); public KettleDatabaseRepositoryTransDelegate transDelegate; public KettleDatabaseRepositoryJobDelegate jobDelegate; public KettleDatabaseRepositoryDatabaseDelegate databaseDelegate; public KettleDatabaseRepositorySlaveServerDelegate slaveServerDelegate; public KettleDatabaseRepositoryClusterSchemaDelegate clusterSchemaDelegate; public KettleDatabaseRepositoryPartitionSchemaDelegate partitionSchemaDelegate; public KettleDatabaseRepositoryDirectoryDelegate directoryDelegate; public KettleDatabaseRepositoryConnectionDelegate connectionDelegate; public KettleDatabaseRepositoryUserDelegate userDelegate; public KettleDatabaseRepositoryConditionDelegate conditionDelegate; public KettleDatabaseRepositoryValueDelegate valueDelegate; public KettleDatabaseRepositoryNotePadDelegate notePadDelegate; public KettleDatabaseRepositoryStepDelegate stepDelegate; public KettleDatabaseRepositoryJobEntryDelegate jobEntryDelegate; public KettleDatabaseRepositoryMetaStoreDelegate metaStoreDelegate; public KettleDatabaseRepositoryMetaStore metaStore; }
|
@Test public void testSetCalendar() {
  SalesforceConnection connection = mock( SalesforceConnection.class, Mockito.CALLS_REAL_METHODS );
  GregorianCalendar jan1 = new GregorianCalendar( 2016, Calendar.JANUARY, 1 );
  GregorianCalendar jan31 = new GregorianCalendar( 2016, Calendar.JANUARY, 31 );

  // Valid 30-day window with any filter code: must not throw.
  try {
    connection.setCalendar( new Random().nextInt( SalesforceConnectionUtils.recordsFilterDesc.length ), jan1, jan31 );
  } catch ( KettleException e ) {
    fail();
  }
  // Reversed range (start after end): must throw.
  try {
    connection.setCalendar( new Random().nextInt( SalesforceConnectionUtils.recordsFilterDesc.length ), jan31, jan1 );
    fail();
  } catch ( KettleException expected ) {
    // expected
  }
  // Null start date: must throw.
  try {
    connection.setCalendar( new Random().nextInt( SalesforceConnectionUtils.recordsFilterDesc.length ), null, jan31 );
    fail();
  } catch ( KettleException expected ) {
    // expected
  }
  // Null end date: must throw.
  try {
    connection.setCalendar( new Random().nextInt( SalesforceConnectionUtils.recordsFilterDesc.length ), jan1, null );
    fail();
  } catch ( KettleException expected ) {
    // expected
  }
}
|
/**
 * Configures the date-range filter used for incremental Salesforce queries.
 * Arguments are validated BEFORE any state is mutated, so a failed call
 * leaves this connection unchanged (the original assigned the fields first,
 * which could leave the object half-configured when an exception was thrown).
 *
 * @param recordsFilter one of the SalesforceConnectionUtils record-filter codes
 * @param startDate inclusive lower bound of the range; must be non-null
 * @param endDate upper bound of the range; must be non-null and strictly after startDate
 * @throws KettleException if either date is null, startDate is not before endDate,
 *         or the range spans more than 30 days
 */
public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException {
  if ( startDate == null || endDate == null ) {
    throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) );
  }
  if ( startDate.getTime().compareTo( endDate.getTime() ) >= 0 ) {
    throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) );
  }
  // NOTE(review): presumably mirrors the Salesforce getUpdated()/getDeleted()
  // 30-day window limit — confirm against the Partner API docs.
  // 24L forces long arithmetic for the millis-per-day constant.
  long diffDays = ( endDate.getTime().getTime() - startDate.getTime().getTime() ) / ( 24L * 60 * 60 * 1000 );
  if ( diffDays > 30 ) {
    throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) );
  }
  // All checks passed: commit the new filter configuration.
  this.startDate = startDate;
  this.endDate = endDate;
  this.recordsFilter = recordsFilter;
}
|
/** Dataset context stub: SalesforceConnection with the focal setCalendar method. */
SalesforceConnection {
  /**
   * Configures the date-range filter for incremental queries. Validates
   * arguments before mutating state, so a failed call leaves the connection
   * unchanged (the original assigned fields first).
   *
   * @throws KettleException if a date is null, startDate is not before endDate,
   *         or the range exceeds 30 days
   */
  public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException {
    if ( startDate == null || endDate == null ) {
      throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) );
    }
    if ( startDate.getTime().compareTo( endDate.getTime() ) >= 0 ) {
      throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) );
    }
    // 24L forces long arithmetic for the millis-per-day constant.
    long diffDays = ( endDate.getTime().getTime() - startDate.getTime().getTime() ) / ( 24L * 60 * 60 * 1000 );
    if ( diffDays > 30 ) {
      throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) );
    }
    this.startDate = startDate;
    this.endDate = endDate;
    this.recordsFilter = recordsFilter;
  }
}
|
/** Dataset context stub: SalesforceConnection (focal setCalendar, plus constructor declaration). */
SalesforceConnection {
  /**
   * Configures the date-range filter for incremental queries. Validates
   * arguments before mutating state, so a failed call leaves the connection
   * unchanged (the original assigned fields first).
   *
   * @throws KettleException if a date is null, startDate is not before endDate,
   *         or the range exceeds 30 days
   */
  public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException {
    if ( startDate == null || endDate == null ) {
      throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) );
    }
    if ( startDate.getTime().compareTo( endDate.getTime() ) >= 0 ) {
      throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) );
    }
    // 24L forces long arithmetic for the millis-per-day constant.
    long diffDays = ( endDate.getTime().getTime() - startDate.getTime().getTime() ) / ( 24L * 60 * 60 * 1000 );
    if ( diffDays > 30 ) {
      throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) );
    }
    this.startDate = startDate;
    this.endDate = endDate;
    this.recordsFilter = recordsFilter;
  }
  SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password );
}
|
/*
 * Dataset context stub: the full SalesforceConnection API surface with the focal
 * setCalendar implementation inlined. Non-focal members are bodiless declarations
 * (not compilable Java; retained verbatim as dataset residue).
 * NOTE(review): setCalendar assigns the fields before validating them, so a
 * failing call leaves the object partially configured — consider validating first.
 */
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String 
getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
/*
 * Dataset context stub (duplicate row): full SalesforceConnection API surface
 * with the focal setCalendar implementation inlined. Non-focal members are
 * bodiless declarations (not compilable Java; retained verbatim).
 * NOTE(review): setCalendar mutates fields before validating them, so a
 * failing call leaves the object partially configured — consider validating first.
 */
SalesforceConnection { public void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ) throws KettleException { this.startDate = startDate; this.endDate = endDate; this.recordsFilter = recordsFilter; if ( this.startDate == null || this.endDate == null ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.EmptyStartDateOrEndDate" ) ); } if ( this.startDate.getTime().compareTo( this.endDate.getTime() ) >= 0 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.WrongDates" ) ); } long diffDays = ( this.endDate.getTime().getTime() - this.startDate.getTime().getTime() ) / ( 24 * 60 * 60 * 1000 ); if ( diffDays > 30 ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInput.Error.StartDateTooOlder" ) ); } } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String 
getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
@Test public void testCreateBinding() throws KettleException, ConnectionException { SalesforceConnection conn = new SalesforceConnection( null, "http: ConnectorConfig config = new ConnectorConfig(); config.setAuthEndpoint( Connector.END_POINT ); config.setManualLogin( true ); assertNull( conn.getBinding() ); conn.createBinding( config ); PartnerConnection binding1 = conn.getBinding(); conn.createBinding( config ); PartnerConnection binding2 = conn.getBinding(); assertSame( binding1, binding2 ); }
|
/**
 * Lazily creates the Salesforce PartnerConnection and caches it.
 * Later calls return the cached binding; the supplied config is then ignored.
 *
 * @param config connector configuration used only on first invocation
 * @return the (possibly pre-existing) PartnerConnection
 * @throws ConnectionException if the PartnerConnection cannot be constructed
 */
public PartnerConnection createBinding( ConnectorConfig config ) throws ConnectionException {
  if ( this.binding != null ) {
    return this.binding;
  }
  this.binding = new PartnerConnection( config );
  return this.binding;
}
|
/** Dataset context stub: SalesforceConnection with the focal createBinding method. */
SalesforceConnection {
  /** Lazily creates the PartnerConnection and caches it; later calls reuse it. */
  public PartnerConnection createBinding( ConnectorConfig config ) throws ConnectionException {
    if ( this.binding != null ) {
      return this.binding;
    }
    this.binding = new PartnerConnection( config );
    return this.binding;
  }
}
|
/** Dataset context stub: SalesforceConnection (focal createBinding, plus constructor declaration). */
SalesforceConnection {
  /** Lazily creates the PartnerConnection and caches it; later calls reuse it. */
  public PartnerConnection createBinding( ConnectorConfig config ) throws ConnectionException {
    if ( this.binding != null ) {
      return this.binding;
    }
    this.binding = new PartnerConnection( config );
    return this.binding;
  }
  SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password );
}
|
/*
 * Dataset context stub: full SalesforceConnection API surface with the focal
 * createBinding implementation (lazy, cached PartnerConnection) inlined.
 * Non-focal members are bodiless declarations (not compilable Java; retained verbatim).
 */
SalesforceConnection { public PartnerConnection createBinding( ConnectorConfig config ) throws ConnectionException { if ( this.binding == null ) { this.binding = new PartnerConnection( config ); } return this.binding; } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( 
SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
/*
 * Dataset context stub (duplicate row): full SalesforceConnection API surface
 * with the focal createBinding implementation (lazy, cached PartnerConnection)
 * inlined. Non-focal members are bodiless declarations (retained verbatim).
 */
SalesforceConnection { public PartnerConnection createBinding( ConnectorConfig config ) throws ConnectionException { if ( this.binding == null ) { this.binding = new PartnerConnection( config ); } return this.binding; } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( 
SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
@Test public void testGetRecordValue() throws Exception {
  SalesforceConnection connection = mock( SalesforceConnection.class, Mockito.CALLS_REAL_METHODS );
  SObject root = new SObject();
  root.setName( new QName( Constants.PARTNER_SOBJECT_NS, "sObject" ) );

  // Flat field lookup returns the field's value.
  SObject flat = createObject( "field", "value" );
  root.addField( "field", flat );
  assertEquals( "Get value of simple record", "value", connection.getRecordValue( root, "field" ) );

  // A dotted path walks down the object hierarchy.
  SObject parent = createObject( "parentField", null );
  root.addField( "parentField", parent );
  SObject child = createObject( "subField", "subValue" );
  parent.addField( "subField", child );
  assertEquals( "Get value of record with hierarchy", "subValue",
    connection.getRecordValue( root, "parentField.subField" ) );

  // A null relational id yields null rather than an exception.
  XmlObject nullHolder = new XmlObject( new QName( "nullField" ) );
  root.addField( "nullField", nullHolder );
  assertEquals( "Get null value when relational query id is null", null,
    connection.getRecordValue( root, "nullField.childField" ) );
}
|
/**
 * Resolves a (possibly dotted) field path on the given record and renders the
 * resulting value as a string. QueryResult values are rendered as JSON.
 * Returns null when the record is null, the path does not resolve, or the
 * resolved element holds no value.
 *
 * @param con the Salesforce record to read, may be null
 * @param fieldname simple name or dot-separated hierarchy (e.g. "parent.child")
 * @return string form of the value, or null
 * @throws KettleException if rendering a nested QueryResult fails
 */
public String getRecordValue( SObject con, String fieldname ) throws KettleException {
  if ( con == null ) {
    return null;
  }
  String[] fieldHierarchy = fieldname.split( "\\." );
  XmlObject element = getMessageElementForHierarchy( con, fieldHierarchy );
  if ( element == null ) {
    return null;
  }
  Object value = element.getValue();
  if ( value == null ) {
    // Original re-read element.getValue() here and cast it — same null result.
    return null;
  }
  if ( value instanceof QueryResult ) {
    // Relational sub-query: serialize the nested result set as JSON.
    return buildJsonQueryResult( (QueryResult) value );
  }
  return String.valueOf( value );
}
|
/** Dataset context stub: SalesforceConnection with the focal getRecordValue method. */
SalesforceConnection {
  /**
   * Resolves a (possibly dotted) field path on the record and renders its
   * value as a string (QueryResult values as JSON); null on any miss.
   */
  public String getRecordValue( SObject con, String fieldname ) throws KettleException {
    if ( con == null ) {
      return null;
    }
    String[] fieldHierarchy = fieldname.split( "\\." );
    XmlObject element = getMessageElementForHierarchy( con, fieldHierarchy );
    if ( element == null ) {
      return null;
    }
    Object value = element.getValue();
    if ( value == null ) {
      return null;
    }
    if ( value instanceof QueryResult ) {
      return buildJsonQueryResult( (QueryResult) value );
    }
    return String.valueOf( value );
  }
}
|
/** Dataset context stub: SalesforceConnection (focal getRecordValue, plus constructor declaration). */
SalesforceConnection {
  /**
   * Resolves a (possibly dotted) field path on the record and renders its
   * value as a string (QueryResult values as JSON); null on any miss.
   */
  public String getRecordValue( SObject con, String fieldname ) throws KettleException {
    if ( con == null ) {
      return null;
    }
    String[] fieldHierarchy = fieldname.split( "\\." );
    XmlObject element = getMessageElementForHierarchy( con, fieldHierarchy );
    if ( element == null ) {
      return null;
    }
    Object value = element.getValue();
    if ( value == null ) {
      return null;
    }
    if ( value instanceof QueryResult ) {
      return buildJsonQueryResult( (QueryResult) value );
    }
    return String.valueOf( value );
  }
  SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password );
}
|
/*
 * Dataset context stub: full SalesforceConnection API surface with the focal
 * getRecordValue implementation inlined (dotted-path field lookup; QueryResult
 * values rendered as JSON). Non-focal members are bodiless declarations
 * (not compilable Java; retained verbatim).
 */
SalesforceConnection { public String getRecordValue( SObject con, String fieldname ) throws KettleException { String[] fieldHierarchy = fieldname.split( "\\." ); if ( con == null ) { return null; } else { XmlObject element = getMessageElementForHierarchy( con, fieldHierarchy ); if ( element != null ) { Object object = element.getValue(); if ( object != null ) { if ( object instanceof QueryResult ) { return buildJsonQueryResult( (QueryResult) object ); } return String.valueOf( object ); } else { return (String) element.getValue(); } } } return null; } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean 
excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
/*
 * Dataset context stub (duplicate row): full SalesforceConnection API surface
 * with the focal getRecordValue implementation inlined (dotted-path field
 * lookup; QueryResult values rendered as JSON). Non-focal members are
 * bodiless declarations (retained verbatim).
 */
SalesforceConnection { public String getRecordValue( SObject con, String fieldname ) throws KettleException { String[] fieldHierarchy = fieldname.split( "\\." ); if ( con == null ) { return null; } else { XmlObject element = getMessageElementForHierarchy( con, fieldHierarchy ); if ( element != null ) { Object object = element.getValue(); if ( object != null ) { if ( object instanceof QueryResult ) { return buildJsonQueryResult( (QueryResult) object ); } return String.valueOf( object ); } else { return (String) element.getValue(); } } } return null; } SalesforceConnection( LogChannelInterface logInterface, String url, String username, String password ); boolean isRollbackAllChangesOnError(); @Deprecated void rollbackAllChangesOnError( boolean value ); void setRollbackAllChangesOnError( boolean value ); boolean isQueryAll(); @Deprecated void queryAll( boolean value ); void setQueryAll( boolean value ); void setCalendar( int recordsFilter, GregorianCalendar startDate, GregorianCalendar endDate ); void setSQL( String sql ); void setFieldsList( String fieldsList ); void setModule( String module ); String getURL(); String getSQL(); Date getServerTimestamp(); String getModule(); QueryResult getQueryResult(); PartnerConnection createBinding( ConnectorConfig config ); PartnerConnection getBinding(); void setTimeOut( int timeout ); int getTimeOut(); boolean isUsingCompression(); void setUsingCompression( boolean useCompression ); String getUsername(); void setUsername( String value ); String getPassword(); void setPassword( String value ); void connect(); void query( boolean specifyQuery ); void close(); int getQueryResultSize(); int getRecordsCount(); SalesforceRecordValue getRecord( int recordIndex ); String getRecordValue( SObject con, String fieldname ); XmlObject[] getElements(); boolean queryMore(); String[] getAllAvailableObjects( boolean OnlyQueryableObjects ); Field[] getObjectFields( String objectName ); Field[] getObjectFields( String objectName, boolean 
excludeNonUpdatableFields ); String[] getFields( String objectName ); String[] getFields( String objectName, boolean excludeNonUpdatableFields ); String[] getFields( Field[] fields ); String[] getFields( Field[] fields, boolean excludeNonUpdatableFields ); UpsertResult[] upsert( String upsertField, SObject[] sfBuffer ); SaveResult[] insert( SObject[] sfBuffer ); SaveResult[] update( SObject[] sfBuffer ); DeleteResult[] delete( String[] id ); static XmlObject createMessageElement( String name, Object value, boolean useExternalKey ); static XmlObject fromTemplateElement( String name, Object value, boolean setValue ); static XmlObject[] getChildren( SObject object ); }
|
@Test public void testErrorHandling() {
  // The Salesforce Update step must advertise support for PDI error handling.
  SalesforceStepMeta salesforceMeta = new SalesforceUpdateMeta();
  assertTrue( salesforceMeta.supportsErrorHandling() );
}
|
/**
 * @return always true — this step supports PDI step error handling, so rows
 *         that fail can be routed to an error hop instead of aborting.
 */
public boolean supportsErrorHandling() { return true; }
|
/* Dataset context stub: SalesforceUpdateMeta with the focal supportsErrorHandling (always true: failed rows may be routed to an error hop). */
SalesforceUpdateMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } }
|
/* Dataset context stub: SalesforceUpdateMeta with the focal supportsErrorHandling (always true) and its constructor declaration. */
SalesforceUpdateMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceUpdateMeta(); }
|
/*
 * Dataset context stub: full SalesforceUpdateMeta API surface with the focal
 * supportsErrorHandling implementation (always true) inlined. Non-focal
 * members are bodiless declarations (not compilable Java; retained verbatim).
 */
SalesforceUpdateMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
/*
 * Dataset context stub (duplicate row): full SalesforceUpdateMeta API surface
 * with the focal supportsErrorHandling implementation (always true) inlined.
 * Non-focal members are bodiless declarations (retained verbatim).
 */
SalesforceUpdateMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testGetFields() throws KettleStepException {
  SalesforceUpdateMeta updateMeta = new SalesforceUpdateMeta();
  updateMeta.setDefault();

  // getFields() must leave an empty row untouched...
  RowMetaInterface rowMeta = new RowMeta();
  updateMeta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 0, rowMeta.size() );

  // ...and pass an existing field through unchanged.
  rowMeta.clear();
  rowMeta.addValueMeta( new ValueMetaString( "testString" ) );
  updateMeta.getFields( rowMeta, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, rowMeta.size() );
  assertEquals( ValueMetaInterface.TYPE_STRING, rowMeta.getValueMeta( 0 ).getType() );
  assertEquals( "testString", rowMeta.getValueMeta( 0 ).getName() );
}
|
/**
 * Intentionally a no-op: the Salesforce Update step adds, removes and renames
 * nothing, so the incoming row meta passes through to the next step unchanged.
 */
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { }
|
/* Dataset context stub: SalesforceUpdateMeta with the focal no-op getFields (the step passes the incoming row meta through unchanged). */
SalesforceUpdateMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { } }
|
/* Dataset context stub: SalesforceUpdateMeta with the focal no-op getFields (row meta passes through unchanged) and the constructor declaration. */
SalesforceUpdateMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { } SalesforceUpdateMeta(); }
|
/*
 * Dataset context stub: full SalesforceUpdateMeta API surface with the focal
 * no-op getFields implementation inlined (row meta passes through unchanged).
 * Non-focal members are bodiless declarations (not compilable Java; retained verbatim).
 */
SalesforceUpdateMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
/*
 * Dataset context stub (duplicate row): full SalesforceUpdateMeta API surface
 * with the focal no-op getFields implementation inlined (row meta passes
 * through unchanged). Non-focal members are bodiless declarations (retained verbatim).
 */
SalesforceUpdateMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testCheck() { SalesforceUpdateMeta meta = new SalesforceUpdateMeta(); meta.setDefault(); List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>(); meta.check( remarks, null, null, null, null, null, null, null, null, null ); boolean hasError = false; for ( CheckResultInterface cr : remarks ) { if ( cr.getType() == CheckResult.TYPE_RESULT_ERROR ) { hasError = true; } } assertFalse( remarks.isEmpty() ); assertTrue( hasError ); remarks.clear(); meta.setDefault(); meta.setUsername( "user" ); meta.setUpdateLookup( new String[]{ "SalesforceField" } ); meta.setUpdateStream( new String[]{ "StreamField" } ); meta.setUseExternalId( new Boolean[]{ false } ); meta.check( remarks, null, null, null, null, null, null, null, null, null ); hasError = false; for ( CheckResultInterface cr : remarks ) { if ( cr.getType() == CheckResult.TYPE_RESULT_ERROR ) { hasError = true; } } assertFalse( remarks.isEmpty() ); assertFalse( hasError ); }
|
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); }
|
SalesforceUpdateMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } }
|
SalesforceUpdateMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } SalesforceUpdateMeta(); }
|
SalesforceUpdateMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceUpdateMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpdateMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testSalesforceUpdateMeta() throws KettleException { List<String> attributes = new ArrayList<String>(); attributes.addAll( SalesforceMetaTest.getDefaultAttributes() ); attributes.addAll( Arrays.asList( "batchSize", "updateLookup", "updateStream", "useExternalId", "rollbackAllChangesOnError" ) ); Map<String, String> getterMap = new HashMap<String, String>(); Map<String, String> setterMap = new HashMap<String, String>(); Map<String, FieldLoadSaveValidator<?>> fieldLoadSaveValidators = new HashMap<String, FieldLoadSaveValidator<?>>(); fieldLoadSaveValidators.put( "updateLookup", new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) ); fieldLoadSaveValidators.put( "updateStream", new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) ); fieldLoadSaveValidators.put( "useExternalId", new ArrayLoadSaveValidator<Boolean>( new BooleanLoadSaveValidator(), 50 ) ); LoadSaveTester loadSaveTester = new LoadSaveTester( SalesforceUpdateMeta.class, attributes, getterMap, setterMap, fieldLoadSaveValidators, new HashMap<String, FieldLoadSaveValidator<?>>() ); loadSaveTester.testRepoRoundTrip(); loadSaveTester.testXmlRoundTrip(); }
|
public SalesforceUpdateMeta() { super(); }
|
SalesforceUpdateMeta extends SalesforceStepMeta { public SalesforceUpdateMeta() { super(); } }
|
SalesforceUpdateMeta extends SalesforceStepMeta { public SalesforceUpdateMeta() { super(); } SalesforceUpdateMeta(); }
|
SalesforceUpdateMeta extends SalesforceStepMeta { public SalesforceUpdateMeta() { super(); } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceUpdateMeta extends SalesforceStepMeta { public SalesforceUpdateMeta() { super(); } SalesforceUpdateMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testErrorHandling() { SalesforceStepMeta meta = new SalesforceUpsertMeta(); assertTrue( meta.supportsErrorHandling() ); }
|
public boolean supportsErrorHandling() { return true; }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceUpsertMeta(); }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testGetFields() throws KettleStepException { SalesforceUpsertMeta meta = new SalesforceUpsertMeta(); meta.setDefault(); RowMetaInterface r = new RowMeta(); meta.getFields( r, "thisStep", null, null, new Variables(), null, null ); assertEquals( 1, r.size() ); assertEquals( "Id", r.getFieldNames()[0] ); meta.setSalesforceIDFieldName( "id_field" ); r.clear(); meta.getFields( r, "thisStep", null, null, new Variables(), null, null ); assertEquals( 1, r.size() ); assertEquals( "id_field", r.getFieldNames()[0] ); }
|
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } } }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceUpsertMeta(); }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testCheck() { SalesforceUpsertMeta meta = new SalesforceUpsertMeta(); meta.setDefault(); List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>(); meta.check( remarks, null, null, null, null, null, null, null, null, null ); boolean hasError = false; for ( CheckResultInterface cr : remarks ) { if ( cr.getType() == CheckResult.TYPE_RESULT_ERROR ) { hasError = true; } } assertFalse( remarks.isEmpty() ); assertTrue( hasError ); remarks.clear(); meta.setDefault(); meta.setUsername( "user" ); meta.setUpdateLookup( new String[]{ "SalesforceField" } ); meta.setUpdateStream( new String[]{ "StreamField" } ); meta.setUseExternalId( new Boolean[]{ false } ); meta.check( remarks, null, null, null, null, null, null, null, null, null ); hasError = false; for ( CheckResultInterface cr : remarks ) { if ( cr.getType() == CheckResult.TYPE_RESULT_ERROR ) { hasError = true; } } assertFalse( remarks.isEmpty() ); assertFalse( hasError ); }
|
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } SalesforceUpsertMeta(); }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceUpsertMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testInsertTransformationCluster() throws KettleException { ArgumentCaptor<String> argumentTableName = ArgumentCaptor.forClass( String.class ); ArgumentCaptor<RowMetaAndData> argumentTableData = ArgumentCaptor.forClass( RowMetaAndData.class ); doNothing().when( repo.connectionDelegate ).insertTableRow( argumentTableName.capture(), argumentTableData.capture() ); doReturn( new LongObjectId( 123 ) ).when( repo.connectionDelegate ).getNextTransformationClusterID(); ObjectId result = repo.insertTransformationCluster( new LongObjectId( 456 ), new LongObjectId( 789 ) ); RowMetaAndData insertRecord = argumentTableData.getValue(); assertEquals( KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER, argumentTableName.getValue() ); assertEquals( 3, insertRecord.size() ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 0 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, insertRecord.getValueMeta( 0 ).getName() ); assertEquals( Long.valueOf( 123 ), insertRecord.getInteger( 0 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 1 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION, insertRecord.getValueMeta( 1 ).getName() ); assertEquals( Long.valueOf( 456 ), insertRecord.getInteger( 1 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 2 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER, insertRecord.getValueMeta( 2 ).getName() ); assertEquals( Long.valueOf( 789 ), insertRecord.getInteger( 2 ) ); assertEquals( new LongObjectId( 123 ), result ); }
|
public synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationClusterID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER ), id_cluster ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER, table ); return id; }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationClusterID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER ), id_cluster ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER, table ); return id; } }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationClusterID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER ), id_cluster ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER, table ); return id; } KettleDatabaseRepository(); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationClusterID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER ), id_cluster ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationClusterID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER ), id_cluster ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); public KettleDatabaseRepositoryTransDelegate transDelegate; public KettleDatabaseRepositoryJobDelegate jobDelegate; public KettleDatabaseRepositoryDatabaseDelegate databaseDelegate; public KettleDatabaseRepositorySlaveServerDelegate slaveServerDelegate; public KettleDatabaseRepositoryClusterSchemaDelegate clusterSchemaDelegate; public KettleDatabaseRepositoryPartitionSchemaDelegate partitionSchemaDelegate; public KettleDatabaseRepositoryDirectoryDelegate directoryDelegate; public KettleDatabaseRepositoryConnectionDelegate connectionDelegate; public KettleDatabaseRepositoryUserDelegate userDelegate; public KettleDatabaseRepositoryConditionDelegate conditionDelegate; public KettleDatabaseRepositoryValueDelegate valueDelegate; public KettleDatabaseRepositoryNotePadDelegate notePadDelegate; public KettleDatabaseRepositoryStepDelegate stepDelegate; public KettleDatabaseRepositoryJobEntryDelegate jobEntryDelegate; public KettleDatabaseRepositoryMetaStoreDelegate metaStoreDelegate; public KettleDatabaseRepositoryMetaStore metaStore; }
|
/**
 * Round-trip serialization test for {@code SalesforceUpsertMeta}: checks that every listed
 * attribute survives both a repository save/load and an XML save/load cycle.
 *
 * The upsert-specific attributes are appended to the shared Salesforce attribute list, and the
 * three parallel arrays (updateLookup / updateStream / useExternalId) are each validated with
 * a 50-element array validator.
 *
 * @throws KettleException if the load/save round trip fails
 */
@Test
public void testSalesforceUpsertMeta() throws KettleException {
  // Start from the attributes common to all Salesforce steps, then add the upsert-specific ones.
  List<String> attributes = new ArrayList<>( SalesforceMetaTest.getDefaultAttributes() );
  attributes.addAll( Arrays.asList( "upsertField", "batchSize", "salesforceIDFieldName", "updateLookup",
    "updateStream", "useExternalId", "rollbackAllChangesOnError" ) );
  // No getter/setter name overrides are needed; the bean-style defaults apply.
  Map<String, String> getterMap = new HashMap<>();
  Map<String, String> setterMap = new HashMap<>();
  Map<String, FieldLoadSaveValidator<?>> fieldLoadSaveValidators = new HashMap<>();
  // The three arrays are kept in lock-step by the step, so validate each with the same length.
  fieldLoadSaveValidators.put( "updateLookup",
    new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) );
  fieldLoadSaveValidators.put( "updateStream",
    new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) );
  fieldLoadSaveValidators.put( "useExternalId",
    new ArrayLoadSaveValidator<Boolean>( new BooleanLoadSaveValidator(), 50 ) );
  LoadSaveTester loadSaveTester = new LoadSaveTester( SalesforceUpsertMeta.class, attributes, getterMap, setterMap,
    fieldLoadSaveValidators, new HashMap<String, FieldLoadSaveValidator<?>>() );
  loadSaveTester.testRepoRoundTrip();
  loadSaveTester.testXmlRoundTrip();
}
|
// Default constructor; delegates to the SalesforceStepMeta base constructor.
public SalesforceUpsertMeta() { super(); }
|
// NOTE(review): the entries below look like generated API summaries of SalesforceUpsertMeta at
// increasing levels of detail (constructor only, then constructor plus the declared method
// signatures), with the last entry an exact duplicate of the previous one. They are not
// compilable Java as written (missing 'class' keyword, bodiless method declarations, duplicate
// constructor declaration 'SalesforceUpsertMeta();') — presumably rows of a generated dataset.
// Left byte-identical.
SalesforceUpsertMeta extends SalesforceStepMeta { public SalesforceUpsertMeta() { super(); } }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public SalesforceUpsertMeta() { super(); } SalesforceUpsertMeta(); }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public SalesforceUpsertMeta() { super(); } SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceUpsertMeta extends SalesforceStepMeta { public SalesforceUpsertMeta() { super(); } SalesforceUpsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setUpsertField( String upsertField ); String getUpsertField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
// Verifies that a field in external-key syntax (object:externalId/lookupField) on a standard
// object resolves to the lookup's Id column: "Account:ExtID_AccountId__c/Account" -> "AccountId".
@Test public void testFieldWithExtIdYes_StandartObject() { inputFieldName = "Account:ExtID_AccountId__c/Account"; expectedFieldName = "AccountId"; String fieldToNullName = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); assertEquals( expectedFieldName, fieldToNullName ); }
|
/**
 * Resolves the column name that must be nulled on the Salesforce object for the given field.
 *
 * When {@code isUseExtId} is false, the field name is returned unchanged. When true, the field
 * is expected in external-key syntax ({@code object:externalId/lookupField}); the part after the
 * separator is extracted and mapped: a custom relationship ({@code ...__r}) becomes the matching
 * custom column ({@code ...__c}), anything else gets an {@code Id} suffix. A field that does not
 * match the external-key pattern is returned as-is after a debug warning.
 *
 * @param log        channel for debug-level diagnostics
 * @param field      incoming field name, possibly in external-key syntax
 * @param isUseExtId whether the field uses an external id and needs translating
 * @return the column name to null out
 */
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) {
  String resolved = field;
  if ( isUseExtId ) {
    if ( FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) {
      // Keep only the lookup-field portion after the external-id separator.
      String lookup = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 );
      if ( lookup.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) {
        // Custom relationship: swap the "__r" relationship suffix for the "__c" column suffix.
        int keep = lookup.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length();
        resolved = lookup.substring( 0, keep ) + CUSTOM_OBJECT_SUFFIX;
      } else {
        // Standard relationship: the null target is the lookup's Id column.
        resolved = lookup + "Id";
      }
    } else {
      // Malformed external-key syntax: warn (debug only) and fall back to the raw field name.
      if ( log.isDebug() ) {
        log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax",
          field, resolved ) );
      }
      return resolved;
    }
  }
  if ( log.isDebug() ) {
    log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", resolved ) );
  }
  return resolved;
}
|
// NOTE(review): generated API summaries of SalesforceUtils, repeating getFieldToNullName verbatim
// and progressively adding the private constructor and the static method declaration. Not
// compilable Java as written (missing 'class' keyword, bodiless declarations, the last two
// entries are duplicates) — presumably dataset rows. Left byte-identical.
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
// Verifies that with external ids disabled a standard-object field name passes through unchanged.
@Test public void testFieldWithExtIdNo_StandartObject() { inputFieldName = "AccountId"; expectedFieldName = "AccountId"; String fieldToNullName = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, false ); assertEquals( expectedFieldName, fieldToNullName ); }
|
// NOTE(review): another dataset row group — the bare getFieldToNullName implementation followed
// by the same generated SalesforceUtils summaries as above (the last two are duplicates, and the
// standalone declarations are not compilable Java). Left byte-identical.
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
// Verifies external-key syntax on a custom object: the "__r" relationship suffix of the lookup
// field is replaced with the "__c" custom-column suffix ("ParentObjectId__r" -> "ParentObjectId__c").
@Test public void testFieldWithExtIdYes_CustomObject() { inputFieldName = "ParentObject__c:Name/ParentObjectId__r"; expectedFieldName = "ParentObjectId__c"; String fieldToNullName = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); assertEquals( expectedFieldName, fieldToNullName ); }
|
// NOTE(review): duplicate dataset row group — identical to the earlier bare getFieldToNullName
// implementation plus the same generated SalesforceUtils summaries. Left byte-identical.
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
// Verifies that with external ids disabled a custom-object field name passes through unchanged.
@Test public void testFieldWithExtIdNo_CustomObject() { inputFieldName = "ParentObjectId__c"; expectedFieldName = "ParentObjectId__c"; String fieldToNullName = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, false ); assertEquals( expectedFieldName, fieldToNullName ); }
|
// NOTE(review): duplicate dataset row group — identical to the earlier bare getFieldToNullName
// implementation plus the same generated SalesforceUtils summaries. Left byte-identical.
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
// Verifies the fallback path: external ids enabled but the field does not match the
// object:externalId/lookupField pattern, so the raw field name is returned unchanged.
// Debug logging is enabled on the mock so the warning branch is exercised.
@Test public void testFieldWithExtIdYesButNameInIncorrectSyntax_StandartObject() { when( logMock.isDebug() ).thenReturn( true ); inputFieldName = "Account"; expectedFieldName = inputFieldName; String fieldToNullName = SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); assertEquals( expectedFieldName, fieldToNullName ); }
|
// NOTE(review): duplicate dataset row group — identical to the earlier bare getFieldToNullName
// implementation plus the same generated SalesforceUtils summaries. Left byte-identical.
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
@Test public void testIncorrectExternalKeySyntaxWarnIsLoggedInDebugMode() { when( logMock.isDebug() ).thenReturn( true ); inputFieldName = "AccountId"; verify( logMock, never() ).logDebug( anyString() ); SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); verify( logMock ).logDebug( "The field has incorrect external key syntax: AccountId. Syntax for external key should be : object:externalId/lookupField. Trying to use fieldToNullName=AccountId." ); }
|
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
@Test public void testIncorrectExternalKeySyntaxWarnIsNotLoggedInNotDebugMode() { when( logMock.isDebug() ).thenReturn( false ); inputFieldName = "AccountId"; verify( logMock, never() ).logDebug( anyString() ); SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); verify( logMock, never() ).logDebug( "The field has incorrect external key syntax: AccountId. Syntax for external key should be : object:externalId/lookupField. Trying to use fieldToNullName=AccountId." ); }
|
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
@Test public void testFinalNullFieldNameIsLoggedInDebugMode_StandartObject() { when( logMock.isDebug() ).thenReturn( true ); inputFieldName = "Account:ExtID_AccountId__c/Account"; verify( logMock, never() ).logDebug( anyString() ); SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); verify( logMock ).logDebug( "fieldToNullName=AccountId" ); }
|
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
@Test public void testFinalNullFieldNameIsLoggedInDebugMode_CustomObject() { when( logMock.isDebug() ).thenReturn( true ); inputFieldName = "ParentObject__c:Name/ParentObjectId__r"; verify( logMock, never() ).logDebug( anyString() ); SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true ); verify( logMock ).logDebug( "fieldToNullName=ParentObjectId__c" ); }
|
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
SalesforceUtils { public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) { String fieldToNullName = field; if ( isUseExtId ) { if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) { if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field, fieldToNullName ) ); } return fieldToNullName; } String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 ); if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) { fieldToNullName = lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() ) + CUSTOM_OBJECT_SUFFIX; if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } fieldToNullName = lookupField + "Id"; } if ( log.isDebug() ) { log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) ); } return fieldToNullName; } private SalesforceUtils(); static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ); }
|
@Test public void testInsertTransformationSlave() throws KettleException { ArgumentCaptor<String> argumentTableName = ArgumentCaptor.forClass( String.class ); ArgumentCaptor<RowMetaAndData> argumentTableData = ArgumentCaptor.forClass( RowMetaAndData.class ); doNothing().when( repo.connectionDelegate ).insertTableRow( argumentTableName.capture(), argumentTableData.capture() ); doReturn( new LongObjectId( 789 ) ).when( repo.connectionDelegate ).getNextTransformationSlaveID(); ObjectId result = repo.insertTransformationSlave( new LongObjectId( 456 ), new LongObjectId( 123 ) ); RowMetaAndData insertRecord = argumentTableData.getValue(); assertEquals( KettleDatabaseRepository.TABLE_R_TRANS_SLAVE, argumentTableName.getValue() ); assertEquals( 3, insertRecord.size() ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 0 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, insertRecord.getValueMeta( 0 ).getName() ); assertEquals( Long.valueOf( 789 ), insertRecord.getInteger( 0 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 1 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION, insertRecord.getValueMeta( 1 ).getName() ); assertEquals( Long.valueOf( 456 ), insertRecord.getInteger( 1 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 2 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE, insertRecord.getValueMeta( 2 ).getName() ); assertEquals( Long.valueOf( 123 ), insertRecord.getInteger( 2 ) ); assertEquals( new LongObjectId( 789 ), result ); }
|
public synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationSlaveID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE ), id_slave ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_SLAVE, table ); return id; }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationSlaveID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE ), id_slave ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_SLAVE, table ); return id; } }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationSlaveID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE ), id_slave ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_SLAVE, table ); return id; } KettleDatabaseRepository(); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationSlaveID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE ), id_slave ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_SLAVE, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ) throws KettleException { ObjectId id = connectionDelegate.getNextTransformationSlaveID(); RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE ), id ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE ), id_slave ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_TRANS_SLAVE, table ); return id; } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); public KettleDatabaseRepositoryTransDelegate transDelegate; public KettleDatabaseRepositoryJobDelegate jobDelegate; public KettleDatabaseRepositoryDatabaseDelegate databaseDelegate; public KettleDatabaseRepositorySlaveServerDelegate slaveServerDelegate; public KettleDatabaseRepositoryClusterSchemaDelegate clusterSchemaDelegate; public KettleDatabaseRepositoryPartitionSchemaDelegate partitionSchemaDelegate; public KettleDatabaseRepositoryDirectoryDelegate directoryDelegate; public KettleDatabaseRepositoryConnectionDelegate connectionDelegate; public KettleDatabaseRepositoryUserDelegate userDelegate; public KettleDatabaseRepositoryConditionDelegate conditionDelegate; public KettleDatabaseRepositoryValueDelegate valueDelegate; public KettleDatabaseRepositoryNotePadDelegate notePadDelegate; public KettleDatabaseRepositoryStepDelegate stepDelegate; public KettleDatabaseRepositoryJobEntryDelegate jobEntryDelegate; public KettleDatabaseRepositoryMetaStoreDelegate metaStoreDelegate; public KettleDatabaseRepositoryMetaStore metaStore; }
|
@Test public void testErrorHandling() { SalesforceStepMeta meta = new SalesforceDeleteMeta(); assertTrue( meta.supportsErrorHandling() ); }
|
public boolean supportsErrorHandling() { return true; }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceDeleteMeta(); }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceDeleteMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); void setDeleteField( String DeleteField ); String getDeleteField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceDeleteMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); void setDeleteField( String DeleteField ); String getDeleteField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testGetFields() throws KettleStepException { SalesforceDeleteMeta meta = new SalesforceDeleteMeta(); meta.setDefault(); RowMetaInterface r = new RowMeta(); meta.getFields( r, "thisStep", null, null, new Variables(), null, null ); assertEquals( 0, r.size() ); r.clear(); r.addValueMeta( new ValueMetaString( "testString" ) ); meta.getFields( r, "thisStep", null, null, new Variables(), null, null ); assertEquals( 1, r.size() ); assertEquals( ValueMetaInterface.TYPE_STRING, r.getValueMeta( 0 ).getType() ); assertEquals( "testString", r.getValueMeta( 0 ).getName() ); }
|
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { } }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { } SalesforceDeleteMeta(); }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { } SalesforceDeleteMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); void setDeleteField( String DeleteField ); String getDeleteField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { } SalesforceDeleteMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); void setDeleteField( String DeleteField ); String getDeleteField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
/**
 * check() validation for SalesforceDeleteMeta: the default (credential-less)
 * configuration must raise an error remark, while a configured username must
 * validate without errors.
 */
@Test
public void testCheck() {
  SalesforceDeleteMeta meta = new SalesforceDeleteMeta();
  meta.setDefault();

  // Defaults: no username configured, so at least one error remark is expected.
  List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  boolean sawError = false;
  for ( CheckResultInterface remark : remarks ) {
    if ( remark.getType() == CheckResult.TYPE_RESULT_ERROR ) {
      sawError = true;
      break;
    }
  }
  assertFalse( remarks.isEmpty() );
  assertTrue( sawError );

  // With a username the step should still produce remarks, but none of them errors.
  remarks.clear();
  meta.setDefault();
  meta.setUsername( "user" );
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  sawError = false;
  for ( CheckResultInterface remark : remarks ) {
    if ( remark.getType() == CheckResult.TYPE_RESULT_ERROR ) {
      sawError = true;
      break;
    }
  }
  assertFalse( remarks.isEmpty() );
  assertFalse( sawError );
}
|
/**
 * Adds validation remarks for this step. Delegates the shared Salesforce
 * connection checks to the superclass, then records whether any incoming
 * step streams are connected: an incoming stream is reported as an error
 * ("NoInputExpected"), no incoming stream as OK ("NoInput").
 *
 * @param remarks  list the resulting {@link CheckResult} is appended to
 * @param input    names of steps feeding into this one; may be null
 */
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
  Repository repository, IMetaStore metaStore ) {
  super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore );

  final boolean hasIncomingStreams = input != null && input.length > 0;
  final CheckResult result;
  if ( hasIncomingStreams ) {
    result = new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInputExpected" ), stepMeta );
  } else {
    result = new CheckResult( CheckResult.TYPE_RESULT_OK,
      BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInput" ), stepMeta );
  }
  remarks.add( result );
}
|
SalesforceDeleteMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); } }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); } SalesforceDeleteMeta(); }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); } SalesforceDeleteMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); void setDeleteField( String DeleteField ); String getDeleteField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceDeleteMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceDeleteMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); } SalesforceDeleteMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); void setDeleteField( String DeleteField ); String getDeleteField(); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
/**
 * Round-trip load/save coverage for SalesforceInputMeta: verifies that every
 * step attribute survives both repository and XML serialization.
 */
@Test
public void testSalesforceInputMeta() throws KettleException {
  // Attributes under test = common Salesforce attributes plus the input-step-specific ones.
  List<String> attrs = new ArrayList<String>( SalesforceMetaTest.getDefaultAttributes() );
  attrs.addAll( Arrays.asList( "inputFields", "condition", "query", "specifyQuery", "includeTargetURL",
    "targetURLField", "includeModule", "moduleField", "includeRowNumber", "includeDeletionDate",
    "deletionDateField", "rowNumberField", "includeSQL", "sqlField", "includeTimestamp", "timestampField",
    "readFrom", "readTo", "recordsFilter", "queryAll", "rowLimit" ) );

  // Accessor names that do not follow the getX/setX bean convention, so the
  // tester cannot derive them automatically.
  Map<String, String> getters = new HashMap<String, String>();
  Map<String, String> setters = new HashMap<String, String>();
  getters.put( "includeTargetURL", "includeTargetURL" );
  getters.put( "includeModule", "includeModule" );
  getters.put( "includeRowNumber", "includeRowNumber" );
  getters.put( "includeDeletionDate", "includeDeletionDate" );
  getters.put( "includeSQL", "includeSQL" );
  getters.put( "sqlField", "getSQLField" );
  setters.put( "sqlField", "setSQLField" );
  getters.put( "includeTimestamp", "includeTimestamp" );

  // Custom validators: an array of input fields and the bounded records-filter code.
  Map<String, FieldLoadSaveValidator<?>> validators = new HashMap<String, FieldLoadSaveValidator<?>>();
  validators.put( "inputFields",
    new ArrayLoadSaveValidator<SalesforceInputField>( new SalesforceInputFieldLoadSaveValidator(), 50 ) );
  validators.put( "recordsFilter", new RecordsFilterLoadSaveValidator() );

  LoadSaveTester tester = new LoadSaveTester( SalesforceInputMeta.class, attrs, getters, setters,
    validators, new HashMap<String, FieldLoadSaveValidator<?>>() );
  tester.testRepoRoundTrip();
  tester.testXmlRoundTrip();
}
|
/** Default constructor; simply delegates to the SalesforceStepMeta base constructor. */
public SalesforceInputMeta() { super(); }
|
SalesforceInputMeta extends SalesforceStepMeta { public SalesforceInputMeta() { super(); } }
|
SalesforceInputMeta extends SalesforceStepMeta { public SalesforceInputMeta() { super(); } SalesforceInputMeta(); }
|
SalesforceInputMeta extends SalesforceStepMeta { public SalesforceInputMeta() { super(); } SalesforceInputMeta(); SalesforceInputField[] getInputFields(); void setInputFields( SalesforceInputField[] inputFields ); String getQuery(); void setQuery( String query ); boolean isSpecifyQuery(); void setSpecifyQuery( boolean specifyQuery ); boolean isQueryAll(); void setQueryAll( boolean value ); String getCondition(); void setCondition( String condition ); void setTargetURLField( String TargetURLField ); void setSQLField( String sqlField ); void setTimestampField( String timestampField ); void setModuleField( String module_field ); int getRecordsFilter(); void setRecordsFilter( int recordsFilter ); boolean includeTargetURL(); boolean includeSQL(); void setIncludeSQL( boolean includeSQL ); boolean includeTimestamp(); void setIncludeTimestamp( boolean includeTimestamp ); boolean includeModule(); void setIncludeTargetURL( boolean includeTargetURL ); void setIncludeModule( boolean includemodule ); boolean includeRowNumber(); void setIncludeRowNumber( boolean includeRowNumber ); boolean includeDeletionDate(); void setIncludeDeletionDate( boolean includeDeletionDate ); String getRowLimit(); void setRowLimit( String rowLimit ); String getRowNumberField(); String getDeletionDateField(); void setDeletionDateField( String value ); String getTargetURLField(); String getReadFrom(); void setReadFrom( String readFrom ); String getReadTo(); void setReadTo( String readTo ); String getSQLField(); String getTimestampField(); String getModuleField(); void setRowNumberField( String rowNumberField ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrfields ); int getNrFields(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); }
|
SalesforceInputMeta extends SalesforceStepMeta { public SalesforceInputMeta() { super(); } SalesforceInputMeta(); SalesforceInputField[] getInputFields(); void setInputFields( SalesforceInputField[] inputFields ); String getQuery(); void setQuery( String query ); boolean isSpecifyQuery(); void setSpecifyQuery( boolean specifyQuery ); boolean isQueryAll(); void setQueryAll( boolean value ); String getCondition(); void setCondition( String condition ); void setTargetURLField( String TargetURLField ); void setSQLField( String sqlField ); void setTimestampField( String timestampField ); void setModuleField( String module_field ); int getRecordsFilter(); void setRecordsFilter( int recordsFilter ); boolean includeTargetURL(); boolean includeSQL(); void setIncludeSQL( boolean includeSQL ); boolean includeTimestamp(); void setIncludeTimestamp( boolean includeTimestamp ); boolean includeModule(); void setIncludeTargetURL( boolean includeTargetURL ); void setIncludeModule( boolean includemodule ); boolean includeRowNumber(); void setIncludeRowNumber( boolean includeRowNumber ); boolean includeDeletionDate(); void setIncludeDeletionDate( boolean includeDeletionDate ); String getRowLimit(); void setRowLimit( String rowLimit ); String getRowNumberField(); String getDeletionDateField(); void setDeletionDateField( String value ); String getTargetURLField(); String getReadFrom(); void setReadFrom( String readFrom ); String getReadTo(); void setReadTo( String readTo ); String getSQLField(); String getTimestampField(); String getModuleField(); void setRowNumberField( String rowNumberField ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrfields ); int getNrFields(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); static String DATE_TIME_FORMAT; }
|
/**
 * getFields() contract for SalesforceInputMeta: defaults add nothing, each
 * configured input field adds one column, and every enabled "include" option
 * contributes its own extra column under the configured name.
 */
@Test
public void testGetFields() throws KettleStepException {
  SalesforceInputMeta meta = new SalesforceInputMeta();
  meta.setDefault();

  // No input fields configured -> empty row meta.
  RowMetaInterface row = new RowMeta();
  meta.getFields( row, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 0, row.size() );

  // One configured field -> exactly one value meta.
  meta.setInputFields( new SalesforceInputField[] { new SalesforceInputField( "field1" ) } );
  row.clear();
  meta.getFields( row, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, row.size() );

  // Enable all six optional columns; together with the input field that makes 7.
  meta.setIncludeDeletionDate( true );
  meta.setDeletionDateField( "DeletionDate" );
  meta.setIncludeModule( true );
  meta.setModuleField( "ModuleName" );
  meta.setIncludeRowNumber( true );
  meta.setRowNumberField( "RN" );
  meta.setIncludeSQL( true );
  meta.setSQLField( "sqlField" );
  meta.setIncludeTargetURL( true );
  meta.setTargetURLField( "Target" );
  meta.setIncludeTimestamp( true );
  meta.setTimestampField( "TS" );
  row.clear();
  meta.getFields( row, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 7, row.size() );
  for ( String fieldName : new String[] { "field1", "DeletionDate", "ModuleName", "RN", "sqlField", "Target", "TS" } ) {
    assertTrue( row.indexOfValue( fieldName ) >= 0 );
  }
}
|
/**
 * Describes the output row produced by this step: one value meta per configured
 * Salesforce input field, plus one extra column for each enabled "include"
 * option (target URL, module, SQL, timestamp, row number, deletion date).
 * Field names are variable-substituted; untyped input fields default to String.
 *
 * @param r     row meta the output columns are appended to
 * @param name  step name recorded as the origin of each column
 * @throws KettleStepException if a value meta cannot be created
 */
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // User-defined Salesforce fields.
  for ( int i = 0; i < inputFields.length; i++ ) {
    SalesforceInputField field = inputFields[i];
    int type = field.getType();
    if ( type == ValueMetaInterface.TYPE_NONE ) {
      // Untyped fields are treated as strings.
      type = ValueMetaInterface.TYPE_STRING;
    }
    try {
      ValueMetaInterface v =
        ValueMetaFactory.createValueMeta( space.environmentSubstitute( field.getName() ), type );
      v.setLength( field.getLength() );
      v.setPrecision( field.getPrecision() );
      v.setOrigin( name );
      v.setConversionMask( field.getFormat() );
      v.setDecimalSymbol( field.getDecimalSymbol() );
      v.setGroupingSymbol( field.getGroupSymbol() );
      v.setCurrencySymbol( field.getCurrencySymbol() );
      r.addValueMeta( v );
    } catch ( Exception e ) {
      throw new KettleStepException( e );
    }
  }
  // Optional metadata columns, each gated on its flag AND a non-empty field name.
  if ( includeTargetURL && !Utils.isEmpty( targetURLField ) ) {
    addStringColumn( r, name, space.environmentSubstitute( targetURLField ) );
  }
  if ( includeModule && !Utils.isEmpty( moduleField ) ) {
    addStringColumn( r, name, space.environmentSubstitute( moduleField ) );
  }
  if ( includeSQL && !Utils.isEmpty( sqlField ) ) {
    addStringColumn( r, name, space.environmentSubstitute( sqlField ) );
  }
  if ( includeTimestamp && !Utils.isEmpty( timestampField ) ) {
    addDateColumn( r, name, space.environmentSubstitute( timestampField ) );
  }
  if ( includeRowNumber && !Utils.isEmpty( rowNumberField ) ) {
    ValueMetaInterface v = new ValueMetaInteger( space.environmentSubstitute( rowNumberField ) );
    v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
  if ( includeDeletionDate && !Utils.isEmpty( deletionDateField ) ) {
    addDateColumn( r, name, space.environmentSubstitute( deletionDateField ) );
  }
}

/** Appends a 250-char String column (precision -1) to the row meta. */
private static void addStringColumn( RowMetaInterface r, String origin, String fieldName ) {
  ValueMetaInterface v = new ValueMetaString( fieldName );
  v.setLength( 250 );
  v.setPrecision( -1 );
  v.setOrigin( origin );
  r.addValueMeta( v );
}

/** Appends a Date column to the row meta. */
private static void addDateColumn( RowMetaInterface r, String origin, String fieldName ) {
  ValueMetaInterface v = new ValueMetaDate( fieldName );
  v.setOrigin( origin );
  r.addValueMeta( v );
}
|
SalesforceInputMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { int i; for ( i = 0; i < inputFields.length; i++ ) { SalesforceInputField field = inputFields[i]; int type = field.getType(); if ( type == ValueMetaInterface.TYPE_NONE ) { type = ValueMetaInterface.TYPE_STRING; } try { ValueMetaInterface v = ValueMetaFactory.createValueMeta( space.environmentSubstitute( field.getName() ), type ); v.setLength( field.getLength() ); v.setPrecision( field.getPrecision() ); v.setOrigin( name ); v.setConversionMask( field.getFormat() ); v.setDecimalSymbol( field.getDecimalSymbol() ); v.setGroupingSymbol( field.getGroupSymbol() ); v.setCurrencySymbol( field.getCurrencySymbol() ); r.addValueMeta( v ); } catch ( Exception e ) { throw new KettleStepException( e ); } } if ( includeTargetURL && !Utils.isEmpty( targetURLField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( targetURLField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeModule && !Utils.isEmpty( moduleField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( moduleField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeSQL && !Utils.isEmpty( sqlField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( sqlField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeTimestamp && !Utils.isEmpty( timestampField ) ) { ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( timestampField ) ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeRowNumber && !Utils.isEmpty( rowNumberField ) ) { ValueMetaInterface v = new ValueMetaInteger( space.environmentSubstitute( rowNumberField ) ); 
v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeDeletionDate && !Utils.isEmpty( deletionDateField ) ) { ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( deletionDateField ) ); v.setOrigin( name ); r.addValueMeta( v ); } } }
|
SalesforceInputMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { int i; for ( i = 0; i < inputFields.length; i++ ) { SalesforceInputField field = inputFields[i]; int type = field.getType(); if ( type == ValueMetaInterface.TYPE_NONE ) { type = ValueMetaInterface.TYPE_STRING; } try { ValueMetaInterface v = ValueMetaFactory.createValueMeta( space.environmentSubstitute( field.getName() ), type ); v.setLength( field.getLength() ); v.setPrecision( field.getPrecision() ); v.setOrigin( name ); v.setConversionMask( field.getFormat() ); v.setDecimalSymbol( field.getDecimalSymbol() ); v.setGroupingSymbol( field.getGroupSymbol() ); v.setCurrencySymbol( field.getCurrencySymbol() ); r.addValueMeta( v ); } catch ( Exception e ) { throw new KettleStepException( e ); } } if ( includeTargetURL && !Utils.isEmpty( targetURLField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( targetURLField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeModule && !Utils.isEmpty( moduleField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( moduleField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeSQL && !Utils.isEmpty( sqlField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( sqlField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeTimestamp && !Utils.isEmpty( timestampField ) ) { ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( timestampField ) ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeRowNumber && !Utils.isEmpty( rowNumberField ) ) { ValueMetaInterface v = new ValueMetaInteger( space.environmentSubstitute( rowNumberField ) ); 
v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeDeletionDate && !Utils.isEmpty( deletionDateField ) ) { ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( deletionDateField ) ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceInputMeta(); }
|
SalesforceInputMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { int i; for ( i = 0; i < inputFields.length; i++ ) { SalesforceInputField field = inputFields[i]; int type = field.getType(); if ( type == ValueMetaInterface.TYPE_NONE ) { type = ValueMetaInterface.TYPE_STRING; } try { ValueMetaInterface v = ValueMetaFactory.createValueMeta( space.environmentSubstitute( field.getName() ), type ); v.setLength( field.getLength() ); v.setPrecision( field.getPrecision() ); v.setOrigin( name ); v.setConversionMask( field.getFormat() ); v.setDecimalSymbol( field.getDecimalSymbol() ); v.setGroupingSymbol( field.getGroupSymbol() ); v.setCurrencySymbol( field.getCurrencySymbol() ); r.addValueMeta( v ); } catch ( Exception e ) { throw new KettleStepException( e ); } } if ( includeTargetURL && !Utils.isEmpty( targetURLField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( targetURLField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeModule && !Utils.isEmpty( moduleField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( moduleField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeSQL && !Utils.isEmpty( sqlField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( sqlField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeTimestamp && !Utils.isEmpty( timestampField ) ) { ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( timestampField ) ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeRowNumber && !Utils.isEmpty( rowNumberField ) ) { ValueMetaInterface v = new ValueMetaInteger( space.environmentSubstitute( rowNumberField ) ); 
v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeDeletionDate && !Utils.isEmpty( deletionDateField ) ) { ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( deletionDateField ) ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceInputMeta(); SalesforceInputField[] getInputFields(); void setInputFields( SalesforceInputField[] inputFields ); String getQuery(); void setQuery( String query ); boolean isSpecifyQuery(); void setSpecifyQuery( boolean specifyQuery ); boolean isQueryAll(); void setQueryAll( boolean value ); String getCondition(); void setCondition( String condition ); void setTargetURLField( String TargetURLField ); void setSQLField( String sqlField ); void setTimestampField( String timestampField ); void setModuleField( String module_field ); int getRecordsFilter(); void setRecordsFilter( int recordsFilter ); boolean includeTargetURL(); boolean includeSQL(); void setIncludeSQL( boolean includeSQL ); boolean includeTimestamp(); void setIncludeTimestamp( boolean includeTimestamp ); boolean includeModule(); void setIncludeTargetURL( boolean includeTargetURL ); void setIncludeModule( boolean includemodule ); boolean includeRowNumber(); void setIncludeRowNumber( boolean includeRowNumber ); boolean includeDeletionDate(); void setIncludeDeletionDate( boolean includeDeletionDate ); String getRowLimit(); void setRowLimit( String rowLimit ); String getRowNumberField(); String getDeletionDateField(); void setDeletionDateField( String value ); String getTargetURLField(); String getReadFrom(); void setReadFrom( String readFrom ); String getReadTo(); void setReadTo( String readTo ); String getSQLField(); String getTimestampField(); String getModuleField(); void setRowNumberField( String rowNumberField ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrfields ); int getNrFields(); void 
setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); }
|
SalesforceInputMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { int i; for ( i = 0; i < inputFields.length; i++ ) { SalesforceInputField field = inputFields[i]; int type = field.getType(); if ( type == ValueMetaInterface.TYPE_NONE ) { type = ValueMetaInterface.TYPE_STRING; } try { ValueMetaInterface v = ValueMetaFactory.createValueMeta( space.environmentSubstitute( field.getName() ), type ); v.setLength( field.getLength() ); v.setPrecision( field.getPrecision() ); v.setOrigin( name ); v.setConversionMask( field.getFormat() ); v.setDecimalSymbol( field.getDecimalSymbol() ); v.setGroupingSymbol( field.getGroupSymbol() ); v.setCurrencySymbol( field.getCurrencySymbol() ); r.addValueMeta( v ); } catch ( Exception e ) { throw new KettleStepException( e ); } } if ( includeTargetURL && !Utils.isEmpty( targetURLField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( targetURLField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeModule && !Utils.isEmpty( moduleField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( moduleField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeSQL && !Utils.isEmpty( sqlField ) ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( sqlField ) ); v.setLength( 250 ); v.setPrecision( -1 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeTimestamp && !Utils.isEmpty( timestampField ) ) { ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( timestampField ) ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeRowNumber && !Utils.isEmpty( rowNumberField ) ) { ValueMetaInterface v = new ValueMetaInteger( space.environmentSubstitute( rowNumberField ) ); 
v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeDeletionDate && !Utils.isEmpty( deletionDateField ) ) { ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( deletionDateField ) ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceInputMeta(); SalesforceInputField[] getInputFields(); void setInputFields( SalesforceInputField[] inputFields ); String getQuery(); void setQuery( String query ); boolean isSpecifyQuery(); void setSpecifyQuery( boolean specifyQuery ); boolean isQueryAll(); void setQueryAll( boolean value ); String getCondition(); void setCondition( String condition ); void setTargetURLField( String TargetURLField ); void setSQLField( String sqlField ); void setTimestampField( String timestampField ); void setModuleField( String module_field ); int getRecordsFilter(); void setRecordsFilter( int recordsFilter ); boolean includeTargetURL(); boolean includeSQL(); void setIncludeSQL( boolean includeSQL ); boolean includeTimestamp(); void setIncludeTimestamp( boolean includeTimestamp ); boolean includeModule(); void setIncludeTargetURL( boolean includeTargetURL ); void setIncludeModule( boolean includemodule ); boolean includeRowNumber(); void setIncludeRowNumber( boolean includeRowNumber ); boolean includeDeletionDate(); void setIncludeDeletionDate( boolean includeDeletionDate ); String getRowLimit(); void setRowLimit( String rowLimit ); String getRowNumberField(); String getDeletionDateField(); void setDeletionDateField( String value ); String getTargetURLField(); String getReadFrom(); void setReadFrom( String readFrom ); String getReadTo(); void setReadTo( String readTo ); String getSQLField(); String getTimestampField(); String getModuleField(); void setRowNumberField( String rowNumberField ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrfields ); int getNrFields(); void 
setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); static String DATE_TIME_FORMAT; }
|
@Test
public void testCheck() {
  SalesforceInputMeta meta = new SalesforceInputMeta();
  meta.setDefault();
  List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();

  // 1) Defaults alone (no username, no input fields) must produce at least one error remark.
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  boolean hasError = false;
  for ( CheckResultInterface cr : remarks ) {
    if ( cr.getType() == CheckResult.TYPE_RESULT_ERROR ) {
      hasError = true;
    }
  }
  assertFalse( remarks.isEmpty() );
  assertTrue( hasError );

  // 2) Minimal valid configuration (username + one input field): no error remarks expected.
  remarks.clear();
  meta.setDefault();
  meta.setUsername( "user" );
  meta.setInputFields( new SalesforceInputField[]{ new SalesforceInputField( "test" ) } );
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  hasError = false;
  for ( CheckResultInterface cr : remarks ) {
    if ( cr.getType() == CheckResult.TYPE_RESULT_ERROR ) {
      hasError = true;
    }
  }
  assertFalse( remarks.isEmpty() );
  assertFalse( hasError );

  // 3) Every "include ..." flag enabled without its target field name:
  //    exactly one error per flag (6 in total).
  remarks.clear();
  meta.setDefault();
  meta.setUsername( "user" );
  meta.setIncludeDeletionDate( true );
  meta.setIncludeModule( true );
  meta.setIncludeRowNumber( true );
  meta.setIncludeSQL( true );
  meta.setIncludeTargetURL( true );
  meta.setIncludeTimestamp( true );
  meta.setInputFields( new SalesforceInputField[]{ new SalesforceInputField( "test" ) } );
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  hasError = false;
  int errorCount = 0;
  for ( CheckResultInterface cr : remarks ) {
    if ( cr.getType() == CheckResult.TYPE_RESULT_ERROR ) {
      hasError = true;
      errorCount++;
    }
  }
  assertFalse( remarks.isEmpty() );
  assertTrue( hasError );
  assertEquals( 6, errorCount );

  // 4) Same flags, but every target field name supplied: no error remarks.
  //    (Fix: removed a leftover "errorCount++" from this loop — the counter was
  //    never reset nor asserted in this section, so it was dead and misleading.)
  remarks.clear();
  meta.setDefault();
  meta.setUsername( "user" );
  meta.setIncludeDeletionDate( true );
  meta.setDeletionDateField( "delDate" );
  meta.setIncludeModule( true );
  meta.setModuleField( "mod" );
  meta.setIncludeRowNumber( true );
  meta.setRowNumberField( "rownum" );
  meta.setIncludeSQL( true );
  meta.setSQLField( "theSQL" );
  meta.setIncludeTargetURL( true );
  meta.setTargetURLField( "theURL" );
  meta.setIncludeTimestamp( true );
  meta.setTimestampField( "ts_Field" );
  meta.setInputFields( new SalesforceInputField[]{ new SalesforceInputField( "test" ) } );
  meta.check( remarks, null, null, null, null, null, null, null, null, null );
  hasError = false;
  for ( CheckResultInterface cr : remarks ) {
    if ( cr.getType() == CheckResult.TYPE_RESULT_ERROR ) {
      hasError = true;
    }
  }
  assertFalse( remarks.isEmpty() );
  assertFalse( hasError );
}
|
/**
 * Validates this step's configuration and appends the outcome to {@code remarks}.
 * Checks: no input step may be connected (this step generates rows), at least one
 * input field must be defined, and every enabled "include ..." option must name a
 * non-empty output field.
 *
 * @param remarks  list the {@link CheckResult} entries are appended to
 * @param stepMeta step this metadata belongs to; attached to every remark
 */
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
  Repository repository, IMetaStore metaStore ) {
  super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore );
  CheckResult cr;
  // This step reads from Salesforce, so a connected previous step is a configuration error.
  if ( input != null && input.length > 0 ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInputExpected" ), stepMeta );
  } else {
    cr = new CheckResult( CheckResult.TYPE_RESULT_OK,
      BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInput" ), stepMeta );
  }
  remarks.add( cr );
  if ( getInputFields().length == 0 ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoFields" ), stepMeta );
  } else {
    cr = new CheckResult( CheckResult.TYPE_RESULT_OK,
      BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.FieldsOk" ), stepMeta );
  }
  remarks.add( cr );
  // Each enabled "include" flag requires a non-empty target field name.
  if ( includeTargetURL() && Utils.isEmpty( getTargetURLField() ) ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTargetURLField" ), stepMeta );
    remarks.add( cr );
  }
  if ( includeSQL() && Utils.isEmpty( getSQLField() ) ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoSQLField" ), stepMeta );
    remarks.add( cr );
  }
  // Consistency fix: use the getModuleField() accessor like every other check here
  // (the original read the moduleField member directly).
  if ( includeModule() && Utils.isEmpty( getModuleField() ) ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoModuleField" ), stepMeta );
    remarks.add( cr );
  }
  if ( includeTimestamp() && Utils.isEmpty( getTimestampField() ) ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTimestampField" ), stepMeta );
    remarks.add( cr );
  }
  if ( includeRowNumber() && Utils.isEmpty( getRowNumberField() ) ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoRowNumberField" ), stepMeta );
    remarks.add( cr );
  }
  if ( includeDeletionDate() && Utils.isEmpty( getDeletionDateField() ) ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoDeletionDateField" ), stepMeta );
    remarks.add( cr );
  }
}
|
// Snippet context row: SalesforceInputMeta with its configuration check() method.
// The "class" keyword was elided by snippet extraction; code kept byte-identical.
SalesforceInputMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getInputFields().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); if ( includeTargetURL() && Utils.isEmpty( getTargetURLField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTargetURLField" ), stepMeta ); remarks.add( cr ); } if ( includeSQL() && Utils.isEmpty( getSQLField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoSQLField" ), stepMeta ); remarks.add( cr ); } if ( includeModule() && Utils.isEmpty( moduleField ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoModuleField" ), stepMeta ); remarks.add( cr ); } if ( includeTimestamp() && Utils.isEmpty( getTimestampField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTimestampField" ), 
stepMeta ); remarks.add( cr ); } if ( includeRowNumber() && Utils.isEmpty( getRowNumberField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoRowNumberField" ), stepMeta ); remarks.add( cr ); } if ( includeDeletionDate() && Utils.isEmpty( getDeletionDateField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoDeletionDateField" ), stepMeta ); remarks.add( cr ); } } }
|
// Snippet context row: SalesforceInputMeta check() plus its constructor signature.
// Body-less member signatures are an artifact of extraction; code kept byte-identical.
SalesforceInputMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getInputFields().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); if ( includeTargetURL() && Utils.isEmpty( getTargetURLField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTargetURLField" ), stepMeta ); remarks.add( cr ); } if ( includeSQL() && Utils.isEmpty( getSQLField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoSQLField" ), stepMeta ); remarks.add( cr ); } if ( includeModule() && Utils.isEmpty( moduleField ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoModuleField" ), stepMeta ); remarks.add( cr ); } if ( includeTimestamp() && Utils.isEmpty( getTimestampField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTimestampField" ), 
stepMeta ); remarks.add( cr ); } if ( includeRowNumber() && Utils.isEmpty( getRowNumberField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoRowNumberField" ), stepMeta ); remarks.add( cr ); } if ( includeDeletionDate() && Utils.isEmpty( getDeletionDateField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoDeletionDateField" ), stepMeta ); remarks.add( cr ); } } SalesforceInputMeta(); }
|
// Snippet context row: SalesforceInputMeta check() plus the full member-signature
// listing (getters/setters for the step's include-flags and field names, XML/repo
// serialization, and the step factory methods). Code kept byte-identical.
SalesforceInputMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getInputFields().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); if ( includeTargetURL() && Utils.isEmpty( getTargetURLField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTargetURLField" ), stepMeta ); remarks.add( cr ); } if ( includeSQL() && Utils.isEmpty( getSQLField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoSQLField" ), stepMeta ); remarks.add( cr ); } if ( includeModule() && Utils.isEmpty( moduleField ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoModuleField" ), stepMeta ); remarks.add( cr ); } if ( includeTimestamp() && Utils.isEmpty( getTimestampField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTimestampField" ), 
stepMeta ); remarks.add( cr ); } if ( includeRowNumber() && Utils.isEmpty( getRowNumberField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoRowNumberField" ), stepMeta ); remarks.add( cr ); } if ( includeDeletionDate() && Utils.isEmpty( getDeletionDateField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoDeletionDateField" ), stepMeta ); remarks.add( cr ); } } SalesforceInputMeta(); SalesforceInputField[] getInputFields(); void setInputFields( SalesforceInputField[] inputFields ); String getQuery(); void setQuery( String query ); boolean isSpecifyQuery(); void setSpecifyQuery( boolean specifyQuery ); boolean isQueryAll(); void setQueryAll( boolean value ); String getCondition(); void setCondition( String condition ); void setTargetURLField( String TargetURLField ); void setSQLField( String sqlField ); void setTimestampField( String timestampField ); void setModuleField( String module_field ); int getRecordsFilter(); void setRecordsFilter( int recordsFilter ); boolean includeTargetURL(); boolean includeSQL(); void setIncludeSQL( boolean includeSQL ); boolean includeTimestamp(); void setIncludeTimestamp( boolean includeTimestamp ); boolean includeModule(); void setIncludeTargetURL( boolean includeTargetURL ); void setIncludeModule( boolean includemodule ); boolean includeRowNumber(); void setIncludeRowNumber( boolean includeRowNumber ); boolean includeDeletionDate(); void setIncludeDeletionDate( boolean includeDeletionDate ); String getRowLimit(); void setRowLimit( String rowLimit ); String getRowNumberField(); String getDeletionDateField(); void setDeletionDateField( String value ); String getTargetURLField(); String getReadFrom(); void setReadFrom( String readFrom ); String getReadTo(); void setReadTo( String readTo ); String getSQLField(); String getTimestampField(); String getModuleField(); void 
setRowNumberField( String rowNumberField ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrfields ); int getNrFields(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); }
|
// Snippet context row: same SalesforceInputMeta listing as the previous row, here
// also exposing the DATE_TIME_FORMAT constant. Code kept byte-identical.
SalesforceInputMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getInputFields().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); if ( includeTargetURL() && Utils.isEmpty( getTargetURLField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTargetURLField" ), stepMeta ); remarks.add( cr ); } if ( includeSQL() && Utils.isEmpty( getSQLField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoSQLField" ), stepMeta ); remarks.add( cr ); } if ( includeModule() && Utils.isEmpty( moduleField ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoModuleField" ), stepMeta ); remarks.add( cr ); } if ( includeTimestamp() && Utils.isEmpty( getTimestampField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoTimestampField" ), 
stepMeta ); remarks.add( cr ); } if ( includeRowNumber() && Utils.isEmpty( getRowNumberField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoRowNumberField" ), stepMeta ); remarks.add( cr ); } if ( includeDeletionDate() && Utils.isEmpty( getDeletionDateField() ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInputMeta.CheckResult.NoDeletionDateField" ), stepMeta ); remarks.add( cr ); } } SalesforceInputMeta(); SalesforceInputField[] getInputFields(); void setInputFields( SalesforceInputField[] inputFields ); String getQuery(); void setQuery( String query ); boolean isSpecifyQuery(); void setSpecifyQuery( boolean specifyQuery ); boolean isQueryAll(); void setQueryAll( boolean value ); String getCondition(); void setCondition( String condition ); void setTargetURLField( String TargetURLField ); void setSQLField( String sqlField ); void setTimestampField( String timestampField ); void setModuleField( String module_field ); int getRecordsFilter(); void setRecordsFilter( int recordsFilter ); boolean includeTargetURL(); boolean includeSQL(); void setIncludeSQL( boolean includeSQL ); boolean includeTimestamp(); void setIncludeTimestamp( boolean includeTimestamp ); boolean includeModule(); void setIncludeTargetURL( boolean includeTargetURL ); void setIncludeModule( boolean includemodule ); boolean includeRowNumber(); void setIncludeRowNumber( boolean includeRowNumber ); boolean includeDeletionDate(); void setIncludeDeletionDate( boolean includeDeletionDate ); String getRowLimit(); void setRowLimit( String rowLimit ); String getRowNumberField(); String getDeletionDateField(); void setDeletionDateField( String value ); String getTargetURLField(); String getReadFrom(); void setReadFrom( String readFrom ); String getReadTo(); void setReadTo( String readTo ); String getSQLField(); String getTimestampField(); String getModuleField(); void 
setRowNumberField( String rowNumberField ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrfields ); int getNrFields(); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); static String DATE_TIME_FORMAT; }
|
@Test
public void testErrorHandling() {
  // The insert step must report that it supports per-row error handling.
  SalesforceStepMeta insertMeta = new SalesforceInsertMeta();
  assertTrue( insertMeta.supportsErrorHandling() );
}
|
/** @return {@code true} — this step opts in to Kettle's step error handling. */
public boolean supportsErrorHandling() { return true; }
|
// Snippet context row: SalesforceInsertMeta exposing only supportsErrorHandling().
// "class" keyword elided by extraction; code kept byte-identical.
SalesforceInsertMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } }
|
// Snippet context row: supportsErrorHandling() plus the constructor signature.
// Code kept byte-identical.
SalesforceInsertMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceInsertMeta(); }
|
// Snippet context row: supportsErrorHandling() with SalesforceInsertMeta's full
// member-signature listing (field mapping, batch size, serialization, factories).
// Code kept byte-identical.
SalesforceInsertMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
// Snippet context row: duplicate of the preceding SalesforceInsertMeta listing
// (supportsErrorHandling() plus all member signatures). Code kept byte-identical.
SalesforceInsertMeta extends SalesforceStepMeta { public boolean supportsErrorHandling() { return true; } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test
public void testCheck() {
  SalesforceInsertMeta insertMeta = new SalesforceInsertMeta();
  insertMeta.setDefault();
  List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();

  // Default configuration (no username, no field mapping) must yield an error remark.
  insertMeta.check( remarks, null, null, null, null, null, null, null, null, null );
  boolean errorSeen = false;
  for ( CheckResultInterface remark : remarks ) {
    errorSeen = errorSeen || remark.getType() == CheckResult.TYPE_RESULT_ERROR;
  }
  assertFalse( remarks.isEmpty() );
  assertTrue( errorSeen );

  // A username plus one mapped field is a valid configuration: no error remarks.
  remarks.clear();
  insertMeta.setDefault();
  insertMeta.setUsername( "user" );
  insertMeta.setUpdateLookup( new String[]{ "SalesforceField" } );
  insertMeta.setUpdateStream( new String[]{ "StreamField" } );
  insertMeta.setUseExternalId( new Boolean[]{ false } );
  insertMeta.check( remarks, null, null, null, null, null, null, null, null, null );
  errorSeen = false;
  for ( CheckResultInterface remark : remarks ) {
    errorSeen = errorSeen || remark.getType() == CheckResult.TYPE_RESULT_ERROR;
  }
  assertFalse( remarks.isEmpty() );
  assertFalse( errorSeen );
}
|
/**
 * Appends configuration remarks to {@code remarks}: flags a connected input step
 * as an error and requires at least one mapped field in the update lookup.
 */
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
  Repository repository, IMetaStore metaStore ) {
  super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore );
  // A connected previous step is reported as a configuration error.
  boolean hasConnectedInput = input != null && input.length > 0;
  if ( hasConnectedInput ) {
    remarks.add( new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInputExpected" ), stepMeta ) );
  } else {
    remarks.add( new CheckResult( CheckResult.TYPE_RESULT_OK,
      BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInput" ), stepMeta ) );
  }
  // At least one stream-to-Salesforce field mapping must be configured.
  if ( getUpdateLookup().length == 0 ) {
    remarks.add( new CheckResult( CheckResult.TYPE_RESULT_ERROR,
      BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoFields" ), stepMeta ) );
  } else {
    remarks.add( new CheckResult( CheckResult.TYPE_RESULT_OK,
      BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.FieldsOk" ), stepMeta ) );
  }
}
|
// Snippet context row: SalesforceInsertMeta with its configuration check() method.
// "class" keyword elided by extraction; code kept byte-identical.
SalesforceInsertMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } }
|
// Snippet context row: SalesforceInsertMeta check() plus the constructor signature.
// Code kept byte-identical.
SalesforceInsertMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } SalesforceInsertMeta(); }
|
// Snippet context row: SalesforceInsertMeta check() with the full member-signature
// listing. Code kept byte-identical.
SalesforceInsertMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
// Snippet context row: duplicate of the preceding SalesforceInsertMeta check()
// listing. Code kept byte-identical.
SalesforceInsertMeta extends SalesforceStepMeta { public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { super.check( remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore ); CheckResult cr; if ( input != null && input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInputExpected" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoInput" ), stepMeta ); } remarks.add( cr ); if ( getUpdateLookup().length == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.NoFields" ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "SalesforceInsertMeta.CheckResult.FieldsOk" ), stepMeta ); } remarks.add( cr ); } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testGetFields() throws KettleStepException { SalesforceInsertMeta meta = new SalesforceInsertMeta(); meta.setDefault(); RowMetaInterface r = new RowMeta(); meta.getFields( r, "thisStep", null, null, new Variables(), null, null ); assertEquals( 1, r.size() ); assertEquals( "Id", r.getFieldNames()[0] ); meta.setSalesforceIDFieldName( "id_field" ); r.clear(); meta.getFields( r, "thisStep", null, null, new Variables(), null, null ); assertEquals( 1, r.size() ); assertEquals( "id_field", r.getFieldNames()[0] ); }
|
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } }
|
SalesforceInsertMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } } }
|
SalesforceInsertMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceInsertMeta(); }
|
SalesforceInsertMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceInsertMeta extends SalesforceStepMeta { public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() ); if ( !Utils.isEmpty( realfieldname ) ) { ValueMetaInterface v = new ValueMetaString( realfieldname ); v.setLength( 18 ); v.setOrigin( name ); r.addValueMeta( v ); } } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testSalesforceInsertMeta() throws KettleException { List<String> attributes = new ArrayList<String>(); attributes.addAll( SalesforceMetaTest.getDefaultAttributes() ); attributes.addAll( Arrays.asList( "batchSize", "salesforceIDFieldName", "updateLookup", "updateStream", "useExternalId", "rollbackAllChangesOnError" ) ); Map<String, String> getterMap = new HashMap<String, String>(); Map<String, String> setterMap = new HashMap<String, String>(); Map<String, FieldLoadSaveValidator<?>> fieldLoadSaveValidators = new HashMap<String, FieldLoadSaveValidator<?>>(); fieldLoadSaveValidators.put( "updateLookup", new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) ); fieldLoadSaveValidators.put( "updateStream", new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 50 ) ); fieldLoadSaveValidators.put( "useExternalId", new ArrayLoadSaveValidator<Boolean>( new BooleanLoadSaveValidator(), 50 ) ); LoadSaveTester loadSaveTester = new LoadSaveTester( SalesforceInsertMeta.class, attributes, getterMap, setterMap, fieldLoadSaveValidators, new HashMap<String, FieldLoadSaveValidator<?>>() ); loadSaveTester.testRepoRoundTrip(); loadSaveTester.testXmlRoundTrip(); }
|
public SalesforceInsertMeta() { super(); }
|
SalesforceInsertMeta extends SalesforceStepMeta { public SalesforceInsertMeta() { super(); } }
|
SalesforceInsertMeta extends SalesforceStepMeta { public SalesforceInsertMeta() { super(); } SalesforceInsertMeta(); }
|
SalesforceInsertMeta extends SalesforceStepMeta { public SalesforceInsertMeta() { super(); } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
SalesforceInsertMeta extends SalesforceStepMeta { public SalesforceInsertMeta() { super(); } SalesforceInsertMeta(); boolean isRollbackAllChangesOnError(); void setRollbackAllChangesOnError( boolean rollbackAllChangesOnError ); String[] getUpdateLookup(); void setUpdateLookup( String[] updateLookup ); String[] getUpdateStream(); void setUpdateStream( String[] updateStream ); Boolean[] getUseExternalId(); void setUseExternalId( Boolean[] useExternalId ); void setBatchSize( String value ); String getBatchSize(); int getBatchSizeInt(); String getSalesforceIDFieldName(); void setSalesforceIDFieldName( String salesforceIDFieldName ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); String getXML(); void allocate( int nrvalues ); void setDefault(); void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
|
@Test public void testInsertTransStepCondition() throws KettleException { ArgumentCaptor<String> argumentTableName = ArgumentCaptor.forClass( String.class ); ArgumentCaptor<RowMetaAndData> argumentTableData = ArgumentCaptor.forClass( RowMetaAndData.class ); doNothing().when( repo.connectionDelegate ).insertTableRow( argumentTableName.capture(), argumentTableData.capture() ); repo.insertTransStepCondition( new LongObjectId( 234 ), new LongObjectId( 567 ), new LongObjectId( 468 ) ); RowMetaAndData insertRecord = argumentTableData.getValue(); assertEquals( KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION, argumentTableName.getValue() ); assertEquals( 3, insertRecord.size() ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 0 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION, insertRecord.getValueMeta( 0 ).getName() ); assertEquals( Long.valueOf( 234 ), insertRecord.getInteger( 0 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 1 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP, insertRecord.getValueMeta( 1 ).getName() ); assertEquals( Long.valueOf( 567 ), insertRecord.getInteger( 1 ) ); assertEquals( ValueMetaInterface.TYPE_INTEGER, insertRecord.getValueMeta( 2 ).getType() ); assertEquals( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION, insertRecord.getValueMeta( 2 ).getName() ); assertEquals( Long.valueOf( 468 ), insertRecord.getInteger( 2 ) ); }
|
public synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step, ObjectId id_condition ) throws KettleException { String tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; RowMetaAndData table = new RowMetaAndData(); table .addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP ), id_step ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION ), id_condition ); connectionDelegate.insertTableRow( tablename, table ); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step, ObjectId id_condition ) throws KettleException { String tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; RowMetaAndData table = new RowMetaAndData(); table .addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP ), id_step ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION ), id_condition ); connectionDelegate.insertTableRow( tablename, table ); } }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step, ObjectId id_condition ) throws KettleException { String tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; RowMetaAndData table = new RowMetaAndData(); table .addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP ), id_step ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION ), id_condition ); connectionDelegate.insertTableRow( tablename, table ); } KettleDatabaseRepository(); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step, ObjectId id_condition ) throws KettleException { String tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; RowMetaAndData table = new RowMetaAndData(); table .addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP ), id_step ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION ), id_condition ); connectionDelegate.insertTableRow( tablename, table ); } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); }
|
KettleDatabaseRepository extends KettleDatabaseRepositoryBase { public synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step, ObjectId id_condition ) throws KettleException { String tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; RowMetaAndData table = new RowMetaAndData(); table .addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION ), id_transformation ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP ), id_step ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION ), id_condition ); connectionDelegate.insertTableRow( tablename, table ); } KettleDatabaseRepository(); void init( RepositoryMeta repositoryMeta ); RepositoryMeta createRepositoryMeta(); void connect( String username, String password ); void connect( String username, String password, boolean upgrade ); @Override boolean test(); @Override void create(); synchronized void commit(); synchronized void rollback(); IUser getUserInfo(); int getMajorVersion(); int getMinorVersion(); String getVersion(); TransMeta loadTransformation( String transname, RepositoryDirectoryInterface repdir,
ProgressMonitorListener monitor, boolean setInternalVariables, String versionName ); SharedObjects readTransSharedObjects( TransMeta transMeta ); ObjectId renameTransformation( ObjectId id_transformation, RepositoryDirectoryInterface newDir,
String newName ); synchronized ObjectId renameTransformation( ObjectId id_transformation, String versionComment,
RepositoryDirectoryInterface newDir, String newName ); JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
String versionName ); SharedObjects readJobMetaSharedObjects( JobMeta jobMeta ); ObjectId renameJob( ObjectId id_job, RepositoryDirectoryInterface dir, String newname ); synchronized ObjectId renameJob( ObjectId id_job, String versionComment, RepositoryDirectoryInterface dir,
String newname ); boolean exists( String name, RepositoryDirectoryInterface repositoryDirectory,
RepositoryObjectType objectType ); void save( RepositoryElementInterface repositoryElement, String versionComment ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, boolean overwrite ); void save( RepositoryElementInterface repositoryElement, String versionComment,
ProgressMonitorListener monitor, ObjectId parentId, boolean used, boolean overwrite ); @Override void save( RepositoryElementInterface repositoryElement, String versionComment, Calendar versionDate,
ProgressMonitorListener monitor, boolean overwrite ); Condition loadCondition( ObjectId id_condition ); ObjectId saveCondition( Condition condition ); ObjectId saveCondition( Condition condition, ObjectId id_condition_parent ); DatabaseMeta loadDatabaseMeta( ObjectId id_database, String versionName ); void deleteDatabaseMeta( String databaseName ); ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers,
String versionLabel ); SlaveServer loadSlaveServer( ObjectId id_slave_server, String versionName ); PartitionSchema loadPartitionSchema( ObjectId id_partition_schema, String versionName ); ValueMetaAndData loadValueMetaAndData( ObjectId id_value ); NotePadMeta loadNotePadMeta( ObjectId id_note ); void saveNotePadMeta( NotePadMeta note, ObjectId id_transformation ); RepositoryDirectoryInterface loadRepositoryDirectoryTree(); RepositoryDirectoryInterface loadRepositoryDirectoryTree( RepositoryDirectoryInterface root ); RepositoryDirectoryInterface findDirectory( String directory ); RepositoryDirectoryInterface findDirectory( ObjectId directory ); void saveRepositoryDirectory( RepositoryDirectoryInterface dir ); void deleteRepositoryDirectory( RepositoryDirectoryInterface dir ); ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ); RepositoryDirectoryInterface createRepositoryDirectory( RepositoryDirectoryInterface parentDirectory,
String directoryPath ); synchronized ObjectId getRootDirectoryID(); synchronized int getNrSubDirectories( ObjectId id_directory ); synchronized ObjectId[] getSubDirectoryIDs( ObjectId id_directory ); synchronized ObjectId insertLogEntry( String description ); synchronized void insertTransNote( ObjectId id_transformation, ObjectId id_note ); synchronized void insertJobNote( ObjectId id_job, ObjectId id_note ); synchronized void insertStepDatabase( ObjectId id_transformation, ObjectId id_step, ObjectId id_database ); synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ); synchronized ObjectId insertTransformationPartitionSchema( ObjectId id_transformation,
ObjectId id_partition_schema ); synchronized ObjectId insertClusterSlave( ClusterSchema clusterSchema, SlaveServer slaveServer ); synchronized ObjectId insertTransformationCluster( ObjectId id_transformation, ObjectId id_cluster ); synchronized ObjectId insertTransformationSlave( ObjectId id_transformation, ObjectId id_slave ); synchronized void insertTransStepCondition( ObjectId id_transformation, ObjectId id_step,
ObjectId id_condition ); synchronized String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ); List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); synchronized String[] getJobNames( ObjectId id_directory, boolean includeDeleted ); synchronized String[] getDirectoryNames( ObjectId id_directory ); synchronized String[] getJobNames(); ObjectId[] getSubConditionIDs( ObjectId id_condition ); ObjectId[] getTransNoteIDs( ObjectId id_transformation ); ObjectId[] getTransformationConditionIDs( ObjectId id_transformation ); ObjectId[] getTransformationDatabaseIDs( ObjectId id_transformation ); ObjectId[] getJobNoteIDs( ObjectId id_job ); ObjectId[] getDatabaseIDs( boolean includeDeleted ); ObjectId[] getDatabaseAttributeIDs( ObjectId id_database ); ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ); ObjectId[] getPartitionIDs( ObjectId id_partition_schema ); ObjectId[] getTransformationPartitionSchemaIDs( ObjectId id_transformation ); ObjectId[] getTransformationClusterSchemaIDs( ObjectId id_transformation ); ObjectId[] getClusterIDs( boolean includeDeleted ); ObjectId[] getSlaveIDs( boolean includeDeleted ); ObjectId[] getClusterSlaveIDs( ObjectId id_cluster_schema ); synchronized String[] getDatabaseNames( boolean includeDeleted ); synchronized String[] getPartitionSchemaNames( boolean includeDeleted ); synchronized String[] getSlaveNames( boolean includeDeleted ); synchronized String[] getClusterNames( boolean includeDeleted ); ObjectId[] getStepIDs( ObjectId id_transformation ); synchronized String[] getTransformationsUsingDatabase( ObjectId id_database ); synchronized String[] getJobsUsingDatabase( ObjectId id_database ); synchronized String[] getClustersUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingSlave( ObjectId id_slave ); synchronized String[] getTransformationsUsingPartitionSchema( ObjectId id_partition_schema ); synchronized String[] getTransformationsUsingCluster( ObjectId id_cluster ); ObjectId[] getJobHopIDs( ObjectId id_job ); ObjectId[] getTransDependencyIDs( ObjectId id_transformation ); ObjectId[] getJobEntryIDs( ObjectId id_job ); 
ObjectId[] getJobEntryCopyIDs( ObjectId id_job ); ObjectId[] getJobEntryCopyIDs( ObjectId id_job, ObjectId id_jobentry ); static final String byteArrayToString( byte[] val ); synchronized void delSteps( ObjectId id_transformation ); synchronized void deleteCondition( ObjectId id_condition ); synchronized void delStepConditions( ObjectId id_transformation ); synchronized void delStepDatabases( ObjectId id_transformation ); synchronized void delJobEntryDatabases( ObjectId id_job ); synchronized void delJobEntries( ObjectId id_job ); synchronized void delJobEntryCopies( ObjectId id_job ); synchronized void delDependencies( ObjectId id_transformation ); synchronized void delStepAttributes( ObjectId id_transformation ); synchronized void delTransAttributes( ObjectId id_transformation ); synchronized void delJobAttributes( ObjectId id_job ); synchronized void delPartitionSchemas( ObjectId id_transformation ); synchronized void delPartitions( ObjectId id_partition_schema ); synchronized void delClusterSlaves( ObjectId id_cluster ); synchronized void delTransformationClusters( ObjectId id_transformation ); synchronized void delTransformationSlaves( ObjectId id_transformation ); synchronized void delJobEntryAttributes( ObjectId id_job ); synchronized void delTransHops( ObjectId id_transformation ); synchronized void delJobHops( ObjectId id_job ); synchronized void delTransNotes( ObjectId id_transformation ); synchronized void delJobNotes( ObjectId id_job ); synchronized void delTrans( ObjectId id_transformation ); synchronized void delJob( ObjectId id_job ); synchronized void delTransStepCondition( ObjectId id_transformation ); synchronized void delValue( ObjectId id_value ); synchronized void deleteSlave( ObjectId id_slave ); synchronized void deletePartitionSchema( ObjectId id_partition_schema ); synchronized void deleteClusterSchema( ObjectId id_cluster ); synchronized void deleteTransformation( ObjectId id_transformation ); synchronized void deleteJob( ObjectId id_job 
); boolean dropRepositorySchema(); void updateStepTypes(); void updateDatabaseTypes(); void updateJobEntryTypes(); synchronized String toString(); void clearSharedObjectCache(); Database getDatabase(); void setDatabase( Database database ); synchronized void lockRepository(); synchronized void unlockRepository(); List<DatabaseMeta> getDatabases(); List<SlaveServer> getSlaveServers(); DatabaseMeta getDatabaseMeta(); List<DatabaseMeta> readDatabases(); boolean isUseBatchProcessing(); void setImportBaseDirectory( RepositoryDirectory importBaseDirectory ); RepositoryDirectory getImportBaseDirectory(); void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements,
boolean dryRun ); synchronized int countNrStepAttributes( ObjectId id_step, String code ); synchronized int countNrJobEntryAttributes( ObjectId id_jobentry, String code ); synchronized void disconnect(); long getJobEntryAttributeInteger( ObjectId id_jobentry, int nr, String code ); String getJobEntryAttributeString( ObjectId id_jobentry, int nr, String code ); @Override boolean getJobEntryAttributeBoolean( ObjectId id_jobentry, int nr, String code, boolean def ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, String value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, boolean value ); void saveJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String code, long value ); boolean getStepAttributeBoolean( ObjectId id_step, int nr, String code, boolean def ); long getStepAttributeInteger( ObjectId id_step, int nr, String code ); String getStepAttributeString( ObjectId id_step, int nr, String code ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, String value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, boolean value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, long value ); void saveStepAttribute( ObjectId id_transformation, ObjectId id_step, int nr, String code, double value ); ObjectId findStepAttributeID( ObjectId id_step, int nr, String code ); void execStatement( String sql ); void loadJobEntry( JobEntryBase jobEntryBase, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ); ObjectId getClusterID( String name ); ObjectId getDatabaseID( String name ); ObjectId getJobId( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId getPartitionSchemaID( String name ); ObjectId getSlaveID( String name ); ObjectId getTransformationID( String name, RepositoryDirectoryInterface repositoryDirectory ); ObjectId insertJobEntry( ObjectId id_job, JobEntryBase jobEntryBase ); DatabaseMeta loadDatabaseMetaFromStepAttribute( ObjectId idStep, String code, List<DatabaseMeta> databases ); void saveDatabaseMetaStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
DatabaseMeta database ); DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId id_jobentry, String nameCode, int nr,
String idCode, List<DatabaseMeta> databases ); void saveDatabaseMetaJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, int nr, String nameCode,
String idCode, DatabaseMeta database ); Condition loadConditionFromStepAttribute( ObjectId id_step, String code ); void saveConditionStepAttribute( ObjectId id_transformation, ObjectId id_step, String code,
Condition condition ); KettleDatabaseRepositorySecurityProvider getSecurityProvider(); KettleDatabaseRepositorySecurityProvider getSecurityManager(); void undeleteObject( RepositoryElementMetaInterface element ); List<RepositoryElementMetaInterface> getJobAndTransformationObjects( ObjectId id_directory,
boolean includeDeleted ); IRepositoryService getService( Class<? extends IRepositoryService> clazz ); List<Class<? extends IRepositoryService>> getServiceInterfaces(); boolean hasService( Class<? extends IRepositoryService> clazz ); RepositoryDirectory getDefaultSaveDirectory( RepositoryElementInterface repositoryElement ); RepositoryDirectory getUserHomeDirectory(); RepositoryObject getObjectInformation( ObjectId objectId, RepositoryObjectType objectType ); JobMeta loadJob( ObjectId idJob, String versionLabel ); TransMeta loadTransformation( ObjectId idTransformation, String versionLabel ); String getConnectMessage(); IRepositoryExporter getExporter(); IRepositoryImporter getImporter(); KettleDatabaseRepositoryMetaStore getMetaStore(); public KettleDatabaseRepositoryTransDelegate transDelegate; public KettleDatabaseRepositoryJobDelegate jobDelegate; public KettleDatabaseRepositoryDatabaseDelegate databaseDelegate; public KettleDatabaseRepositorySlaveServerDelegate slaveServerDelegate; public KettleDatabaseRepositoryClusterSchemaDelegate clusterSchemaDelegate; public KettleDatabaseRepositoryPartitionSchemaDelegate partitionSchemaDelegate; public KettleDatabaseRepositoryDirectoryDelegate directoryDelegate; public KettleDatabaseRepositoryConnectionDelegate connectionDelegate; public KettleDatabaseRepositoryUserDelegate userDelegate; public KettleDatabaseRepositoryConditionDelegate conditionDelegate; public KettleDatabaseRepositoryValueDelegate valueDelegate; public KettleDatabaseRepositoryNotePadDelegate notePadDelegate; public KettleDatabaseRepositoryStepDelegate stepDelegate; public KettleDatabaseRepositoryJobEntryDelegate jobEntryDelegate; public KettleDatabaseRepositoryMetaStoreDelegate metaStoreDelegate; public KettleDatabaseRepositoryMetaStore metaStore; }
|
@Test public void testLoadAccessData() throws Exception { when( accessBox.getSelectedItem() ).thenReturn( "Native" ); DatabaseInterface dbInterface = mock( DatabaseInterface.class ); when( dbInterface.getDefaultDatabasePort() ).thenReturn( 5309 ); DataHandler.connectionMap.put( "myDb", dbInterface ); dataHandler.loadAccessData(); dataHandler.loadAccessData(); }
|
public void loadAccessData() { getControls(); pushCache(); Object key = connectionBox.getSelectedItem(); if ( key == null ) { key = connectionMap.firstKey(); connectionBox.setSelectedItem( key ); return; } DatabaseInterface database = connectionMap.get( key ); int[] acc = database.getAccessTypeList(); Object accessKey = accessBox.getSelectedItem(); accessBox.removeItems(); for ( int value : acc ) { accessBox.addItem( DatabaseMeta.getAccessTypeDescLong( value ) ); } accessBox.setRows( accessBox.getRows() ); if ( accessKey != null ) { accessBox.setSelectedItem( accessKey ); } if ( accessBox.getSelectedItem() == null ) { accessBox.setSelectedItem( DatabaseMeta.getAccessTypeDescLong( acc[0] ) ); } Map<String, String> options = null; if ( this.databaseMeta != null ) { this.databaseMeta.applyDefaultOptions( database ); options = this.databaseMeta.getExtraOptions(); } else { clearOptionsData(); options = database.getDefaultOptions(); } setOptionsData( options ); PartitionDatabaseMeta[] clusterInfo = null; if ( this.databaseMeta != null ) { clusterInfo = this.databaseMeta.getPartitioningInformation(); } setClusterData( clusterInfo ); popCache(); }
|
DataHandler extends AbstractXulEventHandler { public void loadAccessData() { getControls(); pushCache(); Object key = connectionBox.getSelectedItem(); if ( key == null ) { key = connectionMap.firstKey(); connectionBox.setSelectedItem( key ); return; } DatabaseInterface database = connectionMap.get( key ); int[] acc = database.getAccessTypeList(); Object accessKey = accessBox.getSelectedItem(); accessBox.removeItems(); for ( int value : acc ) { accessBox.addItem( DatabaseMeta.getAccessTypeDescLong( value ) ); } accessBox.setRows( accessBox.getRows() ); if ( accessKey != null ) { accessBox.setSelectedItem( accessKey ); } if ( accessBox.getSelectedItem() == null ) { accessBox.setSelectedItem( DatabaseMeta.getAccessTypeDescLong( acc[0] ) ); } Map<String, String> options = null; if ( this.databaseMeta != null ) { this.databaseMeta.applyDefaultOptions( database ); options = this.databaseMeta.getExtraOptions(); } else { clearOptionsData(); options = database.getDefaultOptions(); } setOptionsData( options ); PartitionDatabaseMeta[] clusterInfo = null; if ( this.databaseMeta != null ) { clusterInfo = this.databaseMeta.getPartitioningInformation(); } setClusterData( clusterInfo ); popCache(); } }
|
DataHandler extends AbstractXulEventHandler { public void loadAccessData() { getControls(); pushCache(); Object key = connectionBox.getSelectedItem(); if ( key == null ) { key = connectionMap.firstKey(); connectionBox.setSelectedItem( key ); return; } DatabaseInterface database = connectionMap.get( key ); int[] acc = database.getAccessTypeList(); Object accessKey = accessBox.getSelectedItem(); accessBox.removeItems(); for ( int value : acc ) { accessBox.addItem( DatabaseMeta.getAccessTypeDescLong( value ) ); } accessBox.setRows( accessBox.getRows() ); if ( accessKey != null ) { accessBox.setSelectedItem( accessKey ); } if ( accessBox.getSelectedItem() == null ) { accessBox.setSelectedItem( DatabaseMeta.getAccessTypeDescLong( acc[0] ) ); } Map<String, String> options = null; if ( this.databaseMeta != null ) { this.databaseMeta.applyDefaultOptions( database ); options = this.databaseMeta.getExtraOptions(); } else { clearOptionsData(); options = database.getDefaultOptions(); } setOptionsData( options ); PartitionDatabaseMeta[] clusterInfo = null; if ( this.databaseMeta != null ) { clusterInfo = this.databaseMeta.getPartitioningInformation(); } setClusterData( clusterInfo ); popCache(); } DataHandler(); }
|
DataHandler extends AbstractXulEventHandler { public void loadAccessData() { getControls(); pushCache(); Object key = connectionBox.getSelectedItem(); if ( key == null ) { key = connectionMap.firstKey(); connectionBox.setSelectedItem( key ); return; } DatabaseInterface database = connectionMap.get( key ); int[] acc = database.getAccessTypeList(); Object accessKey = accessBox.getSelectedItem(); accessBox.removeItems(); for ( int value : acc ) { accessBox.addItem( DatabaseMeta.getAccessTypeDescLong( value ) ); } accessBox.setRows( accessBox.getRows() ); if ( accessKey != null ) { accessBox.setSelectedItem( accessKey ); } if ( accessBox.getSelectedItem() == null ) { accessBox.setSelectedItem( DatabaseMeta.getAccessTypeDescLong( acc[0] ) ); } Map<String, String> options = null; if ( this.databaseMeta != null ) { this.databaseMeta.applyDefaultOptions( database ); options = this.databaseMeta.getExtraOptions(); } else { clearOptionsData(); options = database.getDefaultOptions(); } setOptionsData( options ); PartitionDatabaseMeta[] clusterInfo = null; if ( this.databaseMeta != null ) { clusterInfo = this.databaseMeta.getPartitioningInformation(); } setClusterData( clusterInfo ); popCache(); } DataHandler(); void loadConnectionData(); void loadAccessData(); void editOptions( int index ); void clearOptionsData(); void getOptionHelp(); void setDeckChildIndex(); void onPoolingCheck(); void onClusterCheck(); Object getData(); void setData( Object data ); void pushCache(); void popCache(); void onCancel(); void onOK(); void testDatabaseConnection(); void restoreDefaults(); void poolingRowChange( int idx ); void disablePortIfInstancePopulated(); void handleUseSecurityCheckbox(); }
|
DataHandler extends AbstractXulEventHandler { public void loadAccessData() { getControls(); pushCache(); Object key = connectionBox.getSelectedItem(); if ( key == null ) { key = connectionMap.firstKey(); connectionBox.setSelectedItem( key ); return; } DatabaseInterface database = connectionMap.get( key ); int[] acc = database.getAccessTypeList(); Object accessKey = accessBox.getSelectedItem(); accessBox.removeItems(); for ( int value : acc ) { accessBox.addItem( DatabaseMeta.getAccessTypeDescLong( value ) ); } accessBox.setRows( accessBox.getRows() ); if ( accessKey != null ) { accessBox.setSelectedItem( accessKey ); } if ( accessBox.getSelectedItem() == null ) { accessBox.setSelectedItem( DatabaseMeta.getAccessTypeDescLong( acc[0] ) ); } Map<String, String> options = null; if ( this.databaseMeta != null ) { this.databaseMeta.applyDefaultOptions( database ); options = this.databaseMeta.getExtraOptions(); } else { clearOptionsData(); options = database.getDefaultOptions(); } setOptionsData( options ); PartitionDatabaseMeta[] clusterInfo = null; if ( this.databaseMeta != null ) { clusterInfo = this.databaseMeta.getPartitioningInformation(); } setClusterData( clusterInfo ); popCache(); } DataHandler(); void loadConnectionData(); void loadAccessData(); void editOptions( int index ); void clearOptionsData(); void getOptionHelp(); void setDeckChildIndex(); void onPoolingCheck(); void onClusterCheck(); Object getData(); void setData( Object data ); void pushCache(); void popCache(); void onCancel(); void onOK(); void testDatabaseConnection(); void restoreDefaults(); void poolingRowChange( int idx ); void disablePortIfInstancePopulated(); void handleUseSecurityCheckbox(); static final SortedMap<String, DatabaseInterface> connectionMap; static final Map<String, String> connectionNametoID; }
|
@Test public void testAddFieldsFromSOQLQuery() throws Exception { final Set<String> fields = new LinkedHashSet<>(); XmlObject testObject = createObject( "Field1", VALUE, ObjectType.XMLOBJECT ); dialog.addFields( "", fields, testObject ); dialog.addFields( "", fields, testObject ); assertArrayEquals( "No duplicates", new String[]{"Field1"}, fields.toArray() ); testObject = createObject( "Field2", VALUE, ObjectType.XMLOBJECT ); dialog.addFields( "", fields, testObject ); assertArrayEquals( "Two fields", new String[]{"Field1", "Field2"}, fields.toArray() ); }
|
void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } SalesforceInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ); }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } SalesforceInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ); @Override String open(); void getData( SalesforceInputMeta in ); void setPosition(); }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } SalesforceInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ); @Override String open(); void getData( SalesforceInputMeta in ); void setPosition(); }
|
@Test public void testAddFields_nullIdNotAdded() throws Exception { final Set<String> fields = new LinkedHashSet<>(); XmlObject testObject = createObject( "Id", null, ObjectType.XMLOBJECT ); dialog.addFields( "", fields, testObject ); assertArrayEquals( "Null Id field not added", new String[]{}, fields.toArray() ); }
|
void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } SalesforceInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ); }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } SalesforceInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ); @Override String open(); void getData( SalesforceInputMeta in ); void setPosition(); }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } SalesforceInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ); @Override String open(); void getData( SalesforceInputMeta in ); void setPosition(); }
|
@Test public void testAddFields_IdAdded() throws Exception { final Set<String> fields = new LinkedHashSet<>(); XmlObject testObject = createObject( "Id", VALUE, ObjectType.XMLOBJECT ); dialog.addFields( "", fields, testObject ); assertArrayEquals( "Id field added", new String[]{"Id"}, fields.toArray() ); }
|
void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } SalesforceInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ); }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } SalesforceInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ); @Override String open(); void getData( SalesforceInputMeta in ); void setPosition(); }
|
SalesforceInputDialog extends SalesforceStepDialog { void addFields( String prefix, Set<String> fieldNames, XmlObject field ) { if ( isNullIdField( field ) ) { return; } String fieldname = prefix + field.getName().getLocalPart(); if ( field instanceof SObject ) { SObject sobject = (SObject) field; for ( XmlObject element : SalesforceConnection.getChildren( sobject ) ) { addFields( fieldname + ".", fieldNames, element ); } } else { addField( fieldname, fieldNames, (String) field.getValue() ); } } SalesforceInputDialog( Shell parent, Object in, TransMeta transMeta, String sname ); @Override String open(); void getData( SalesforceInputMeta in ); void setPosition(); }
|
@Test public void testGetTypes() { String[] types = executionConfigurationManager.getTypes(); assertTrue( Arrays.asList( types ).contains( DefaultRunConfiguration.TYPE ) ); assertTrue( Arrays.asList( types ).contains( SparkRunConfiguration.TYPE ) ); }
|
public String[] getTypes() { List<String> types = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { types.add( runConfigurationProvider.getType() ); } return types.toArray( new String[ 0 ] ); }
|
RunConfigurationManager implements RunConfigurationService { public String[] getTypes() { List<String> types = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { types.add( runConfigurationProvider.getType() ); } return types.toArray( new String[ 0 ] ); } }
|
RunConfigurationManager implements RunConfigurationService { public String[] getTypes() { List<String> types = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { types.add( runConfigurationProvider.getType() ); } return types.toArray( new String[ 0 ] ); } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); }
|
RunConfigurationManager implements RunConfigurationService { public String[] getTypes() { List<String> types = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { types.add( runConfigurationProvider.getType() ); } return types.toArray( new String[ 0 ] ); } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
RunConfigurationManager implements RunConfigurationService { public String[] getTypes() { List<String> types = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { types.add( runConfigurationProvider.getType() ); } return types.toArray( new String[ 0 ] ); } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
@Test public void testLoad() { List<RunConfiguration> runConfigurations = executionConfigurationManager.load(); assertEquals( runConfigurations.size(), 3 ); }
|
@Override public List<RunConfiguration> load() { List<RunConfiguration> runConfigurations = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { runConfigurations.addAll( runConfigurationProvider.load() ); } Collections.sort( runConfigurations, ( o1, o2 ) -> { if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.getName().compareToIgnoreCase( o2.getName() ); } ); return runConfigurations; }
|
RunConfigurationManager implements RunConfigurationService { @Override public List<RunConfiguration> load() { List<RunConfiguration> runConfigurations = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { runConfigurations.addAll( runConfigurationProvider.load() ); } Collections.sort( runConfigurations, ( o1, o2 ) -> { if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.getName().compareToIgnoreCase( o2.getName() ); } ); return runConfigurations; } }
|
RunConfigurationManager implements RunConfigurationService { @Override public List<RunConfiguration> load() { List<RunConfiguration> runConfigurations = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { runConfigurations.addAll( runConfigurationProvider.load() ); } Collections.sort( runConfigurations, ( o1, o2 ) -> { if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.getName().compareToIgnoreCase( o2.getName() ); } ); return runConfigurations; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); }
|
RunConfigurationManager implements RunConfigurationService { @Override public List<RunConfiguration> load() { List<RunConfiguration> runConfigurations = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { runConfigurations.addAll( runConfigurationProvider.load() ); } Collections.sort( runConfigurations, ( o1, o2 ) -> { if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.getName().compareToIgnoreCase( o2.getName() ); } ); return runConfigurations; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
RunConfigurationManager implements RunConfigurationService { @Override public List<RunConfiguration> load() { List<RunConfiguration> runConfigurations = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { runConfigurations.addAll( runConfigurationProvider.load() ); } Collections.sort( runConfigurations, ( o1, o2 ) -> { if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.getName().compareToIgnoreCase( o2.getName() ); } ); return runConfigurations; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
@Test public void testLoadByName() { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) executionConfigurationManager .load( "Default Configuration" ); assertNotNull( defaultRunConfiguration ); assertEquals( defaultRunConfiguration.getName(), "Default Configuration" ); }
|
@Override public List<RunConfiguration> load() { List<RunConfiguration> runConfigurations = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { runConfigurations.addAll( runConfigurationProvider.load() ); } Collections.sort( runConfigurations, ( o1, o2 ) -> { if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.getName().compareToIgnoreCase( o2.getName() ); } ); return runConfigurations; }
|
RunConfigurationManager implements RunConfigurationService { @Override public List<RunConfiguration> load() { List<RunConfiguration> runConfigurations = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { runConfigurations.addAll( runConfigurationProvider.load() ); } Collections.sort( runConfigurations, ( o1, o2 ) -> { if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.getName().compareToIgnoreCase( o2.getName() ); } ); return runConfigurations; } }
|
RunConfigurationManager implements RunConfigurationService { @Override public List<RunConfiguration> load() { List<RunConfiguration> runConfigurations = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { runConfigurations.addAll( runConfigurationProvider.load() ); } Collections.sort( runConfigurations, ( o1, o2 ) -> { if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.getName().compareToIgnoreCase( o2.getName() ); } ); return runConfigurations; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); }
|
RunConfigurationManager implements RunConfigurationService { @Override public List<RunConfiguration> load() { List<RunConfiguration> runConfigurations = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { runConfigurations.addAll( runConfigurationProvider.load() ); } Collections.sort( runConfigurations, ( o1, o2 ) -> { if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.getName().compareToIgnoreCase( o2.getName() ); } ); return runConfigurations; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
RunConfigurationManager implements RunConfigurationService { @Override public List<RunConfiguration> load() { List<RunConfiguration> runConfigurations = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { runConfigurations.addAll( runConfigurationProvider.load() ); } Collections.sort( runConfigurations, ( o1, o2 ) -> { if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.getName().compareToIgnoreCase( o2.getName() ); } ); return runConfigurations; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
@Test public void testGetNames() { List<String> names = executionConfigurationManager.getNames(); assertTrue( names.contains( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ); assertTrue( names.contains( "Default Configuration" ) ); assertTrue( names.contains( "Spark Configuration" ) ); }
|
public List<String> getNames() { List<String> names = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { names.addAll( runConfigurationProvider.getNames() ); } Collections.sort( names, ( o1, o2 ) -> { if ( o2.equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.compareToIgnoreCase( o2 ); } ); return names; }
|
RunConfigurationManager implements RunConfigurationService { public List<String> getNames() { List<String> names = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { names.addAll( runConfigurationProvider.getNames() ); } Collections.sort( names, ( o1, o2 ) -> { if ( o2.equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.compareToIgnoreCase( o2 ); } ); return names; } }
|
RunConfigurationManager implements RunConfigurationService { public List<String> getNames() { List<String> names = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { names.addAll( runConfigurationProvider.getNames() ); } Collections.sort( names, ( o1, o2 ) -> { if ( o2.equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.compareToIgnoreCase( o2 ); } ); return names; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); }
|
RunConfigurationManager implements RunConfigurationService { public List<String> getNames() { List<String> names = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { names.addAll( runConfigurationProvider.getNames() ); } Collections.sort( names, ( o1, o2 ) -> { if ( o2.equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.compareToIgnoreCase( o2 ); } ); return names; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
RunConfigurationManager implements RunConfigurationService { public List<String> getNames() { List<String> names = new ArrayList<>(); for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) { names.addAll( runConfigurationProvider.getNames() ); } Collections.sort( names, ( o1, o2 ) -> { if ( o2.equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { return 1; } return o1.compareToIgnoreCase( o2 ); } ); return names; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
@Test public void testGetRunConfigurationByType() { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) executionConfigurationManager.getRunConfigurationByType( DefaultRunConfiguration.TYPE ); SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) executionConfigurationManager.getRunConfigurationByType( SparkRunConfiguration.TYPE ); assertNotNull( defaultRunConfiguration ); assertNotNull( sparkRunConfiguration ); }
|
public RunConfiguration getRunConfigurationByType( String type ) { RunConfigurationProvider runConfigurationProvider = getProvider( type ); if ( runConfigurationProvider != null ) { return runConfigurationProvider.getConfiguration(); } return null; }
|
RunConfigurationManager implements RunConfigurationService { public RunConfiguration getRunConfigurationByType( String type ) { RunConfigurationProvider runConfigurationProvider = getProvider( type ); if ( runConfigurationProvider != null ) { return runConfigurationProvider.getConfiguration(); } return null; } }
|
RunConfigurationManager implements RunConfigurationService { public RunConfiguration getRunConfigurationByType( String type ) { RunConfigurationProvider runConfigurationProvider = getProvider( type ); if ( runConfigurationProvider != null ) { return runConfigurationProvider.getConfiguration(); } return null; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); }
|
RunConfigurationManager implements RunConfigurationService { public RunConfiguration getRunConfigurationByType( String type ) { RunConfigurationProvider runConfigurationProvider = getProvider( type ); if ( runConfigurationProvider != null ) { return runConfigurationProvider.getConfiguration(); } return null; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
RunConfigurationManager implements RunConfigurationService { public RunConfiguration getRunConfigurationByType( String type ) { RunConfigurationProvider runConfigurationProvider = getProvider( type ); if ( runConfigurationProvider != null ) { return runConfigurationProvider.getConfiguration(); } return null; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
@Test public void testGetExecutor() { DefaultRunConfigurationExecutor defaultRunConfigurationExecutor = (DefaultRunConfigurationExecutor) executionConfigurationManager.getExecutor( DefaultRunConfiguration.TYPE ); assertNotNull( defaultRunConfigurationExecutor ); }
|
public RunConfigurationExecutor getExecutor( String type ) { RunConfigurationProvider runConfigurationProvider = getProvider( type ); if ( runConfigurationProvider != null ) { return runConfigurationProvider.getExecutor(); } return null; }
|
RunConfigurationManager implements RunConfigurationService { public RunConfigurationExecutor getExecutor( String type ) { RunConfigurationProvider runConfigurationProvider = getProvider( type ); if ( runConfigurationProvider != null ) { return runConfigurationProvider.getExecutor(); } return null; } }
|
RunConfigurationManager implements RunConfigurationService { public RunConfigurationExecutor getExecutor( String type ) { RunConfigurationProvider runConfigurationProvider = getProvider( type ); if ( runConfigurationProvider != null ) { return runConfigurationProvider.getExecutor(); } return null; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); }
|
RunConfigurationManager implements RunConfigurationService { public RunConfigurationExecutor getExecutor( String type ) { RunConfigurationProvider runConfigurationProvider = getProvider( type ); if ( runConfigurationProvider != null ) { return runConfigurationProvider.getExecutor(); } return null; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
RunConfigurationManager implements RunConfigurationService { public RunConfigurationExecutor getExecutor( String type ) { RunConfigurationProvider runConfigurationProvider = getProvider( type ); if ( runConfigurationProvider != null ) { return runConfigurationProvider.getExecutor(); } return null; } RunConfigurationManager( List<RunConfigurationProvider> runConfigurationProviders ); @Override List<RunConfiguration> load(); @Override RunConfiguration load( String name ); @Override boolean save( RunConfiguration runConfiguration ); @Override boolean delete( String name ); @Override void deleteAll(); String[] getTypes(); List<String> getNames(); List<String> getNames( String type ); RunConfiguration getRunConfigurationByType( String type ); RunConfigurationExecutor getExecutor( String type ); List<RunConfigurationProvider> getRunConfigurationProviders( String type ); List<RunConfigurationProvider> getRunConfigurationProviders(); RunConfigurationProvider getDefaultRunConfigurationProvider(); void setDefaultRunConfigurationProvider(
RunConfigurationProvider defaultRunConfigurationProvider ); }
|
@Test public void testCallExtensionPoint() throws Exception { runConfigurationRunExtensionPoint.callExtensionPoint( log, new Object[] { transExecutionConfiguration, abstractMeta, variableSpace } ); verify( runConfigurationExecutor ) .execute( runConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); }
|
@Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } }
|
RunConfigurationRunExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } } }
|
RunConfigurationRunExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } } RunConfigurationRunExtensionPoint( RunConfigurationManager runConfigurationManager ); }
|
RunConfigurationRunExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } } RunConfigurationRunExtensionPoint( RunConfigurationManager runConfigurationManager ); @Override void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ); }
|
RunConfigurationRunExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } } RunConfigurationRunExtensionPoint( RunConfigurationManager runConfigurationManager ); @Override void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ); }
|
@Test public void testCreateIndexLenghts() throws KettleException { DatabaseMeta meta = mock( DatabaseMeta.class ); when( meta.getStartQuote() ).thenReturn( "" ); when( meta.getEndQuote() ).thenReturn( "" ); when( meta.getQuotedSchemaTableCombination( anyString(), anyString() ) ).thenAnswer( new Answer<String>() { @Override public String answer( InvocationOnMock invocation ) throws Throwable { return invocation.getArguments()[1].toString(); } } ); when( meta.getDatabaseInterface() ).thenReturn( new OracleDatabaseMeta() ); Database db = mock( Database.class ); when( db.getDatabaseMeta() ).thenReturn( meta ); when( db.getDDL( anyString(), any( RowMetaInterface.class ), anyString(), anyBoolean(), anyString(), anyBoolean() ) ).thenReturn( "### CREATE TABLE;" ); when( repository.getDatabase() ).thenReturn( db ); when( repository.getDatabaseMeta() ).thenReturn( meta ); when( db.getCreateIndexStatement( anyString(), anyString(), any( String[].class ), anyBoolean(), anyBoolean(), anyBoolean(), anyBoolean() ) ).thenAnswer( lan ); KettleDatabaseRepositoryCreationHelper helper = new KettleDatabaseRepositoryCreationHelper( repository ); PluginRegistry.addPluginType( TwoWayPasswordEncoderPluginType.getInstance() ); PluginRegistry.init( true ); String passwordEncoderPluginID = Const.NVL( EnvUtil.getSystemProperty( Const.KETTLE_PASSWORD_ENCODER_PLUGIN ), "Kettle" ); Encr.init( passwordEncoderPluginID ); List<String> statements = new ArrayList<String>(); helper.createRepositorySchema( null, false, statements, true ); for ( String st : statements ) { if ( st == null || st.startsWith( "#" ) ) { continue; } assertTrue( "Index name is not overlenght!: " + st, st.length() <= 30 ); } }
|
public synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryrun ) throws KettleException { RowMetaInterface table; String sql; String tablename; String schemaTable; String indexname; String[] keyfield; String[] user, pass, code, desc; int KEY = 9; log.logBasic( "Starting to create or modify the repository tables..." ); String message = ( upgrade ? "Upgrading " : "Creating" ) + " the Kettle repository..."; if ( monitor != null ) { monitor.beginTask( message, 31 ); } repository.connectionDelegate.setAutoCommit( true ); table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_REPOSITORY_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_REP_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_DATE ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_OPERATION_DESC, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new 
KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( !dryrun ) { repository.insertLogEntry( ( upgrade ? "Upgrade" : "Creation" ) + " of the Kettle repository" ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_VERSION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MAJOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MINOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_VERSION_UPGRADE_DATE, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VERSION_IS_UPGRADE, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, false ); boolean create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } try { LongObjectId nextId; if ( sql.toUpperCase().indexOf( "CREATE TABLE" ) < 0 ) { nextId = repository.connectionDelegate.getNextID( schemaTable, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION ); } else { nextId = new LongObjectId( 1L ); } Object[] data = new Object[] { nextId.longValue(), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MAJOR_VERSION ), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MINOR_VERSION ), new Date(), Boolean.valueOf( upgrade ), }; if ( dryrun ) { sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_VERSION, table, data, null ); statements.add( sql ); } else { database.execStatement( "INSERT INTO " + databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_VERSION ) + " VALUES(?, ?, ?, ?, ?)", table, data ); } } catch ( KettleException e ) { throw new KettleException( "Unable to insert new version log record into " + schemaTable, e ); } boolean ok_database_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL 
statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_database_type ) { updateDatabaseTypes( statements, dryrun, create ); } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_database_contype = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_CONTYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } ok_database_contype = sql.toUpperCase().contains( "CREATE TABLE" ); if ( ok_database_contype ) { code = DatabaseMeta.dbAccessTypeCode; desc = DatabaseMeta.dbAccessTypeDesc; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextDatabaseConnectionTypeID(); } Object[] tableData = new Object[] { new LongObjectId( nextid ).longValue(), code[i], desc[i], }; if ( dryrun ) { sql = database.getSQLOutput( null, tablename, table, tableData, null ); statements.add( sql ); } else { database.setValuesInsert( table, tableData ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( 
KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_WIDTH, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_HEIGHT, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_FONT_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_FONT_SIZE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_BOLD, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_ITALIC, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_DRAW_SHADOW, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( 
"executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_PORT, 7, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = 
KettleDatabaseRepositoryBase.IDX_R_DATABASE_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DIRECTORY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_DIRECTORY; keyfield = new String[] { KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, 
KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANSFORMATION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_READ, KEY, 0 ) ); 
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_WRITE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_INPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_OUTPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_UPDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_BATCHID, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_LOGFIELD, 1, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_MAXDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_FIELD_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_OFFSET_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_DIFF_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( 
KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_SIZE_ROWSET, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( database.checkTableExists( schemaTable ) ) { sql = "SELECT * FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; List<Object[]> rows = database.getRows( sql, 1 ); if ( rows != null && rows.size() > 0 ) { sql = "UPDATE " + schemaTable + " SET " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + "=0 WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; statements.add( sql ); if ( !dryrun ) { database.execStatement( sql ); } } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( 
new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_TRANS_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_JOB_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } 
database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DEPENDENCY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_TABLE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_FIELD_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table .addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_DYNAMIC_DEFINITION, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_PARTITIONS_PER_SLAVE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_PARTITION_ID, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC, 0, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_WEB_APP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NON_PROXY_HOSTS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_SLAVE_MASTER ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { 
log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANS_HOP_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CONDITION_NEGATED, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_OPERATOR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_LEFT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_CONDITION_FUNCTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_RIGHT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_VALUE_RIGHT, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_VALUE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_TYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_STR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VALUE_IS_NULL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_step_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_HELPTEXT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, "ID_STEP_TYPE", false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_step_type ) { updateStepTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_COPIES, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ID_STEP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( 
log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_STEP_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, 
KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() 
) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_loglevel = true; tablename = KettleDatabaseRepository.TABLE_R_LOGLEVEL; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_loglevel ) { code = LogLevel.logLogLevelCodes(); desc = LogLevel.getLogLevelDescriptions(); if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 1; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ) + " FROM " + schemaTable + " WHERE " + database.getDatabaseMeta().quoteField( "CODE" ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i ); if ( !create ) { nextid = repository.connectionDelegate.getNextLoglevelID(); } RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION ), desc[i] ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData.getRowMeta(), tableData.getData() ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_NAME, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_LOGTYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILENAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILEEXTENTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_DATE, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_TIME, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOG_ID_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, 0, 
0 ) );
    table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, 0, 0 ) );
    table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, 0, 0 ) );
    table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_SHARED_FILE,
      KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
    // Generate create/alter DDL for R_JOB, keyed on ID_JOB; an empty string means the table already matches.
    sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false );
    if ( !Utils.isEmpty( sql ) ) {
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created or altered table " + schemaTable );
        }
      }
    } else {
      if ( log.isDetailed() ) {
        log.logDetailed( "Table " + schemaTable + " is OK." );
      }
    }
    if ( monitor != null ) {
      monitor.worked( 1 );
    }

    //
    // R_JOBENTRY_DATABASE : cross-reference between job entries and the databases they use.
    //
    table = new RowMeta();
    tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE;
    schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename );
    if ( monitor != null ) {
      monitor.subTask( "Checking table " + schemaTable );
    }
    table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, KEY, 0 ) );
    table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY, KEY, 0 ) );
    table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, KEY, 0 ) );
    // FIX: removed a stray preceding getDDL() call that passed FIELD_JOB_ID_JOB (a key of R_JOB,
    // not a column of this table) as the technical key; its result was immediately overwritten by
    // the keyless call below, so it was dead code issuing a pointless metadata query.
    sql = database.getDDL( schemaTable, table, null, false, null, false );
    if ( !Utils.isEmpty( sql ) ) {
      statements.add( sql );
      if ( !dryrun ) {
        if ( log.isDetailed() ) {
          log.logDetailed( "executing SQL statements: " + Const.CR + sql );
        }
        database.execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Created or altered table " + schemaTable );
        }
      }
      // Best-effort lookup-index creation; failures are intentionally ignored (see catch below).
      try {
        indexname =
KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_jobentry_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_jobentry_type ) { updateJobEntryTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_COPY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 4, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM, 13, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( 
schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_ENABLED, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_EVALUATION, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_UNCONDITIONAL, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( 
!dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NAMESPACE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NAMESPACE_NAME, ( database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH ), 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_NAME, getRepoStringLength(), 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_NAME, getRepoStringLength(), 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_KEY, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_VALUE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } Map<String, ObjectId> users = new Hashtable<String, ObjectId>(); boolean ok_user = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_USER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_USER_ID_USER, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_user ) { user = new String[] { "admin", "guest" }; pass = new String[] { "admin", "guest" }; code = new String[] { "Administrator", "Guest account" }; desc = new String[] { "User manager", "Read-only guest account" }; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < user.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_USER_ID_USER ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_USER_LOGIN ) + " = '" + user[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextUserID(); } String password = Encr.encryptPassword( pass[i] ); RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN ), user[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD ), password ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION ), desc[i] ); tableData.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED ), Boolean.TRUE ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData ); database.insertRow(); } users.put( user[i], nextid ); } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != 
null ) { monitor.worked( 1 ); } if ( monitor != null ) { monitor.done(); } log.logBasic( ( upgrade ? "Upgraded" : "Created" ) + " " + KettleDatabaseRepository.repositoryTableNames.length + " repository tables." ); }
|
KettleDatabaseRepositoryCreationHelper { public synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryrun ) throws KettleException { RowMetaInterface table; String sql; String tablename; String schemaTable; String indexname; String[] keyfield; String[] user, pass, code, desc; int KEY = 9; log.logBasic( "Starting to create or modify the repository tables..." ); String message = ( upgrade ? "Upgrading " : "Creating" ) + " the Kettle repository..."; if ( monitor != null ) { monitor.beginTask( message, 31 ); } repository.connectionDelegate.setAutoCommit( true ); table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_REPOSITORY_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_REP_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_DATE ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_OPERATION_DESC, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( 
KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( !dryrun ) { repository.insertLogEntry( ( upgrade ? "Upgrade" : "Creation" ) + " of the Kettle repository" ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_VERSION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MAJOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MINOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_VERSION_UPGRADE_DATE, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VERSION_IS_UPGRADE, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, false ); boolean create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } try { LongObjectId nextId; if ( sql.toUpperCase().indexOf( "CREATE TABLE" ) < 0 ) { nextId = repository.connectionDelegate.getNextID( schemaTable, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION ); } else { nextId = new LongObjectId( 1L ); } Object[] data = new Object[] { nextId.longValue(), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MAJOR_VERSION ), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MINOR_VERSION ), new Date(), Boolean.valueOf( upgrade ), }; if ( dryrun ) { sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_VERSION, table, data, null ); statements.add( sql ); } else { database.execStatement( "INSERT INTO " + databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_VERSION ) + " VALUES(?, ?, ?, ?, ?)", table, data ); } } catch ( KettleException e ) { throw new KettleException( "Unable to insert new version log record into " + schemaTable, e ); } boolean ok_database_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL 
statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_database_type ) { updateDatabaseTypes( statements, dryrun, create ); } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_database_contype = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_CONTYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } ok_database_contype = sql.toUpperCase().contains( "CREATE TABLE" ); if ( ok_database_contype ) { code = DatabaseMeta.dbAccessTypeCode; desc = DatabaseMeta.dbAccessTypeDesc; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextDatabaseConnectionTypeID(); } Object[] tableData = new Object[] { new LongObjectId( nextid ).longValue(), code[i], desc[i], }; if ( dryrun ) { sql = database.getSQLOutput( null, tablename, table, tableData, null ); statements.add( sql ); } else { database.setValuesInsert( table, tableData ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( 
KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_WIDTH, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_HEIGHT, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_FONT_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_FONT_SIZE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_BOLD, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_ITALIC, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_DRAW_SHADOW, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( 
"executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_PORT, 7, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = 
KettleDatabaseRepositoryBase.IDX_R_DATABASE_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DIRECTORY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_DIRECTORY; keyfield = new String[] { KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, 
KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANSFORMATION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_READ, KEY, 0 ) ); 
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_WRITE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_INPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_OUTPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_UPDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_BATCHID, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_LOGFIELD, 1, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_MAXDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_FIELD_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_OFFSET_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_DIFF_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( 
KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_SIZE_ROWSET, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( database.checkTableExists( schemaTable ) ) { sql = "SELECT * FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; List<Object[]> rows = database.getRows( sql, 1 ); if ( rows != null && rows.size() > 0 ) { sql = "UPDATE " + schemaTable + " SET " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + "=0 WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; statements.add( sql ); if ( !dryrun ) { database.execStatement( sql ); } } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( 
new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_TRANS_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_JOB_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } 
database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DEPENDENCY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_TABLE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_FIELD_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table .addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_DYNAMIC_DEFINITION, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_PARTITIONS_PER_SLAVE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_PARTITION_ID, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC, 0, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_WEB_APP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NON_PROXY_HOSTS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_SLAVE_MASTER ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { 
log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANS_HOP_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
// NOTE(review): this span is the interior of a much larger repository-schema
// creation routine (the enclosing method begins before and ends after this
// chunk).  Each segment below follows the same template: build a RowMeta that
// describes one repository table, ask the Database for CREATE/ALTER DDL,
// record the DDL in 'statements', and (unless dryrun) execute it, logging at
// detailed level either the change or "Table ... is OK.".
// Tail of the previous segment, then table R_TRANS_STEP_CONDITION
// (link table: transformation id / step id / condition id; no primary key).
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_CONDITION: condition tree nodes (parent link, negation flag,
// operator, left/right operand names, condition function, right-value id),
// keyed on ID_CONDITION.
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CONDITION_NEGATED, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_OPERATOR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_LEFT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_CONDITION_FUNCTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_RIGHT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_VALUE_RIGHT, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_VALUE: literal values referenced by conditions (name, type,
// string representation, null flag), keyed on ID_VALUE.
); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_VALUE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_TYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_STR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VALUE_IS_NULL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_STEP_TYPE: step-type dictionary (code / description / help text).
// 'create' records whether the emitted DDL is a CREATE TABLE (vs ALTER) so
// the population step below can decide how ids are assigned.
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_step_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_HELPTEXT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, "ID_STEP_TYPE", false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Populate R_STEP_TYPE via updateStepTypes(), then table R_STEP: one row per
// step of a transformation (name, description, type, distribute flag, copies,
// GUI location/draw, copies-as-string), keyed on ID_STEP.
); } } if ( ok_step_type ) { updateStepTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_COPIES, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ID_STEP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if (
// Table R_STEP_ATTRIBUTE: per-step key/value attributes
// (code + nr -> numeric or string value), keyed on ID_STEP_ATTRIBUTE.
log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_STEP_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE,
// Unique lookup index on R_STEP_ATTRIBUTE (ID_STEP, CODE, NR); index-creation
// exceptions are deliberately swallowed (best effort).  Then table
// R_STEP_DATABASE: which steps use which database connections (no primary
// key), plus its first lookup index on ID_TRANSFORMATION.
KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed()
// Second lookup index on R_STEP_DATABASE (ID_DATABASE), then table
// R_TRANS_NOTE: note attachments per transformation (no primary key).
) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_LOGLEVEL: log-level dictionary (code + description); 'create'
// again records CREATE-vs-ALTER for the population step below.
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_loglevel = true; tablename = KettleDatabaseRepository.TABLE_R_LOGLEVEL; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Populate R_LOGLEVEL from the LogLevel codes/descriptions (loop starts at
// i = 1, skipping entry 0).  On upgrade, each code is looked up first so only
// missing rows are inserted; on a fresh CREATE the loop index is used as the
// id, otherwise the next id comes from the connection delegate.  Then table
// R_LOG begins (its remaining columns continue on the next segment).
); } } if ( ok_loglevel ) { code = LogLevel.logLogLevelCodes(); desc = LogLevel.getLogLevelDescriptions(); if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 1; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ) + " FROM " + schemaTable + " WHERE " + database.getDatabaseMeta().quoteField( "CODE" ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i ); if ( !create ) { nextid = repository.connectionDelegate.getNextLoglevelID(); } RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION ), desc[i] ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData.getRowMeta(), tableData.getData() ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_NAME,
// Remaining R_LOG columns (log level, log type, file name/extension,
// add-date/add-time flags, log database/table) and its DDL, keyed on ID_LOG.
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_LOGTYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILENAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILEEXTENTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_DATE, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_TIME, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOG_ID_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_JOB: job header row (directory, name, descriptions, version,
// status, logging settings, created/modified audit columns, batch-id and
// logfield flags, shared-file path); its DDL follows on the next segment.
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, 0,
// NOTE(review): in the R_JOBENTRY_DATABASE segment below, getDDL() is called
// twice in a row and the first result (primary key FIELD_JOB_ID_JOB) is
// immediately overwritten by the second call (no primary key).  The first
// call looks redundant -- or the wrong variant was kept; confirm which
// primary key, if any, is intended for this link table.
0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, 0, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname =
// Lookup indexes on R_JOBENTRY_DATABASE (first on ID_JOB, then on
// ID_DATABASE); index-creation exceptions are swallowed as elsewhere.
KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_JOBENTRY_TYPE: job-entry-type dictionary (code + description);
// 'create' again records CREATE-vs-ALTER for the population step below.
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_jobentry_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Populate R_JOBENTRY_TYPE via updateJobEntryTypes(), then table R_JOBENTRY:
// one row per job entry definition (name, description, type), keyed on
// ID_JOBENTRY.
); } } if ( ok_jobentry_type ) { updateJobEntryTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_JOBENTRY_COPY: placed copies of job entries (nr, GUI location,
// draw and parallel flags), keyed on ID_JOBENTRY_COPY.
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_COPY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 4, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_JOBENTRY_ATTRIBUTE: per-job-entry key/value attributes
// (code + nr -> numeric or string value), plus the start of its unique
// lookup index (completed on the next segment).
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM, 13, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement(
// Finish the R_JOBENTRY_ATTRIBUTE index, then table R_JOB_HOP: hops between
// job entry copies (enabled / evaluation / unconditional flags), keyed on
// ID_JOB_HOP.
schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_ENABLED, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_EVALUATION, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_UNCONDITIONAL, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_JOB_NOTE (job/note link, no primary key), then table R_TRANS_LOCK:
// who locked a transformation (user, message, date), keyed on ID_TRANS_LOCK;
// its DDL execution continues on the next segment.
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if (
// Execute the R_TRANS_LOCK DDL, then table R_JOB_LOCK: the same lock shape
// for jobs (user, message, date), keyed on ID_JOB_LOCK.
!dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_NAMESPACE: metastore namespaces.  The NAME column width is the
// database's max VARCHAR length minus 1, falling back to the Oracle string
// length constant when that maximum is not positive.
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NAMESPACE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NAMESPACE_NAME, ( database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH ), 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
// Table R_ELEMENT_TYPE: metastore element types per namespace (name width
// via getRepoStringLength()), keyed on ID_ELEMENT_TYPE.  The routine
// continues past this chunk.
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_NAME, getRepoStringLength(), 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK."
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_NAME, getRepoStringLength(), 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_KEY, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_VALUE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } Map<String, ObjectId> users = new Hashtable<String, ObjectId>(); boolean ok_user = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_USER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_USER_ID_USER, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_user ) { user = new String[] { "admin", "guest" }; pass = new String[] { "admin", "guest" }; code = new String[] { "Administrator", "Guest account" }; desc = new String[] { "User manager", "Read-only guest account" }; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < user.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_USER_ID_USER ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_USER_LOGIN ) + " = '" + user[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextUserID(); } String password = Encr.encryptPassword( pass[i] ); RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN ), user[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD ), password ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION ), desc[i] ); tableData.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED ), Boolean.TRUE ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData ); database.insertRow(); } users.put( user[i], nextid ); } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != 
null ) { monitor.worked( 1 ); } if ( monitor != null ) { monitor.done(); } log.logBasic( ( upgrade ? "Upgraded" : "Created" ) + " " + KettleDatabaseRepository.repositoryTableNames.length + " repository tables." ); } }
|
KettleDatabaseRepositoryCreationHelper { public synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryrun ) throws KettleException { RowMetaInterface table; String sql; String tablename; String schemaTable; String indexname; String[] keyfield; String[] user, pass, code, desc; int KEY = 9; log.logBasic( "Starting to create or modify the repository tables..." ); String message = ( upgrade ? "Upgrading " : "Creating" ) + " the Kettle repository..."; if ( monitor != null ) { monitor.beginTask( message, 31 ); } repository.connectionDelegate.setAutoCommit( true ); table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_REPOSITORY_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_REP_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_DATE ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_OPERATION_DESC, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( 
KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( !dryrun ) { repository.insertLogEntry( ( upgrade ? "Upgrade" : "Creation" ) + " of the Kettle repository" ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_VERSION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MAJOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MINOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_VERSION_UPGRADE_DATE, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VERSION_IS_UPGRADE, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, false ); boolean create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } try { LongObjectId nextId; if ( sql.toUpperCase().indexOf( "CREATE TABLE" ) < 0 ) { nextId = repository.connectionDelegate.getNextID( schemaTable, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION ); } else { nextId = new LongObjectId( 1L ); } Object[] data = new Object[] { nextId.longValue(), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MAJOR_VERSION ), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MINOR_VERSION ), new Date(), Boolean.valueOf( upgrade ), }; if ( dryrun ) { sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_VERSION, table, data, null ); statements.add( sql ); } else { database.execStatement( "INSERT INTO " + databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_VERSION ) + " VALUES(?, ?, ?, ?, ?)", table, data ); } } catch ( KettleException e ) { throw new KettleException( "Unable to insert new version log record into " + schemaTable, e ); } boolean ok_database_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL 
statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_database_type ) { updateDatabaseTypes( statements, dryrun, create ); } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_database_contype = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_CONTYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } ok_database_contype = sql.toUpperCase().contains( "CREATE TABLE" ); if ( ok_database_contype ) { code = DatabaseMeta.dbAccessTypeCode; desc = DatabaseMeta.dbAccessTypeDesc; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextDatabaseConnectionTypeID(); } Object[] tableData = new Object[] { new LongObjectId( nextid ).longValue(), code[i], desc[i], }; if ( dryrun ) { sql = database.getSQLOutput( null, tablename, table, tableData, null ); statements.add( sql ); } else { database.setValuesInsert( table, tableData ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( 
KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_WIDTH, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_HEIGHT, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_FONT_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_FONT_SIZE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_BOLD, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_ITALIC, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_DRAW_SHADOW, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( 
"executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_PORT, 7, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = 
KettleDatabaseRepositoryBase.IDX_R_DATABASE_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DIRECTORY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_DIRECTORY; keyfield = new String[] { KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, 
KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANSFORMATION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_READ, KEY, 0 ) ); 
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_WRITE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_INPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_OUTPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_UPDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_BATCHID, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_LOGFIELD, 1, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_MAXDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_FIELD_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_OFFSET_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_DIFF_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( 
KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_SIZE_ROWSET, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( database.checkTableExists( schemaTable ) ) { sql = "SELECT * FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; List<Object[]> rows = database.getRows( sql, 1 ); if ( rows != null && rows.size() > 0 ) { sql = "UPDATE " + schemaTable + " SET " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + "=0 WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; statements.add( sql ); if ( !dryrun ) { database.execStatement( sql ); } } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( 
new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_TRANS_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_JOB_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } 
database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DEPENDENCY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_TABLE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_FIELD_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table .addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_DYNAMIC_DEFINITION, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_PARTITIONS_PER_SLAVE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_PARTITION_ID, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC, 0, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_WEB_APP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NON_PROXY_HOSTS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_SLAVE_MASTER ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { 
log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANS_HOP_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CONDITION_NEGATED, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_OPERATOR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_LEFT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_CONDITION_FUNCTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_RIGHT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_VALUE_RIGHT, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_VALUE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_TYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_STR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VALUE_IS_NULL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_step_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_HELPTEXT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, "ID_STEP_TYPE", false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_step_type ) { updateStepTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_COPIES, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ID_STEP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( 
log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_STEP_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, 
KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() 
) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_loglevel = true; tablename = KettleDatabaseRepository.TABLE_R_LOGLEVEL; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_loglevel ) { code = LogLevel.logLogLevelCodes(); desc = LogLevel.getLogLevelDescriptions(); if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 1; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ) + " FROM " + schemaTable + " WHERE " + database.getDatabaseMeta().quoteField( "CODE" ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i ); if ( !create ) { nextid = repository.connectionDelegate.getNextLoglevelID(); } RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION ), desc[i] ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData.getRowMeta(), tableData.getData() ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_NAME, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_LOGTYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILENAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILEEXTENTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_DATE, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_TIME, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOG_ID_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, 0, 
0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, 0, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = 
KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_jobentry_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_jobentry_type ) { updateJobEntryTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_COPY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 4, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM, 13, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( 
schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_ENABLED, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_EVALUATION, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_UNCONDITIONAL, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( 
!dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NAMESPACE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NAMESPACE_NAME, ( database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH ), 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_NAME, getRepoStringLength(), 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_NAME, getRepoStringLength(), 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_KEY, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_VALUE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } Map<String, ObjectId> users = new Hashtable<String, ObjectId>(); boolean ok_user = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_USER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_USER_ID_USER, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_user ) { user = new String[] { "admin", "guest" }; pass = new String[] { "admin", "guest" }; code = new String[] { "Administrator", "Guest account" }; desc = new String[] { "User manager", "Read-only guest account" }; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < user.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_USER_ID_USER ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_USER_LOGIN ) + " = '" + user[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextUserID(); } String password = Encr.encryptPassword( pass[i] ); RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN ), user[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD ), password ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION ), desc[i] ); tableData.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED ), Boolean.TRUE ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData ); database.insertRow(); } users.put( user[i], nextid ); } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != 
null ) { monitor.worked( 1 ); } if ( monitor != null ) { monitor.done(); } log.logBasic( ( upgrade ? "Upgraded" : "Created" ) + " " + KettleDatabaseRepository.repositoryTableNames.length + " repository tables." ); } KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ); }
|
KettleDatabaseRepositoryCreationHelper { public synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryrun ) throws KettleException { RowMetaInterface table; String sql; String tablename; String schemaTable; String indexname; String[] keyfield; String[] user, pass, code, desc; int KEY = 9; log.logBasic( "Starting to create or modify the repository tables..." ); String message = ( upgrade ? "Upgrading " : "Creating" ) + " the Kettle repository..."; if ( monitor != null ) { monitor.beginTask( message, 31 ); } repository.connectionDelegate.setAutoCommit( true ); table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_REPOSITORY_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_REP_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_DATE ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_OPERATION_DESC, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( 
KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( !dryrun ) { repository.insertLogEntry( ( upgrade ? "Upgrade" : "Creation" ) + " of the Kettle repository" ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_VERSION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MAJOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MINOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_VERSION_UPGRADE_DATE, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VERSION_IS_UPGRADE, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, false ); boolean create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } try { LongObjectId nextId; if ( sql.toUpperCase().indexOf( "CREATE TABLE" ) < 0 ) { nextId = repository.connectionDelegate.getNextID( schemaTable, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION ); } else { nextId = new LongObjectId( 1L ); } Object[] data = new Object[] { nextId.longValue(), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MAJOR_VERSION ), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MINOR_VERSION ), new Date(), Boolean.valueOf( upgrade ), }; if ( dryrun ) { sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_VERSION, table, data, null ); statements.add( sql ); } else { database.execStatement( "INSERT INTO " + databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_VERSION ) + " VALUES(?, ?, ?, ?, ?)", table, data ); } } catch ( KettleException e ) { throw new KettleException( "Unable to insert new version log record into " + schemaTable, e ); } boolean ok_database_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL 
statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_database_type ) { updateDatabaseTypes( statements, dryrun, create ); } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_database_contype = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_CONTYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } ok_database_contype = sql.toUpperCase().contains( "CREATE TABLE" ); if ( ok_database_contype ) { code = DatabaseMeta.dbAccessTypeCode; desc = DatabaseMeta.dbAccessTypeDesc; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextDatabaseConnectionTypeID(); } Object[] tableData = new Object[] { new LongObjectId( nextid ).longValue(), code[i], desc[i], }; if ( dryrun ) { sql = database.getSQLOutput( null, tablename, table, tableData, null ); statements.add( sql ); } else { database.setValuesInsert( table, tableData ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( 
KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_WIDTH, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_HEIGHT, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_FONT_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_FONT_SIZE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_BOLD, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_ITALIC, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_DRAW_SHADOW, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( 
"executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_PORT, 7, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = 
KettleDatabaseRepositoryBase.IDX_R_DATABASE_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DIRECTORY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_DIRECTORY; keyfield = new String[] { KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, 
KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANSFORMATION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_READ, KEY, 0 ) ); 
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_WRITE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_INPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_OUTPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_UPDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_BATCHID, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_LOGFIELD, 1, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_MAXDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_FIELD_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_OFFSET_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_DIFF_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( 
KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_SIZE_ROWSET, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( database.checkTableExists( schemaTable ) ) { sql = "SELECT * FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; List<Object[]> rows = database.getRows( sql, 1 ); if ( rows != null && rows.size() > 0 ) { sql = "UPDATE " + schemaTable + " SET " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + "=0 WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; statements.add( sql ); if ( !dryrun ) { database.execStatement( sql ); } } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( 
new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_TRANS_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_JOB_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } 
database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DEPENDENCY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_TABLE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_FIELD_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table .addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_DYNAMIC_DEFINITION, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_PARTITIONS_PER_SLAVE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_PARTITION_ID, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC, 0, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_WEB_APP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NON_PROXY_HOSTS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_SLAVE_MASTER ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { 
log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANS_HOP_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CONDITION_NEGATED, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_OPERATOR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_LEFT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_CONDITION_FUNCTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_RIGHT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_VALUE_RIGHT, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_VALUE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_TYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_STR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VALUE_IS_NULL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_step_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_HELPTEXT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, "ID_STEP_TYPE", false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_step_type ) { updateStepTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_COPIES, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ID_STEP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( 
log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_STEP_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, 
KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() 
) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_loglevel = true; tablename = KettleDatabaseRepository.TABLE_R_LOGLEVEL; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_loglevel ) { code = LogLevel.logLogLevelCodes(); desc = LogLevel.getLogLevelDescriptions(); if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 1; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ) + " FROM " + schemaTable + " WHERE " + database.getDatabaseMeta().quoteField( "CODE" ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i ); if ( !create ) { nextid = repository.connectionDelegate.getNextLoglevelID(); } RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION ), desc[i] ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData.getRowMeta(), tableData.getData() ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_NAME, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_LOGTYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILENAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILEEXTENTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_DATE, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_TIME, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOG_ID_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, 0, 
0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, 0, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = 
KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_jobentry_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_jobentry_type ) { updateJobEntryTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_COPY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 4, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM, 13, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( 
schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_ENABLED, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_EVALUATION, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_UNCONDITIONAL, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( 
!dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NAMESPACE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NAMESPACE_NAME, ( database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH ), 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_NAME, getRepoStringLength(), 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_NAME, getRepoStringLength(), 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_KEY, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_VALUE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } Map<String, ObjectId> users = new Hashtable<String, ObjectId>(); boolean ok_user = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_USER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_USER_ID_USER, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_user ) { user = new String[] { "admin", "guest" }; pass = new String[] { "admin", "guest" }; code = new String[] { "Administrator", "Guest account" }; desc = new String[] { "User manager", "Read-only guest account" }; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < user.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_USER_ID_USER ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_USER_LOGIN ) + " = '" + user[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextUserID(); } String password = Encr.encryptPassword( pass[i] ); RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN ), user[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD ), password ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION ), desc[i] ); tableData.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED ), Boolean.TRUE ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData ); database.insertRow(); } users.put( user[i], nextid ); } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != 
null ) { monitor.worked( 1 ); } if ( monitor != null ) { monitor.done(); } log.logBasic( ( upgrade ? "Upgraded" : "Created" ) + " " + KettleDatabaseRepository.repositoryTableNames.length + " repository tables." ); } KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ); synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade,
List<String> statements, boolean dryrun ); List<String> updateStepTypes( List<String> statements, boolean dryrun, boolean create ); List<String> updateDatabaseTypes( List<String> statements, boolean dryrun, boolean create ); void updateJobEntryTypes( List<String> statements, boolean dryrun, boolean create ); }
|
KettleDatabaseRepositoryCreationHelper { public synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade, List<String> statements, boolean dryrun ) throws KettleException { RowMetaInterface table; String sql; String tablename; String schemaTable; String indexname; String[] keyfield; String[] user, pass, code, desc; int KEY = 9; log.logBasic( "Starting to create or modify the repository tables..." ); String message = ( upgrade ? "Upgrading " : "Creating" ) + " the Kettle repository..."; if ( monitor != null ) { monitor.beginTask( message, 31 ); } repository.connectionDelegate.setAutoCommit( true ); table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_REPOSITORY_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_REP_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_DATE ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_LOG_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_REPOSITORY_LOG_OPERATION_DESC, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_REPOSITORY_LOG_ID_REPOSITORY_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( 
KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( !dryrun ) { repository.insertLogEntry( ( upgrade ? "Upgrade" : "Creation" ) + " of the Kettle repository" ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_VERSION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MAJOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VERSION_MINOR_VERSION, 3, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_VERSION_UPGRADE_DATE, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VERSION_IS_UPGRADE, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION, false ); boolean create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } try { LongObjectId nextId; if ( sql.toUpperCase().indexOf( "CREATE TABLE" ) < 0 ) { nextId = repository.connectionDelegate.getNextID( schemaTable, KettleDatabaseRepository.FIELD_VERSION_ID_VERSION ); } else { nextId = new LongObjectId( 1L ); } Object[] data = new Object[] { nextId.longValue(), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MAJOR_VERSION ), Long.valueOf( KettleDatabaseRepositoryConnectionDelegate.REQUIRED_MINOR_VERSION ), new Date(), Boolean.valueOf( upgrade ), }; if ( dryrun ) { sql = database.getSQLOutput( null, KettleDatabaseRepository.TABLE_R_VERSION, table, data, null ); statements.add( sql ); } else { database.execStatement( "INSERT INTO " + databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_VERSION ) + " VALUES(?, ?, ?, ?, ?)", table, data ); } } catch ( KettleException e ) { throw new KettleException( "Unable to insert new version log record into " + schemaTable, e ); } boolean ok_database_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { try { if ( log.isDetailed() ) { log.logDetailed( "executing SQL 
statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created/altered table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to create or modify table " + schemaTable, dbe ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( ok_database_type ) { updateDatabaseTypes( statements, dryrun, create ); } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_database_contype = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_CONTYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } ok_database_contype = sql.toUpperCase().contains( "CREATE TABLE" ); if ( ok_database_contype ) { code = DatabaseMeta.dbAccessTypeCode; desc = DatabaseMeta.dbAccessTypeDesc; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_ID_DATABASE_CONTYPE ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_DATABASE_CONTYPE_CODE ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextDatabaseConnectionTypeID(); } Object[] tableData = new Object[] { new LongObjectId( nextid ).longValue(), code[i], desc[i], }; if ( dryrun ) { sql = database.getSQLOutput( null, tablename, table, tableData, null ); statements.add( sql ); } else { database.setValuesInsert( table, tableData ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( 
KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_WIDTH, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_GUI_LOCATION_HEIGHT, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NOTE_FONT_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_FONT_SIZE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_BOLD, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_FONT_ITALIC, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BACK_GROUND_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_RED, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_GREEN, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NOTE_BORDER_COLOR_BLUE, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_NOTE_DRAW_SHADOW, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NOTE_ID_NOTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( 
"executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_PORT, 7, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = 
KettleDatabaseRepositoryBase.IDX_R_DATABASE_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE, KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_CODE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DIRECTORY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_DIRECTORY; keyfield = new String[] { KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT, 
KettleDatabaseRepository.FIELD_DIRECTORY_DIRECTORY_NAME }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANSFORMATION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_READ, KEY, 0 ) ); 
table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_WRITE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_INPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_OUTPUT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_UPDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_BATCHID, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_LOGFIELD, 1, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_MAXDATE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_FIELD_NAME_MAXDATE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_OFFSET_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_TRANSFORMATION_DIFF_MAXDATE, 12, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( 
KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_SIZE_ROWSET, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( database.checkTableExists( schemaTable ) ) { sql = "SELECT * FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; List<Object[]> rows = database.getRows( sql, 1 ); if ( rows != null && rows.size() > 0 ) { sql = "UPDATE " + schemaTable + " SET " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + "=0 WHERE " + repository.quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ) + " IS NULL"; statements.add( sql ); if ( !dryrun ) { database.execStatement( sql ); } } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( 
new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANS_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_TRANS_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_JOB_ATTRIBUTE_LOOKUP; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } 
database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_DEPENDENCY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DATABASE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_TABLE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DEPENDENCY_FIELD_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_DEPENDENCY_ID_DEPENDENCY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table .addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_DYNAMIC_DEFINITION, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_PARTITIONS_PER_SLAVE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_PARTITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_PARTITION_PARTITION_ID, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_PARTITION_ID_PARTITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_PARTITION_SCHEMA, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_PARTITION_SCHEMA_ID_TRANS_PARTITION_SCHEMA, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC, 0, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_WEB_APP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_USERNAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_HOST_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_PROXY_PORT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_SLAVE_NON_PROXY_HOSTS, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_SLAVE_MASTER ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_SLAVE_ID_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { 
log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CLUSTER_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CLUSTER_SLAVE_ID_CLUSTER_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_SLAVE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_SLAVE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_SLAVE_ID_TRANS_SLAVE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_CLUSTER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_CLUSTER, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_CLUSTER_ID_TRANS_CLUSTER, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_HOP_ID_STEP_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_TRANS_HOP_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_HOP_ID_TRANS_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_STEP_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_STEP, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_STEP_CONDITION_ID_CONDITION, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_CONDITION; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CONDITION_NEGATED, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_OPERATOR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_LEFT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_CONDITION_FUNCTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_CONDITION_RIGHT_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CONDITION_ID_VALUE_RIGHT, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_VALUE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_TYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_VALUE_VALUE_STR, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_VALUE_IS_NULL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_VALUE_ID_VALUE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_step_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_TYPE_HELPTEXT, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, "ID_STEP_TYPE", false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_step_type ) { updateStepTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_STEP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_COPIES, 3, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ID_STEP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( 
log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, KEY, 0 ) ); table .addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_NUM, 18, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.IDX_R_STEP_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, 
KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } tablename = KettleDatabaseRepository.TABLE_R_STEP_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_STEP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() 
) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_STEP_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_loglevel = true; tablename = KettleDatabaseRepository.TABLE_R_LOGLEVEL; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table = new RowMeta(); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_loglevel ) { code = LogLevel.logLogLevelCodes(); desc = LogLevel.getLogLevelDescriptions(); if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 1; i < code.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ) + " FROM " + schemaTable + " WHERE " + database.getDatabaseMeta().quoteField( "CODE" ) + " = '" + code[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i ); if ( !create ) { nextid = repository.connectionDelegate.getNextLoglevelID(); } RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOGLEVEL_ID_LOGLEVEL ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_CODE ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_LOGLEVEL_DESCRIPTION ), desc[i] ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData.getRowMeta(), tableData.getData() ); database.insertRow(); } } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_LOG; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_NAME, 
KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_LOGLEVEL, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_LOGTYPE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILENAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_FILEEXTENTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_DATE, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_LOG_ADD_TIME, 1, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_LOG_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_LOG_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_LOG_ID_LOG, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, 20, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, 0, 
0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, 0, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, 0, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_ID_JOB, false ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = 
KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU1; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_DATABASE_LU2; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( schemaTable, indexname, keyfield, false, false, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } boolean ok_jobentry_type = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_jobentry_type ) { updateJobEntryTypes( statements, dryrun, create ); if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ID_JOBENTRY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_COPY; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 4, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 6, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 6, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_COPY, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOBENTRY_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, 6, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaNumber( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM, 13, 2 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } try { indexname = KettleDatabaseRepositoryBase.R_JOBENTRY_ATTRIBUTE; keyfield = new String[] { KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE, KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR, }; if ( !database.checkIndexExists( schemaTable, keyfield ) ) { sql = database.getCreateIndexStatement( 
schemaTable, indexname, keyfield, false, true, false, false ); statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created lookup index " + indexname + " on " + schemaTable ); } } } } catch ( KettleException kdbe ) { } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_HOP; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_FROM, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_TO, KEY, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_ENABLED, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_EVALUATION, 1, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_HOP_UNCONDITIONAL, 1, 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_NOTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_NOTE_ID_NOTE, KEY, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, null, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_TRANS_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANSFORMATION, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_TRANS_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_TRANS_LOCK_ID_TRANS_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( 
!dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." ); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_JOB_LOCK; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_LOCK_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_MESSAGE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_LOCK_LOCK_DATE, 0, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_JOB_LOCK_ID_JOB_LOCK, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_NAMESPACE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_NAMESPACE_NAME, ( database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH ), 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_NAMESPACE_ID_NAMESPACE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_TYPE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_NAMESPACE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_NAME, getRepoStringLength(), 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_TYPE_ID_ELEMENT_TYPE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT_TYPE, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_NAME, getRepoStringLength(), 0 ) ); sql = database .getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_ELEMENT_ATTRIBUTE; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE_PARENT, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_KEY, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_VALUE, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_ELEMENT_ATTRIBUTE_ID_ELEMENT_ATTRIBUTE, false ); if ( !Utils.isEmpty( sql ) ) { statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( monitor != null ) { monitor.worked( 1 ); } Map<String, ObjectId> users = new Hashtable<String, ObjectId>(); boolean ok_user = true; table = new RowMeta(); tablename = KettleDatabaseRepository.TABLE_R_USER; schemaTable = databaseMeta.getQuotedSchemaTableCombination( null, tablename ); if ( monitor != null ) { monitor.subTask( "Checking table " + schemaTable ); } table.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER, KEY, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) ); table.addValueMeta( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED, 1, 0 ) ); sql = database.getDDL( schemaTable, table, null, false, KettleDatabaseRepository.FIELD_USER_ID_USER, false ); create = false; if ( !Utils.isEmpty( sql ) ) { create = sql.toUpperCase().indexOf( "CREATE TABLE" ) >= 0; statements.add( sql ); if ( !dryrun ) { if ( log.isDetailed() ) { log.logDetailed( "executing SQL statements: " + Const.CR + sql ); } database.execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( "Created or altered table " + schemaTable ); } } } else { if ( log.isDetailed() ) { log.logDetailed( "Table " + schemaTable + " is OK." 
); } } if ( ok_user ) { user = new String[] { "admin", "guest" }; pass = new String[] { "admin", "guest" }; code = new String[] { "Administrator", "Guest account" }; desc = new String[] { "User manager", "Read-only guest account" }; if ( !dryrun ) { database.prepareInsert( table, null, tablename ); } for ( int i = 0; i < user.length; i++ ) { RowMetaAndData lookup = null; if ( upgrade ) { lookup = database.getOneRow( "SELECT " + repository.quote( KettleDatabaseRepository.FIELD_USER_ID_USER ) + " FROM " + schemaTable + " WHERE " + repository.quote( KettleDatabaseRepository.FIELD_USER_LOGIN ) + " = '" + user[i] + "'" ); } if ( lookup == null ) { ObjectId nextid = new LongObjectId( i + 1 ); if ( !create ) { nextid = repository.connectionDelegate.getNextUserID(); } String password = Encr.encryptPassword( pass[i] ); RowMetaAndData tableData = new RowMetaAndData(); tableData.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER ), nextid ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_LOGIN ), user[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_PASSWORD ), password ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME ), code[i] ); tableData.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_DESCRIPTION ), desc[i] ); tableData.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_USER_ENABLED ), Boolean.TRUE ); if ( dryrun ) { sql = database.getSQLOutput( null, tablename, tableData.getRowMeta(), tableData.getData(), null ); statements.add( sql ); } else { database.setValuesInsert( tableData ); database.insertRow(); } users.put( user[i], nextid ); } } try { if ( !dryrun ) { database.closeInsert(); } if ( log.isDetailed() ) { log.logDetailed( "Populated table " + schemaTable ); } } catch ( KettleException dbe ) { throw new KettleException( "Unable to close insert after populating table " + schemaTable, dbe ); } } if ( monitor != 
null ) { monitor.worked( 1 ); } if ( monitor != null ) { monitor.done(); } log.logBasic( ( upgrade ? "Upgraded" : "Created" ) + " " + KettleDatabaseRepository.repositoryTableNames.length + " repository tables." ); } KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ); synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade,
List<String> statements, boolean dryrun ); List<String> updateStepTypes( List<String> statements, boolean dryrun, boolean create ); List<String> updateDatabaseTypes( List<String> statements, boolean dryrun, boolean create ); void updateJobEntryTypes( List<String> statements, boolean dryrun, boolean create ); }
|
@Test public void testCallExtensionPointEmbedded() throws Exception { when( runConfigurationManager.load( "RUN_CONF" ) ).thenReturn( null ); try { runConfigurationRunExtensionPoint.callExtensionPoint( log, new Object[] { transExecutionConfiguration, abstractMeta, variableSpace } ); fail(); } catch ( Exception e ) { } }
|
@Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } }
|
RunConfigurationRunExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } } }
|
RunConfigurationRunExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } } RunConfigurationRunExtensionPoint( RunConfigurationManager runConfigurationManager ); }
|
RunConfigurationRunExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } } RunConfigurationRunExtensionPoint( RunConfigurationManager runConfigurationManager ); @Override void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ); }
|
RunConfigurationRunExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } } RunConfigurationRunExtensionPoint( RunConfigurationManager runConfigurationManager ); @Override void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ); }
|
@Test public void testCallExtensionPoint() throws Exception { runConfigurationImportExtensionPoint.callExtensionPoint( log, abstractMeta ); verify( abstractMeta ).getEmbeddedMetaStore(); }
|
@Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { AbstractMeta abstractMeta = (AbstractMeta) o; final EmbeddedMetaStore embeddedMetaStore = abstractMeta.getEmbeddedMetaStore(); RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); List<RunConfiguration> runConfigurationList = embeddedRunConfigurationManager.load(); for ( RunConfiguration runConfiguration : runConfigurationList ) { if ( !runConfiguration.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { runConfigurationManager.save( runConfiguration ); } } }
|
RunConfigurationImportExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { AbstractMeta abstractMeta = (AbstractMeta) o; final EmbeddedMetaStore embeddedMetaStore = abstractMeta.getEmbeddedMetaStore(); RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); List<RunConfiguration> runConfigurationList = embeddedRunConfigurationManager.load(); for ( RunConfiguration runConfiguration : runConfigurationList ) { if ( !runConfiguration.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { runConfigurationManager.save( runConfiguration ); } } } }
|
RunConfigurationImportExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { AbstractMeta abstractMeta = (AbstractMeta) o; final EmbeddedMetaStore embeddedMetaStore = abstractMeta.getEmbeddedMetaStore(); RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); List<RunConfiguration> runConfigurationList = embeddedRunConfigurationManager.load(); for ( RunConfiguration runConfiguration : runConfigurationList ) { if ( !runConfiguration.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { runConfigurationManager.save( runConfiguration ); } } } RunConfigurationImportExtensionPoint( RunConfigurationManager runConfigurationManager ); }
|
RunConfigurationImportExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { AbstractMeta abstractMeta = (AbstractMeta) o; final EmbeddedMetaStore embeddedMetaStore = abstractMeta.getEmbeddedMetaStore(); RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); List<RunConfiguration> runConfigurationList = embeddedRunConfigurationManager.load(); for ( RunConfiguration runConfiguration : runConfigurationList ) { if ( !runConfiguration.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { runConfigurationManager.save( runConfiguration ); } } } RunConfigurationImportExtensionPoint( RunConfigurationManager runConfigurationManager ); @Override void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ); }
|
RunConfigurationImportExtensionPoint implements ExtensionPointInterface { @Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { AbstractMeta abstractMeta = (AbstractMeta) o; final EmbeddedMetaStore embeddedMetaStore = abstractMeta.getEmbeddedMetaStore(); RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); List<RunConfiguration> runConfigurationList = embeddedRunConfigurationManager.load(); for ( RunConfiguration runConfiguration : runConfigurationList ) { if ( !runConfiguration.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) { runConfigurationManager.save( runConfiguration ); } } } RunConfigurationImportExtensionPoint( RunConfigurationManager runConfigurationManager ); @Override void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ); }
|
@Test public void testExecuteLocalTrans() throws Exception { DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration(); defaultRunConfiguration.setName( "Default Configuration" ); defaultRunConfiguration.setLocal( true ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); defaultRunConfigurationExecutor .execute( defaultRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); assertTrue( transExecutionConfiguration.isExecutingLocally() ); }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
@Test public void testExecuteRemoteTrans() throws Exception { DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration(); defaultRunConfiguration.setName( "Default Configuration" ); defaultRunConfiguration.setLocal( false ); defaultRunConfiguration.setRemote( true ); defaultRunConfiguration.setServer( "Test Server" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); doReturn( slaveServer ).when( abstractMeta ).findSlaveServer( "Test Server" ); defaultRunConfigurationExecutor .execute( defaultRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); assertFalse( transExecutionConfiguration.isExecutingLocally() ); assertTrue( transExecutionConfiguration.isExecutingRemotely() ); assertEquals( transExecutionConfiguration.getRemoteServer(), slaveServer ); }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
@Test public void testExecutePentahoTrans() throws Exception { DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration(); defaultRunConfiguration.setName( "Default Configuration" ); defaultRunConfiguration.setLocal( false ); defaultRunConfiguration.setPentaho( true ); defaultRunConfiguration.setRemote( false ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); defaultRunConfigurationExecutor .execute( defaultRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); assertFalse( transExecutionConfiguration.isExecutingLocally() ); assertFalse( transExecutionConfiguration.isExecutingRemotely() ); }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
@Test public void testExecuteClusteredTrans() throws Exception { DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration(); defaultRunConfiguration.setName( "Default Configuration" ); defaultRunConfiguration.setLocal( false ); defaultRunConfiguration.setRemote( false ); defaultRunConfiguration.setClustered( true ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); defaultRunConfigurationExecutor .execute( defaultRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); assertTrue( transExecutionConfiguration.isExecutingClustered() ); assertFalse( transExecutionConfiguration.isExecutingRemotely() ); assertFalse( transExecutionConfiguration.isExecutingLocally() ); }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
@Test public void testExecuteRemoteNotFoundTrans() throws Exception { DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration(); defaultRunConfiguration.setName( "Default Configuration" ); defaultRunConfiguration.setLocal( false ); defaultRunConfiguration.setRemote( true ); defaultRunConfiguration.setServer( "Test Server" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); doReturn( slaveServer ).when( abstractMeta ).findSlaveServer( null ); try { defaultRunConfigurationExecutor .execute( defaultRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); fail(); } catch ( KettleException e ) { } }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
@Test public void testExecuteLocalJob() throws Exception { DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration(); defaultRunConfiguration.setName( "Default Configuration" ); defaultRunConfiguration.setLocal( true ); JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration(); defaultRunConfigurationExecutor .execute( defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace ); assertTrue( jobExecutionConfiguration.isExecutingLocally() ); }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
@Test public void testExecuteRemoteJob() throws Exception { DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration(); defaultRunConfiguration.setName( "Default Configuration" ); defaultRunConfiguration.setLocal( false ); defaultRunConfiguration.setRemote( true ); defaultRunConfiguration.setServer( "Test Server" ); JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration(); doReturn( slaveServer ).when( abstractMeta ).findSlaveServer( "Test Server" ); defaultRunConfigurationExecutor .execute( defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace ); assertFalse( jobExecutionConfiguration.isExecutingLocally() ); assertTrue( jobExecutionConfiguration.isExecutingRemotely() ); assertEquals( jobExecutionConfiguration.getRemoteServer(), slaveServer ); }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
@Test public void testExecuteRemoteNotFoundJob() throws Exception { DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration(); defaultRunConfiguration.setName( "Default Configuration" ); defaultRunConfiguration.setLocal( false ); defaultRunConfiguration.setRemote( true ); defaultRunConfiguration.setServer( "Test Server" ); JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration(); doReturn( slaveServer ).when( abstractMeta ).findSlaveServer( null ); try { defaultRunConfigurationExecutor .execute( defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace ); fail(); } catch ( KettleException e ) { } }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
@Test public void testOracleDBRepoStringLength() throws Exception { KettleEnvironment.init(); DatabaseMeta databaseMeta = new DatabaseMeta( "OraRepo", "ORACLE", "JDBC", null, "test", null, null, null ); repositoryMeta = new KettleDatabaseRepositoryMeta( "KettleDatabaseRepository", "OraRepo", "Ora Repository", databaseMeta ); repository = new KettleDatabaseRepository(); repository.init( repositoryMeta ); KettleDatabaseRepositoryCreationHelper helper = new KettleDatabaseRepositoryCreationHelper( repository ); int repoStringLength = helper.getRepoStringLength(); assertEquals( EXPECTED_ORACLE_DB_REPO_STRING, repoStringLength ); }
|
protected int getRepoStringLength() { return database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta() .getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH; }
|
KettleDatabaseRepositoryCreationHelper { protected int getRepoStringLength() { return database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta() .getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH; } }
|
KettleDatabaseRepositoryCreationHelper { protected int getRepoStringLength() { return database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta() .getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH; } KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ); }
|
KettleDatabaseRepositoryCreationHelper { protected int getRepoStringLength() { return database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta() .getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH; } KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ); synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade,
List<String> statements, boolean dryrun ); List<String> updateStepTypes( List<String> statements, boolean dryrun, boolean create ); List<String> updateDatabaseTypes( List<String> statements, boolean dryrun, boolean create ); void updateJobEntryTypes( List<String> statements, boolean dryrun, boolean create ); }
|
KettleDatabaseRepositoryCreationHelper { protected int getRepoStringLength() { return database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta() .getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH; } KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ); synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade,
List<String> statements, boolean dryrun ); List<String> updateStepTypes( List<String> statements, boolean dryrun, boolean create ); List<String> updateDatabaseTypes( List<String> statements, boolean dryrun, boolean create ); void updateJobEntryTypes( List<String> statements, boolean dryrun, boolean create ); }
|
@Test public void testExecutePentahoJob() throws Exception { DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration(); defaultRunConfiguration.setName( "Default Configuration" ); defaultRunConfiguration.setLocal( false ); defaultRunConfiguration.setPentaho( true ); defaultRunConfiguration.setRemote( false ); JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration(); defaultRunConfigurationExecutor .execute( defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace ); assertFalse( jobExecutionConfiguration.isExecutingLocally() ); assertFalse( jobExecutionConfiguration.isExecutingRemotely() ); }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
DefaultRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration,
AbstractMeta meta,
VariableSpace variableSpace ); }
|
@Test public void testExecuteRSADaemon() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); sparkRunConfiguration.setUrl( "ws: doReturn( "1.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine", "remote" ); verify( variableSpace ).setVariable( "engine.remote", "spark" ); verify( properties ).put( "zookeeper.host", "127.0.0.2" ); verify( properties ).put( "zookeeper.port", "8121" ); verify( variableSpace ).setVariable( "engine.host", null ); verify( variableSpace ).setVariable( "engine.port", null ); }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); 
variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); static String ZOOKEEPER_CAPABILITY_ID; static String PENTAHO_SERVER_CAPABILITY_ID; static String CONFIG_KEY; static String JAAS_CAPABILITY_ID; static String AEL_SECURITY_CAPABILITY_ID; static String DEFAULT_PROTOCOL; static String DEFAULT_HOST; static String DEFAULT_ZOOKEEPER_PORT; static String DEFAULT_WEBSOCKET_PORT; }
|
@Test public void testExecuteNoPortRSADaemon() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); sparkRunConfiguration.setUrl( "zk: TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); doReturn( "1.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine", "remote" ); verify( variableSpace ).setVariable( "engine.remote", "spark" ); verify( properties ).put( "zookeeper.host", SparkRunConfigurationExecutor.DEFAULT_HOST ); verify( properties ).put( "zookeeper.port", SparkRunConfigurationExecutor.DEFAULT_ZOOKEEPER_PORT ); verify( variableSpace ).setVariable( "engine.host", null ); verify( variableSpace ).setVariable( "engine.port", null ); }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); 
variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); static String ZOOKEEPER_CAPABILITY_ID; static String PENTAHO_SERVER_CAPABILITY_ID; static String CONFIG_KEY; static String JAAS_CAPABILITY_ID; static String AEL_SECURITY_CAPABILITY_ID; static String DEFAULT_PROTOCOL; static String DEFAULT_HOST; static String DEFAULT_ZOOKEEPER_PORT; static String DEFAULT_WEBSOCKET_PORT; }
|
@Test public void testWebSocketVersionExecute() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); sparkRunConfiguration.setUrl( "http: doReturn( "2.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine", "remote" ); verify( variableSpace ).setVariable( "engine.remote", "spark" ); verify( properties ).remove( "zookeeper.host" ); verify( properties ).remove( "zookeeper.port" ); verify( variableSpace ).setVariable( "engine.host", "127.0.0.2" ); verify( variableSpace ).setVariable( "engine.port", "8121" ); }
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); 
variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); static String ZOOKEEPER_CAPABILITY_ID; static String PENTAHO_SERVER_CAPABILITY_ID; static String CONFIG_KEY; static String JAAS_CAPABILITY_ID; static String AEL_SECURITY_CAPABILITY_ID; static String DEFAULT_PROTOCOL; static String DEFAULT_HOST; static String DEFAULT_ZOOKEEPER_PORT; static String DEFAULT_WEBSOCKET_PORT; }
|
@Test public void testWebSocketVersionExecuteNoPort() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); doReturn( "2.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine", "remote" ); verify( variableSpace ).setVariable( "engine.remote", "spark" ); verify( properties ).remove( "zookeeper.host" ); verify( properties ).remove( "zookeeper.port" ); verify( variableSpace ).setVariable( "engine.protocol", SparkRunConfigurationExecutor.DEFAULT_PROTOCOL ); verify( variableSpace ).setVariable( "engine.host", SparkRunConfigurationExecutor.DEFAULT_HOST ); verify( variableSpace ).setVariable( "engine.port", SparkRunConfigurationExecutor.DEFAULT_WEBSOCKET_PORT ); }
|
/**
 * Configures the given {@code variableSpace} (and, when not running inside a
 * Pentaho server, the ZooKeeper OSGi configuration) so that the transformation
 * executes on the remote AEL Spark engine. Side effects are order-sensitive:
 * the final engine=remote / engine.remote=spark assignments always run last.
 *
 * @param runConfiguration the run configuration; must be a {@code SparkRunConfiguration}
 * @param configuration    the execution configuration (not used here)
 * @param meta             the transformation/job metadata (not used here)
 * @param variableSpace    variable space that receives the engine.* selections
 */
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
                               AbstractMeta meta, VariableSpace variableSpace ) {
  // Make sure JAAS is available whenever the AEL security capability is installed.
  ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID );
  ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID );
  if ( securityCapability != null && securityCapability.isInstalled() ) {
    if ( jaasCapability != null && !jaasCapability.isInstalled() ) {
      jaasCapability.install();
    }
  }
  // The ZooKeeper capability is required by the legacy (pre-2.0) daemon path.
  ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID );
  if ( capability != null && !capability.isInstalled() ) {
    capability.install();
  }
  SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration;
  String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" );
  URI uri = URI.create( runConfigURL.trim() );
  String protocol = uri.getScheme();
  String host = uri.getHost();
  // URI.getPort() returns -1 when the URL carries no explicit port; keep null
  // so Const.NVL below substitutes the appropriate default.
  String port = uri.getPort() == -1 ? null : String.valueOf( uri.getPort() );
  String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" );
  boolean version2 = Const.toDouble( version, 1 ) >= 2;
  boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null;
  if ( version2 ) {
    // WebSocket daemon (version >= 2): publish the connection coordinates.
    variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) );
    variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) );
    variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) );
  }
  if ( !serverMode ) {
    try {
      Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY );
      Dictionary<String, Object> properties = zookeeperConfiguration.getProperties();
      if ( properties != null ) {
        if ( !version2 ) {
          // Legacy daemon: route through ZooKeeper and clear the WebSocket variables.
          properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) );
          properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) );
          variableSpace.setVariable( "engine.protocol", null );
          variableSpace.setVariable( "engine.host", null );
          variableSpace.setVariable( "engine.port", null );
        } else {
          // WebSocket daemon: drop stale ZooKeeper routing entries.
          properties.remove( "zookeeper.host" );
          properties.remove( "zookeeper.port" );
        }
        zookeeperConfiguration.update( properties );
      }
    } catch ( IOException ioe ) {
      // Fix: the original swallowed the exception with a bare, causeless
      // message; at minimum surface the failure reason.
      System.out.println( "Error occurred accessing configuration: " + ioe.getMessage() );
    }
  }
  variableSpace.setVariable( "engine", "remote" );
  variableSpace.setVariable( "engine.remote", "spark" );
}
|
// Run-configuration executor that points PDI at the remote AEL Spark engine.
// execute() performs order-sensitive side effects (code left byte-identical):
//  1. installs the JAAS capability when AEL security is installed but JAAS is not;
//  2. installs the ZooKeeper capability if missing (needed by the pre-2.0 daemon);
//  3. parses protocol/host/port from the (trimmed) run-configuration URL;
//     uri.getPort() == -1 means "no port given" -> null so Const.NVL picks a default;
//  4. when KETTLE_AEL_PDI_DAEMON_VERSION (default "2.0") is >= 2, publishes
//     engine.protocol / engine.host / engine.port for the WebSocket daemon;
//  5. outside a Pentaho server, updates the CONFIG_KEY OSGi configuration:
//     pre-2 versions write zookeeper.host/port and null the engine.* variables,
//     2+ removes the zookeeper entries instead.
//     NOTE(review): the IOException is swallowed with a bare println and no
//     cause -- consider logging the exception details;
//  6. finally selects the engine via engine=remote / engine.remote=spark.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } }
|
// Run-configuration executor targeting the remote AEL Spark engine (this copy
// also declares the ConfigurationAdmin constructor). Code left byte-identical.
// execute(), in order: install JAAS when AEL security is present; install the
// ZooKeeper capability if missing; parse protocol/host/port from the trimmed
// run-configuration URL (port -1 -> null so Const.NVL supplies a default);
// for daemon version >= 2 publish engine.protocol/host/port; outside a Pentaho
// server either write zookeeper.host/port (pre-2, also nulling engine.*) or
// remove them (2+) in the CONFIG_KEY OSGi configuration; finally select
// engine=remote / engine.remote=spark.
// NOTE(review): the IOException is swallowed with a bare println and no cause.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); }
|
// Run-configuration executor targeting the remote AEL Spark engine (this copy
// also lists the constructor and execute() signatures). Code left byte-identical.
// execute(), in order: install JAAS when AEL security is present; install the
// ZooKeeper capability if missing; parse protocol/host/port from the trimmed
// run-configuration URL (port -1 -> null so Const.NVL supplies a default);
// for daemon version >= 2 publish engine.protocol/host/port; outside a Pentaho
// server either write zookeeper.host/port (pre-2, also nulling engine.*) or
// remove them (2+) in the CONFIG_KEY OSGi configuration; finally select
// engine=remote / engine.remote=spark.
// NOTE(review): the IOException is swallowed with a bare println and no cause.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); }
|
// Run-configuration executor targeting the remote AEL Spark engine (this copy
// also lists the constructor/execute() signatures and the static ID/default
// constants). Code left byte-identical.
// execute(), in order: install JAAS when AEL security is present; install the
// ZooKeeper capability if missing; parse protocol/host/port from the trimmed
// run-configuration URL (port -1 -> null so Const.NVL supplies a default);
// for daemon version >= 2 publish engine.protocol/host/port; outside a Pentaho
// server either write zookeeper.host/port (pre-2, also nulling engine.*) or
// remove them (2+) in the CONFIG_KEY OSGi configuration; finally select
// engine=remote / engine.remote=spark.
// NOTE(review): the IOException is swallowed with a bare println and no cause.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); static String ZOOKEEPER_CAPABILITY_ID; static String PENTAHO_SERVER_CAPABILITY_ID; static String CONFIG_KEY; static String JAAS_CAPABILITY_ID; static String AEL_SECURITY_CAPABILITY_ID; static String DEFAULT_PROTOCOL; static String DEFAULT_HOST; static String DEFAULT_ZOOKEEPER_PORT; static String DEFAULT_WEBSOCKET_PORT; }
|
@Test public void testWssWebSocketVersionExecute() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); sparkRunConfiguration.setUrl( "wss: doReturn( "2.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine.protocol", "wss" ); verify( variableSpace ).setVariable( "engine.host", "127.0.0.2" ); verify( variableSpace ).setVariable( "engine.port", "8121" ); }
|
// Configures the variable space (and, outside a Pentaho server, the ZooKeeper
// OSGi configuration) so the transformation runs on the remote AEL Spark
// engine. Order-sensitive side effects -- code left byte-identical:
//  1. install JAAS when AEL security is installed but JAAS is not;
//  2. install the ZooKeeper capability if missing (pre-2.0 daemon dependency);
//  3. parse protocol/host/port from the trimmed run-configuration URL
//     (uri.getPort() == -1 -> null so Const.NVL substitutes a default);
//  4. daemon version >= 2 (KETTLE_AEL_PDI_DAEMON_VERSION, default "2.0"):
//     publish engine.protocol/host/port for the WebSocket daemon;
//  5. non-server mode: pre-2 writes zookeeper.host/port and nulls engine.*,
//     2+ removes the zookeeper entries, then updates the CONFIG_KEY config.
//     NOTE(review): IOException swallowed with a bare println, no cause logged;
//  6. always finish by selecting engine=remote / engine.remote=spark.
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); 
variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); }
|
// Run-configuration executor that points PDI at the remote AEL Spark engine.
// execute() performs order-sensitive side effects (code left byte-identical):
// install JAAS when AEL security is present; install the ZooKeeper capability
// if missing; parse protocol/host/port from the trimmed run-configuration URL
// (port -1 -> null so Const.NVL supplies a default); daemon version >= 2
// publishes engine.protocol/host/port; outside a Pentaho server either write
// zookeeper.host/port (pre-2, also nulling engine.*) or remove them (2+) in
// the CONFIG_KEY OSGi configuration; finally select engine=remote /
// engine.remote=spark.
// NOTE(review): the IOException is swallowed with a bare println and no cause.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } }
|
// Run-configuration executor targeting the remote AEL Spark engine (this copy
// also declares the ConfigurationAdmin constructor). Code left byte-identical.
// execute(), in order: install JAAS when AEL security is present; install the
// ZooKeeper capability if missing; parse protocol/host/port from the trimmed
// run-configuration URL (port -1 -> null so Const.NVL supplies a default);
// daemon version >= 2 publishes engine.protocol/host/port; outside a Pentaho
// server either write zookeeper.host/port (pre-2, also nulling engine.*) or
// remove them (2+) in the CONFIG_KEY OSGi configuration; finally select
// engine=remote / engine.remote=spark.
// NOTE(review): the IOException is swallowed with a bare println and no cause.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); }
|
// Run-configuration executor targeting the remote AEL Spark engine (this copy
// also lists the constructor and execute() signatures). Code left byte-identical.
// execute(), in order: install JAAS when AEL security is present; install the
// ZooKeeper capability if missing; parse protocol/host/port from the trimmed
// run-configuration URL (port -1 -> null so Const.NVL supplies a default);
// daemon version >= 2 publishes engine.protocol/host/port; outside a Pentaho
// server either write zookeeper.host/port (pre-2, also nulling engine.*) or
// remove them (2+) in the CONFIG_KEY OSGi configuration; finally select
// engine=remote / engine.remote=spark.
// NOTE(review): the IOException is swallowed with a bare println and no cause.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); }
|
// Run-configuration executor targeting the remote AEL Spark engine (this copy
// also lists the constructor/execute() signatures and the static ID/default
// constants). Code left byte-identical.
// execute(), in order: install JAAS when AEL security is present; install the
// ZooKeeper capability if missing; parse protocol/host/port from the trimmed
// run-configuration URL (port -1 -> null so Const.NVL supplies a default);
// daemon version >= 2 publishes engine.protocol/host/port; outside a Pentaho
// server either write zookeeper.host/port (pre-2, also nulling engine.*) or
// remove them (2+) in the CONFIG_KEY OSGi configuration; finally select
// engine=remote / engine.remote=spark.
// NOTE(review): the IOException is swallowed with a bare println and no cause.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); static String ZOOKEEPER_CAPABILITY_ID; static String PENTAHO_SERVER_CAPABILITY_ID; static String CONFIG_KEY; static String JAAS_CAPABILITY_ID; static String AEL_SECURITY_CAPABILITY_ID; static String DEFAULT_PROTOCOL; static String DEFAULT_HOST; static String DEFAULT_ZOOKEEPER_PORT; static String DEFAULT_WEBSOCKET_PORT; }
|
@Test public void testUrlWssWebSocketVersionExecute() { SparkRunConfiguration sparkRunConfiguration = new SparkRunConfiguration(); sparkRunConfiguration.setName( "Spark Configuration" ); sparkRunConfiguration.setUrl( " ws: doReturn( "2.0" ).when( variableSpace ).getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(); sparkRunConfigurationExecutor .execute( sparkRunConfiguration, transExecutionConfiguration, abstractMeta, variableSpace ); verify( variableSpace ).setVariable( "engine.protocol", "ws" ); verify( variableSpace ).setVariable( "engine.host", "127.0.0.2" ); verify( variableSpace ).setVariable( "engine.port", "8121" ); }
|
// Configures a transformation run on the AEL Spark engine: installs the JAAS capability
// when AEL security is already installed, installs the ZooKeeper capability on demand,
// parses the run-config URL into protocol/host/port, and publishes engine.* variables
// (version >= 2) or writes zookeeper.host/port into the OSGi configuration (version < 2,
// non-server mode). IOException from ConfigurationAdmin is only printed to stdout.
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); 
variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); }
|
// RunConfigurationExecutor for the AEL Spark engine. execute() installs required
// capabilities (JAAS when AEL security is present, ZooKeeper on demand), parses the
// run-config URL, publishes engine.* variables for daemon version >= 2, and — outside
// Pentaho Server mode — synchronizes zookeeper.host/port in the OSGi configuration.
// NOTE(review): depends on fields (capabilityManager, configurationAdmin) declared
// outside this fragment.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } }
|
// AEL Spark RunConfigurationExecutor (variant exposing the ConfigurationAdmin
// constructor). See execute(): capability installation, URL parsing, engine.*
// variable publication, and conditional ZooKeeper OSGi-config synchronization.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); }
|
// AEL Spark RunConfigurationExecutor (variant listing the constructor and the
// execute() declaration). Behavior: install JAAS/ZooKeeper capabilities as needed,
// parse the run-config URL, publish engine.* variables, and maintain the ZooKeeper
// OSGi configuration when not running inside Pentaho Server.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); }
|
// AEL Spark RunConfigurationExecutor (variant listing constructor, execute()
// declaration, and the public static String IDs/defaults used by execute():
// capability IDs, OSGi CONFIG_KEY, and DEFAULT_* fallbacks for protocol/host/ports).
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); static String ZOOKEEPER_CAPABILITY_ID; static String PENTAHO_SERVER_CAPABILITY_ID; static String CONFIG_KEY; static String JAAS_CAPABILITY_ID; static String AEL_SECURITY_CAPABILITY_ID; static String DEFAULT_PROTOCOL; static String DEFAULT_HOST; static String DEFAULT_ZOOKEEPER_PORT; static String DEFAULT_WEBSOCKET_PORT; }
|
// When the AEL security capability is installed but JAAS is not, execute() must
// check the JAAS capability's state and trigger its installation.
@Test
public void testExecuteWithAelSecurityInstalled() {
  ICapability security = mock( ICapability.class );
  setCapability( security, SparkRunConfigurationExecutor.AEL_SECURITY_CAPABILITY_ID, true );
  ICapability jaas = mock( ICapability.class );
  setCapability( jaas, SparkRunConfigurationExecutor.JAAS_CAPABILITY_ID, false );

  SparkRunConfiguration runConfig = new SparkRunConfiguration();
  runConfig.setName( "Spark Configuration" );
  TransExecutionConfiguration execConfig = new TransExecutionConfiguration();

  sparkRunConfigurationExecutor.execute( runConfig, execConfig, abstractMeta, variableSpace );

  verify( jaas ).isInstalled();
  verify( jaas ).install();
}
|
// Runs AEL Spark setup for a transformation: ensures JAAS (when AEL security is
// installed) and ZooKeeper capabilities are installed, parses the run-config URL,
// sets engine.* variables for daemon version >= 2, and outside Pentaho Server mode
// either writes zookeeper.host/port (version < 2) or removes them (version >= 2)
// in the OSGi configuration before marking the engine as remote/spark.
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); 
variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); }
|
// AEL Spark RunConfigurationExecutor (minimal variant, execute() only).
// Capability install -> URL parse -> engine.* variables -> conditional ZooKeeper
// OSGi-config sync -> engine/engine.remote markers.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } }
|
// AEL Spark RunConfigurationExecutor (variant with ConfigurationAdmin constructor).
// execute() installs capabilities, parses the run-config URL, publishes engine.*
// variables, and keeps the ZooKeeper OSGi configuration in sync outside server mode.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); }
|
// AEL Spark RunConfigurationExecutor (variant with constructor and execute()
// declaration). Same behavior as the other variants: capability installation,
// run-config URL parsing, engine.* variable publication, and ZooKeeper OSGi-config
// maintenance when not running inside Pentaho Server.
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); }
|
// AEL Spark RunConfigurationExecutor (fullest variant: constructor, execute()
// declaration, and the public static String capability IDs, CONFIG_KEY, and
// DEFAULT_* fallbacks that execute() reads).
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); static String ZOOKEEPER_CAPABILITY_ID; static String PENTAHO_SERVER_CAPABILITY_ID; static String CONFIG_KEY; static String JAAS_CAPABILITY_ID; static String AEL_SECURITY_CAPABILITY_ID; static String DEFAULT_PROTOCOL; static String DEFAULT_HOST; static String DEFAULT_ZOOKEEPER_PORT; static String DEFAULT_WEBSOCKET_PORT; }
|
// When the AEL security capability is not installed, execute() must not even
// query the JAAS capability's installation state.
@Test
public void testExecuteWithNoAelSecurityInstalled() {
  ICapability security = mock( ICapability.class );
  setCapability( security, SparkRunConfigurationExecutor.AEL_SECURITY_CAPABILITY_ID, false );
  ICapability jaas = mock( ICapability.class );
  setCapability( jaas, SparkRunConfigurationExecutor.JAAS_CAPABILITY_ID, false );

  SparkRunConfiguration runConfig = new SparkRunConfiguration();
  runConfig.setName( "Spark Configuration" );
  TransExecutionConfiguration execConfig = new TransExecutionConfiguration();

  sparkRunConfigurationExecutor.execute( runConfig, execConfig, abstractMeta, variableSpace );

  verify( jaas, never() ).isInstalled();
}
|
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); 
variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); }
|
SparkRunConfigurationExecutor implements RunConfigurationExecutor { @Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration, AbstractMeta meta, VariableSpace variableSpace ) { ICapability securityCapability = capabilityManager.getCapabilityById( AEL_SECURITY_CAPABILITY_ID ); ICapability jaasCapability = capabilityManager.getCapabilityById( JAAS_CAPABILITY_ID ); if ( securityCapability != null && securityCapability.isInstalled() ) { if ( jaasCapability != null && !jaasCapability.isInstalled() ) { jaasCapability.install(); } } ICapability capability = capabilityManager.getCapabilityById( ZOOKEEPER_CAPABILITY_ID ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } SparkRunConfiguration sparkRunConfiguration = (SparkRunConfiguration) runConfiguration; String runConfigURL = Const.NVL( sparkRunConfiguration.getUrl(), "" ); URI uri = URI.create( runConfigURL.trim() ); String protocol = uri.getScheme(); String host = uri.getHost(); String port = uri.getPort() == -1 ? 
null : String.valueOf( uri.getPort() ); String version = variableSpace.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); boolean version2 = Const.toDouble( version, 1 ) >= 2; boolean serverMode = capabilityManager.getCapabilityById( PENTAHO_SERVER_CAPABILITY_ID ) != null; if ( version2 ) { variableSpace.setVariable( "engine.protocol", Const.NVL( protocol, DEFAULT_PROTOCOL ) ); variableSpace.setVariable( "engine.host", Const.NVL( host, DEFAULT_HOST ) ); variableSpace.setVariable( "engine.port", Const.NVL( port, DEFAULT_WEBSOCKET_PORT ) ); } if ( !serverMode ) { try { Configuration zookeeperConfiguration = configurationAdmin.getConfiguration( CONFIG_KEY ); Dictionary<String, Object> properties = zookeeperConfiguration.getProperties(); if ( properties != null ) { if ( !version2 ) { properties.put( "zookeeper.host", Const.NVL( host, DEFAULT_HOST ) ); properties.put( "zookeeper.port", Const.NVL( port, DEFAULT_ZOOKEEPER_PORT ) ); variableSpace.setVariable( "engine.protocol", null ); variableSpace.setVariable( "engine.host", null ); variableSpace.setVariable( "engine.port", null ); } else { properties.remove( "zookeeper.host" ); properties.remove( "zookeeper.port" ); } zookeeperConfiguration.update( properties ); } } catch ( IOException ioe ) { System.out.println( "Error occurred accessing configuration" ); } } variableSpace.setVariable( "engine", "remote" ); variableSpace.setVariable( "engine.remote", "spark" ); } SparkRunConfigurationExecutor( ConfigurationAdmin configurationAdmin ); @Override void execute( RunConfiguration runConfiguration, ExecutionConfiguration configuration,
AbstractMeta meta, VariableSpace variableSpace ); static String ZOOKEEPER_CAPABILITY_ID; static String PENTAHO_SERVER_CAPABILITY_ID; static String CONFIG_KEY; static String JAAS_CAPABILITY_ID; static String AEL_SECURITY_CAPABILITY_ID; static String DEFAULT_PROTOCOL; static String DEFAULT_HOST; static String DEFAULT_ZOOKEEPER_PORT; static String DEFAULT_WEBSOCKET_PORT; }
|
@Test public void testInputFileSurroundedBySingleQuotes() throws Exception { String datafile = "test-data-file"; loader = new GPBulkLoader( mockHelper.stepMeta, mockHelper.stepDataInterface, 0, mockHelper.transMeta, mockHelper.trans ); DatabaseMeta dbMetaMock = mock( DatabaseMeta.class ); doReturn( "" ).when( dbMetaMock ).getQuotedSchemaTableCombination( anyString(), anyString() ); doReturn( "" ).when( dbMetaMock ).quoteField( anyString() ); GPBulkLoaderMeta meta = new GPBulkLoaderMeta(); meta.setLoadAction( "" ); meta.setFieldStream( new String[] { "" } ); meta.setFieldTable( new String[] { "" } ); meta.setDatabaseMeta( dbMetaMock ); meta.setDataFile( datafile ); String actual = loader.getControlFileContents( meta, null, null ); int first = actual.indexOf( datafile ); if ( first > 0 ) { if ( actual.charAt( first - 1 ) != '\'' || actual.charAt( first + datafile.length() ) != '\'' ) { Assert.fail( "Datafile name is not surrounded by single quotes. Actual control file: " + actual ); } } else { Assert.fail( "Datafile name not found in control file. Actual control file: " + actual ); } }
|
public String getControlFileContents( GPBulkLoaderMeta meta, RowMetaInterface rm, Object[] r ) throws KettleException { DatabaseMeta dm = meta.getDatabaseMeta(); String inputName = "'" + environmentSubstitute( meta.getDataFile() ) + "'"; String loadAction = meta.getLoadAction(); StringBuffer contents = new StringBuffer( 500 ); String tableName = dm.getQuotedSchemaTableCombination( environmentSubstitute( meta.getSchemaName() ), environmentSubstitute( meta.getTableName() ) ); if ( loadAction.equalsIgnoreCase( "truncate" ) ) { contents.append( loadAction + " " ); contents.append( tableName + ";" ); contents.append( Const.CR ); } contents.append( "\\COPY " ); contents.append( tableName ); contents.append( " ( " ); String[] streamFields = meta.getFieldStream(); String[] tableFields = meta.getFieldTable(); if ( streamFields == null || streamFields.length == 0 ) { throw new KettleException( "No fields defined to load to database" ); } for ( int i = 0; i < streamFields.length; i++ ) { if ( i != 0 ) { contents.append( ", " ); } contents.append( dm.quoteField( tableFields[i] ) ); } contents.append( " ) " ); contents.append( " FROM " ); contents.append( inputName ); contents.append( " WITH CSV " ); contents.append( "LOG ERRORS INTO " ); contents.append( tableName + "_errors " ); contents.append( " SEGMENT REJECT LIMIT " ); contents.append( meta.getMaxErrors() ); return contents.toString(); }
|
GPBulkLoader extends BaseStep implements StepInterface { public String getControlFileContents( GPBulkLoaderMeta meta, RowMetaInterface rm, Object[] r ) throws KettleException { DatabaseMeta dm = meta.getDatabaseMeta(); String inputName = "'" + environmentSubstitute( meta.getDataFile() ) + "'"; String loadAction = meta.getLoadAction(); StringBuffer contents = new StringBuffer( 500 ); String tableName = dm.getQuotedSchemaTableCombination( environmentSubstitute( meta.getSchemaName() ), environmentSubstitute( meta.getTableName() ) ); if ( loadAction.equalsIgnoreCase( "truncate" ) ) { contents.append( loadAction + " " ); contents.append( tableName + ";" ); contents.append( Const.CR ); } contents.append( "\\COPY " ); contents.append( tableName ); contents.append( " ( " ); String[] streamFields = meta.getFieldStream(); String[] tableFields = meta.getFieldTable(); if ( streamFields == null || streamFields.length == 0 ) { throw new KettleException( "No fields defined to load to database" ); } for ( int i = 0; i < streamFields.length; i++ ) { if ( i != 0 ) { contents.append( ", " ); } contents.append( dm.quoteField( tableFields[i] ) ); } contents.append( " ) " ); contents.append( " FROM " ); contents.append( inputName ); contents.append( " WITH CSV " ); contents.append( "LOG ERRORS INTO " ); contents.append( tableName + "_errors " ); contents.append( " SEGMENT REJECT LIMIT " ); contents.append( meta.getMaxErrors() ); return contents.toString(); } }
|
GPBulkLoader extends BaseStep implements StepInterface { public String getControlFileContents( GPBulkLoaderMeta meta, RowMetaInterface rm, Object[] r ) throws KettleException { DatabaseMeta dm = meta.getDatabaseMeta(); String inputName = "'" + environmentSubstitute( meta.getDataFile() ) + "'"; String loadAction = meta.getLoadAction(); StringBuffer contents = new StringBuffer( 500 ); String tableName = dm.getQuotedSchemaTableCombination( environmentSubstitute( meta.getSchemaName() ), environmentSubstitute( meta.getTableName() ) ); if ( loadAction.equalsIgnoreCase( "truncate" ) ) { contents.append( loadAction + " " ); contents.append( tableName + ";" ); contents.append( Const.CR ); } contents.append( "\\COPY " ); contents.append( tableName ); contents.append( " ( " ); String[] streamFields = meta.getFieldStream(); String[] tableFields = meta.getFieldTable(); if ( streamFields == null || streamFields.length == 0 ) { throw new KettleException( "No fields defined to load to database" ); } for ( int i = 0; i < streamFields.length; i++ ) { if ( i != 0 ) { contents.append( ", " ); } contents.append( dm.quoteField( tableFields[i] ) ); } contents.append( " ) " ); contents.append( " FROM " ); contents.append( inputName ); contents.append( " WITH CSV " ); contents.append( "LOG ERRORS INTO " ); contents.append( tableName + "_errors " ); contents.append( " SEGMENT REJECT LIMIT " ); contents.append( meta.getMaxErrors() ); return contents.toString(); } GPBulkLoader( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
Trans trans ); }
|
GPBulkLoader extends BaseStep implements StepInterface { public String getControlFileContents( GPBulkLoaderMeta meta, RowMetaInterface rm, Object[] r ) throws KettleException { DatabaseMeta dm = meta.getDatabaseMeta(); String inputName = "'" + environmentSubstitute( meta.getDataFile() ) + "'"; String loadAction = meta.getLoadAction(); StringBuffer contents = new StringBuffer( 500 ); String tableName = dm.getQuotedSchemaTableCombination( environmentSubstitute( meta.getSchemaName() ), environmentSubstitute( meta.getTableName() ) ); if ( loadAction.equalsIgnoreCase( "truncate" ) ) { contents.append( loadAction + " " ); contents.append( tableName + ";" ); contents.append( Const.CR ); } contents.append( "\\COPY " ); contents.append( tableName ); contents.append( " ( " ); String[] streamFields = meta.getFieldStream(); String[] tableFields = meta.getFieldTable(); if ( streamFields == null || streamFields.length == 0 ) { throw new KettleException( "No fields defined to load to database" ); } for ( int i = 0; i < streamFields.length; i++ ) { if ( i != 0 ) { contents.append( ", " ); } contents.append( dm.quoteField( tableFields[i] ) ); } contents.append( " ) " ); contents.append( " FROM " ); contents.append( inputName ); contents.append( " WITH CSV " ); contents.append( "LOG ERRORS INTO " ); contents.append( tableName + "_errors " ); contents.append( " SEGMENT REJECT LIMIT " ); contents.append( meta.getMaxErrors() ); return contents.toString(); } GPBulkLoader( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
Trans trans ); String getControlFileContents( GPBulkLoaderMeta meta, RowMetaInterface rm, Object[] r ); void createControlFile( String filename, Object[] row, GPBulkLoaderMeta meta ); String createCommandLine( GPBulkLoaderMeta meta, boolean password ); boolean execute( GPBulkLoaderMeta meta, boolean wait ); boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); boolean init( StepMetaInterface smi, StepDataInterface sdi ); void dispose( StepMetaInterface smi, StepDataInterface sdi ); }
|
GPBulkLoader extends BaseStep implements StepInterface { public String getControlFileContents( GPBulkLoaderMeta meta, RowMetaInterface rm, Object[] r ) throws KettleException { DatabaseMeta dm = meta.getDatabaseMeta(); String inputName = "'" + environmentSubstitute( meta.getDataFile() ) + "'"; String loadAction = meta.getLoadAction(); StringBuffer contents = new StringBuffer( 500 ); String tableName = dm.getQuotedSchemaTableCombination( environmentSubstitute( meta.getSchemaName() ), environmentSubstitute( meta.getTableName() ) ); if ( loadAction.equalsIgnoreCase( "truncate" ) ) { contents.append( loadAction + " " ); contents.append( tableName + ";" ); contents.append( Const.CR ); } contents.append( "\\COPY " ); contents.append( tableName ); contents.append( " ( " ); String[] streamFields = meta.getFieldStream(); String[] tableFields = meta.getFieldTable(); if ( streamFields == null || streamFields.length == 0 ) { throw new KettleException( "No fields defined to load to database" ); } for ( int i = 0; i < streamFields.length; i++ ) { if ( i != 0 ) { contents.append( ", " ); } contents.append( dm.quoteField( tableFields[i] ) ); } contents.append( " ) " ); contents.append( " FROM " ); contents.append( inputName ); contents.append( " WITH CSV " ); contents.append( "LOG ERRORS INTO " ); contents.append( tableName + "_errors " ); contents.append( " SEGMENT REJECT LIMIT " ); contents.append( meta.getMaxErrors() ); return contents.toString(); } GPBulkLoader( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
Trans trans ); String getControlFileContents( GPBulkLoaderMeta meta, RowMetaInterface rm, Object[] r ); void createControlFile( String filename, Object[] row, GPBulkLoaderMeta meta ); String createCommandLine( GPBulkLoaderMeta meta, boolean password ); boolean execute( GPBulkLoaderMeta meta, boolean wait ); boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); boolean init( StepMetaInterface smi, StepDataInterface sdi ); void dispose( StepMetaInterface smi, StepDataInterface sdi ); }
|
@Test public void testDefaultDBRepoStringLength() throws Exception { KettleEnvironment.init(); DatabaseMeta databaseMeta = new DatabaseMeta(); databaseMeta.setDatabaseInterface( new TestDatabaseMeta() ); repositoryMeta = new KettleDatabaseRepositoryMeta( "KettleDatabaseRepository", "TestRepo", "Test Repository", databaseMeta ); repository = new KettleDatabaseRepository(); repository.init( repositoryMeta ); KettleDatabaseRepositoryCreationHelper helper = new KettleDatabaseRepositoryCreationHelper( repository ); int repoStringLength = helper.getRepoStringLength(); assertEquals( EXPECTED_DEFAULT_DB_REPO_STRING, repoStringLength ); }
|
protected int getRepoStringLength() { return database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta() .getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH; }
|
KettleDatabaseRepositoryCreationHelper { protected int getRepoStringLength() { return database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta() .getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH; } }
|
KettleDatabaseRepositoryCreationHelper { protected int getRepoStringLength() { return database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta() .getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH; } KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ); }
|
KettleDatabaseRepositoryCreationHelper { protected int getRepoStringLength() { return database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta() .getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH; } KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ); synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade,
List<String> statements, boolean dryrun ); List<String> updateStepTypes( List<String> statements, boolean dryrun, boolean create ); List<String> updateDatabaseTypes( List<String> statements, boolean dryrun, boolean create ); void updateJobEntryTypes( List<String> statements, boolean dryrun, boolean create ); }
|
KettleDatabaseRepositoryCreationHelper { protected int getRepoStringLength() { return database.getDatabaseMeta().getDatabaseInterface().getMaxVARCHARLength() - 1 > 0 ? database.getDatabaseMeta() .getDatabaseInterface().getMaxVARCHARLength() - 1 : KettleDatabaseRepository.REP_ORACLE_STRING_LENGTH; } KettleDatabaseRepositoryCreationHelper( KettleDatabaseRepository repository ); synchronized void createRepositorySchema( ProgressMonitorListener monitor, boolean upgrade,
List<String> statements, boolean dryrun ); List<String> updateStepTypes( List<String> statements, boolean dryrun, boolean create ); List<String> updateDatabaseTypes( List<String> statements, boolean dryrun, boolean create ); void updateJobEntryTypes( List<String> statements, boolean dryrun, boolean create ); }
|
@Test public void testProcessRow() throws KettleException { AggregateRows aggregateRows = new AggregateRows( stepMockHelper.stepMeta, stepMockHelper.stepDataInterface, 0, stepMockHelper.transMeta, stepMockHelper.trans ); aggregateRows.init( stepMockHelper.initStepMetaInterface, stepMockHelper.initStepDataInterface ); aggregateRows.setInputRowSets( new ArrayList<RowSet>( Arrays.asList( createSourceRowSet( "TEST" ) ) ) ); int fieldSize = stepMockHelper.initStepMetaInterface.getFieldName().length; AggregateRowsData data = new AggregateRowsData(); data.fieldnrs = new int[ fieldSize ]; data.counts = new long[ fieldSize ]; data.values = new Object[ fieldSize ]; assertTrue( aggregateRows.processRow( stepMockHelper.initStepMetaInterface, data ) ); assertTrue( aggregateRows.getErrors() == 0 ); assertTrue( aggregateRows.getLinesRead() > 0 ); RowMetaInterface outputRowMeta = mock( RowMetaInterface.class ); when( outputRowMeta.size() ).thenReturn( fieldSize ); data.outputRowMeta = outputRowMeta; assertFalse( aggregateRows.processRow( stepMockHelper.initStepMetaInterface, data ) ); assertTrue( aggregateRows.getLinesWritten() > 0 ); }
|
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (AggregateRowsMeta) smi; data = (AggregateRowsData) sdi; Object[] r = getRow(); if ( r == null ) { Object[] agg = buildAggregate(); putRow( data.outputRowMeta, agg ); setOutputDone(); return false; } if ( first ) { first = false; data.outputRowMeta = getInputRowMeta().clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); for ( int i = 0; i < meta.getFieldName().length; i++ ) { data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getFieldName()[i] ); if ( data.fieldnrs[i] < 0 ) { logError( BaseMessages.getString( PKG, "AggregateRows.Log.CouldNotFindField", meta.getFieldName()[i] ) ); setErrors( 1 ); stopAll(); return false; } data.counts[i] = 0L; } } AddAggregate( getInputRowMeta(), r ); if ( checkFeedback( getLinesRead() ) ) { if ( log.isBasic() ) { logBasic( BaseMessages.getString( PKG, "AggregateRows.Log.LineNumber" ) + getLinesRead() ); } } return true; }
|
AggregateRows extends BaseStep implements StepInterface { public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (AggregateRowsMeta) smi; data = (AggregateRowsData) sdi; Object[] r = getRow(); if ( r == null ) { Object[] agg = buildAggregate(); putRow( data.outputRowMeta, agg ); setOutputDone(); return false; } if ( first ) { first = false; data.outputRowMeta = getInputRowMeta().clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); for ( int i = 0; i < meta.getFieldName().length; i++ ) { data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getFieldName()[i] ); if ( data.fieldnrs[i] < 0 ) { logError( BaseMessages.getString( PKG, "AggregateRows.Log.CouldNotFindField", meta.getFieldName()[i] ) ); setErrors( 1 ); stopAll(); return false; } data.counts[i] = 0L; } } AddAggregate( getInputRowMeta(), r ); if ( checkFeedback( getLinesRead() ) ) { if ( log.isBasic() ) { logBasic( BaseMessages.getString( PKG, "AggregateRows.Log.LineNumber" ) + getLinesRead() ); } } return true; } }
|
AggregateRows extends BaseStep implements StepInterface { public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (AggregateRowsMeta) smi; data = (AggregateRowsData) sdi; Object[] r = getRow(); if ( r == null ) { Object[] agg = buildAggregate(); putRow( data.outputRowMeta, agg ); setOutputDone(); return false; } if ( first ) { first = false; data.outputRowMeta = getInputRowMeta().clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); for ( int i = 0; i < meta.getFieldName().length; i++ ) { data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getFieldName()[i] ); if ( data.fieldnrs[i] < 0 ) { logError( BaseMessages.getString( PKG, "AggregateRows.Log.CouldNotFindField", meta.getFieldName()[i] ) ); setErrors( 1 ); stopAll(); return false; } data.counts[i] = 0L; } } AddAggregate( getInputRowMeta(), r ); if ( checkFeedback( getLinesRead() ) ) { if ( log.isBasic() ) { logBasic( BaseMessages.getString( PKG, "AggregateRows.Log.LineNumber" ) + getLinesRead() ); } } return true; } AggregateRows( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
Trans trans ); }
|
AggregateRows extends BaseStep implements StepInterface { public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (AggregateRowsMeta) smi; data = (AggregateRowsData) sdi; Object[] r = getRow(); if ( r == null ) { Object[] agg = buildAggregate(); putRow( data.outputRowMeta, agg ); setOutputDone(); return false; } if ( first ) { first = false; data.outputRowMeta = getInputRowMeta().clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); for ( int i = 0; i < meta.getFieldName().length; i++ ) { data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getFieldName()[i] ); if ( data.fieldnrs[i] < 0 ) { logError( BaseMessages.getString( PKG, "AggregateRows.Log.CouldNotFindField", meta.getFieldName()[i] ) ); setErrors( 1 ); stopAll(); return false; } data.counts[i] = 0L; } } AddAggregate( getInputRowMeta(), r ); if ( checkFeedback( getLinesRead() ) ) { if ( log.isBasic() ) { logBasic( BaseMessages.getString( PKG, "AggregateRows.Log.LineNumber" ) + getLinesRead() ); } } return true; } AggregateRows( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
Trans trans ); boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
|
AggregateRows extends BaseStep implements StepInterface { public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (AggregateRowsMeta) smi; data = (AggregateRowsData) sdi; Object[] r = getRow(); if ( r == null ) { Object[] agg = buildAggregate(); putRow( data.outputRowMeta, agg ); setOutputDone(); return false; } if ( first ) { first = false; data.outputRowMeta = getInputRowMeta().clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); for ( int i = 0; i < meta.getFieldName().length; i++ ) { data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getFieldName()[i] ); if ( data.fieldnrs[i] < 0 ) { logError( BaseMessages.getString( PKG, "AggregateRows.Log.CouldNotFindField", meta.getFieldName()[i] ) ); setErrors( 1 ); stopAll(); return false; } data.counts[i] = 0L; } } AddAggregate( getInputRowMeta(), r ); if ( checkFeedback( getLinesRead() ) ) { if ( log.isBasic() ) { logBasic( BaseMessages.getString( PKG, "AggregateRows.Log.LineNumber" ) + getLinesRead() ); } } return true; } AggregateRows( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
Trans trans ); boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
|
@Test public void convertFromProxyPentahoUser_RetunsNull_WhenErrorOccurs() throws Exception { IRoleSupportSecurityManager manager = mock( IRoleSupportSecurityManager.class ); when( manager.constructUser() ).thenThrow( new KettleException() ); IUser user = convertFromProxyPentahoUser( new ProxyPentahoUser(), Collections.<UserToRoleAssignment> emptyList(), manager ); assertNull( user ); }
|
public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static List<IUser> convertFromNonPentahoUsers( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromNonPentahoRoles( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoDefaultRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static ProxyPentahoRole getProxyPentahoRole( IUserRoleWebService userRoleWebService, String name ); static List<IUser> convertToListFromProxyPentahoUsers( ProxyPentahoUser[] users,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( ProxyPentahoRole[] roles,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static ProxyPentahoUser[] convertToPentahoProxyUsers( Set<IUser> users ); static ProxyPentahoUser[] convertToPentahoProxyUsers( List<IUser> users ); static ProxyPentahoUser convertToPentahoProxyUser( IUser userInfo ); static ProxyPentahoRole[] convertToPentahoProxyRoles( Set<IRole> roles ); static ProxyPentahoRole[] convertToPentahoProxyRoles( List<IRole> roles ); static ProxyPentahoRole convertToPentahoProxyRole( IRole roleInfo ); static IRole convertFromProxyPentahoRole( IUserRoleWebService userRoleWebService, ProxyPentahoRole role,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertToUserInfo( ProxyPentahoUser user, ProxyPentahoRole[] roles,
IRoleSupportSecurityManager rsm ); static IRole convertFromProxyPentahoRole( ProxyPentahoRole role, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IRole convertFromNonPentahoRole( String role, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( ProxyPentahoUser user, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IUser convertFromNonPentahoUser( String user, IRoleSupportSecurityManager rsm ); static Set<IUser> getUsersForRole( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static Set<IRole> getRolesForUser( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static List<IUser> convertFromNonPentahoUsers( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromNonPentahoRoles( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoDefaultRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static ProxyPentahoRole getProxyPentahoRole( IUserRoleWebService userRoleWebService, String name ); static List<IUser> convertToListFromProxyPentahoUsers( ProxyPentahoUser[] users,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( ProxyPentahoRole[] roles,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static ProxyPentahoUser[] convertToPentahoProxyUsers( Set<IUser> users ); static ProxyPentahoUser[] convertToPentahoProxyUsers( List<IUser> users ); static ProxyPentahoUser convertToPentahoProxyUser( IUser userInfo ); static ProxyPentahoRole[] convertToPentahoProxyRoles( Set<IRole> roles ); static ProxyPentahoRole[] convertToPentahoProxyRoles( List<IRole> roles ); static ProxyPentahoRole convertToPentahoProxyRole( IRole roleInfo ); static IRole convertFromProxyPentahoRole( IUserRoleWebService userRoleWebService, ProxyPentahoRole role,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertToUserInfo( ProxyPentahoUser user, ProxyPentahoRole[] roles,
IRoleSupportSecurityManager rsm ); static IRole convertFromProxyPentahoRole( ProxyPentahoRole role, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IRole convertFromNonPentahoRole( String role, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( ProxyPentahoUser user, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IUser convertFromNonPentahoUser( String user, IRoleSupportSecurityManager rsm ); static Set<IUser> getUsersForRole( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static Set<IRole> getRolesForUser( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); }
|
@Test public void convertFromProxyPentahoUser_CopiesDataFromInput() throws Exception { IRoleSupportSecurityManager manager = mockSecurityManager( false ); ProxyPentahoUser pentahoUser = pentahoUser( "name" ); pentahoUser.setPassword( "password" ); pentahoUser.setDescription( "desc" ); pentahoUser.setEnabled( true ); IUser user = convertFromProxyPentahoUser( pentahoUser, Collections.<UserToRoleAssignment> emptyList(), manager ); assertNotNull( user ); assertEquals( pentahoUser.getName(), user.getName() ); assertEquals( pentahoUser.getName(), user.getLogin() ); assertEquals( pentahoUser.getPassword(), user.getPassword() ); assertEquals( pentahoUser.getDescription(), user.getDescription() ); assertEquals( pentahoUser.getEnabled(), user.isEnabled() ); }
|
public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static List<IUser> convertFromNonPentahoUsers( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromNonPentahoRoles( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoDefaultRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static ProxyPentahoRole getProxyPentahoRole( IUserRoleWebService userRoleWebService, String name ); static List<IUser> convertToListFromProxyPentahoUsers( ProxyPentahoUser[] users,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( ProxyPentahoRole[] roles,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static ProxyPentahoUser[] convertToPentahoProxyUsers( Set<IUser> users ); static ProxyPentahoUser[] convertToPentahoProxyUsers( List<IUser> users ); static ProxyPentahoUser convertToPentahoProxyUser( IUser userInfo ); static ProxyPentahoRole[] convertToPentahoProxyRoles( Set<IRole> roles ); static ProxyPentahoRole[] convertToPentahoProxyRoles( List<IRole> roles ); static ProxyPentahoRole convertToPentahoProxyRole( IRole roleInfo ); static IRole convertFromProxyPentahoRole( IUserRoleWebService userRoleWebService, ProxyPentahoRole role,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertToUserInfo( ProxyPentahoUser user, ProxyPentahoRole[] roles,
IRoleSupportSecurityManager rsm ); static IRole convertFromProxyPentahoRole( ProxyPentahoRole role, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IRole convertFromNonPentahoRole( String role, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( ProxyPentahoUser user, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IUser convertFromNonPentahoUser( String user, IRoleSupportSecurityManager rsm ); static Set<IUser> getUsersForRole( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static Set<IRole> getRolesForUser( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static List<IUser> convertFromNonPentahoUsers( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromNonPentahoRoles( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoDefaultRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static ProxyPentahoRole getProxyPentahoRole( IUserRoleWebService userRoleWebService, String name ); static List<IUser> convertToListFromProxyPentahoUsers( ProxyPentahoUser[] users,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( ProxyPentahoRole[] roles,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static ProxyPentahoUser[] convertToPentahoProxyUsers( Set<IUser> users ); static ProxyPentahoUser[] convertToPentahoProxyUsers( List<IUser> users ); static ProxyPentahoUser convertToPentahoProxyUser( IUser userInfo ); static ProxyPentahoRole[] convertToPentahoProxyRoles( Set<IRole> roles ); static ProxyPentahoRole[] convertToPentahoProxyRoles( List<IRole> roles ); static ProxyPentahoRole convertToPentahoProxyRole( IRole roleInfo ); static IRole convertFromProxyPentahoRole( IUserRoleWebService userRoleWebService, ProxyPentahoRole role,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertToUserInfo( ProxyPentahoUser user, ProxyPentahoRole[] roles,
IRoleSupportSecurityManager rsm ); static IRole convertFromProxyPentahoRole( ProxyPentahoRole role, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IRole convertFromNonPentahoRole( String role, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( ProxyPentahoUser user, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IUser convertFromNonPentahoUser( String user, IRoleSupportSecurityManager rsm ); static Set<IUser> getUsersForRole( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static Set<IRole> getRolesForUser( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); }
|
@Test public void convertFromProxyPentahoUser_CopiesRolesForEeUser() throws Exception { IRoleSupportSecurityManager manager = mockSecurityManager( true ); ProxyPentahoUser pentahoUser = pentahoUser( "name" ); List<UserToRoleAssignment> assignments = Collections.singletonList( new UserToRoleAssignment( "name", "role" ) ); EEUserInfo user = (EEUserInfo) convertFromProxyPentahoUser( pentahoUser, assignments, manager ); assertNotNull( user ); assertEquals( pentahoUser.getName(), user.getName() ); assertEquals( 1, user.getRoles().size() ); assertEquals( "role", user.getRoles().iterator().next().getName() ); }
|
public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static List<IUser> convertFromNonPentahoUsers( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromNonPentahoRoles( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoDefaultRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static ProxyPentahoRole getProxyPentahoRole( IUserRoleWebService userRoleWebService, String name ); static List<IUser> convertToListFromProxyPentahoUsers( ProxyPentahoUser[] users,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( ProxyPentahoRole[] roles,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static ProxyPentahoUser[] convertToPentahoProxyUsers( Set<IUser> users ); static ProxyPentahoUser[] convertToPentahoProxyUsers( List<IUser> users ); static ProxyPentahoUser convertToPentahoProxyUser( IUser userInfo ); static ProxyPentahoRole[] convertToPentahoProxyRoles( Set<IRole> roles ); static ProxyPentahoRole[] convertToPentahoProxyRoles( List<IRole> roles ); static ProxyPentahoRole convertToPentahoProxyRole( IRole roleInfo ); static IRole convertFromProxyPentahoRole( IUserRoleWebService userRoleWebService, ProxyPentahoRole role,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertToUserInfo( ProxyPentahoUser user, ProxyPentahoRole[] roles,
IRoleSupportSecurityManager rsm ); static IRole convertFromProxyPentahoRole( ProxyPentahoRole role, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IRole convertFromNonPentahoRole( String role, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( ProxyPentahoUser user, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IUser convertFromNonPentahoUser( String user, IRoleSupportSecurityManager rsm ); static Set<IUser> getUsersForRole( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static Set<IRole> getRolesForUser( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); }
|
UserRoleHelper implements java.io.Serializable { public static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ) { IUser userInfo = null; try { userInfo = rsm.constructUser(); userInfo.setDescription( user.getDescription() ); userInfo.setPassword( user.getPassword() ); userInfo.setLogin( user.getName() ); userInfo.setName( user.getName() ); try { if ( userInfo instanceof IEEUser ) { ( (IEEUser) userInfo ).setRoles( convertToSetFromProxyPentahoRoles( userRoleWebService.getRolesForUser( user ), lookupCache ) ); } } catch ( UserRoleException e ) { e.printStackTrace(); } } catch ( KettleException e1 ) { e1.printStackTrace(); } return userInfo; } static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static List<IUser> convertFromNonPentahoUsers( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromNonPentahoRoles( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoDefaultRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static ProxyPentahoRole getProxyPentahoRole( IUserRoleWebService userRoleWebService, String name ); static List<IUser> convertToListFromProxyPentahoUsers( ProxyPentahoUser[] users,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( ProxyPentahoRole[] roles,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static ProxyPentahoUser[] convertToPentahoProxyUsers( Set<IUser> users ); static ProxyPentahoUser[] convertToPentahoProxyUsers( List<IUser> users ); static ProxyPentahoUser convertToPentahoProxyUser( IUser userInfo ); static ProxyPentahoRole[] convertToPentahoProxyRoles( Set<IRole> roles ); static ProxyPentahoRole[] convertToPentahoProxyRoles( List<IRole> roles ); static ProxyPentahoRole convertToPentahoProxyRole( IRole roleInfo ); static IRole convertFromProxyPentahoRole( IUserRoleWebService userRoleWebService, ProxyPentahoRole role,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertToUserInfo( ProxyPentahoUser user, ProxyPentahoRole[] roles,
IRoleSupportSecurityManager rsm ); static IRole convertFromProxyPentahoRole( ProxyPentahoRole role, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IRole convertFromNonPentahoRole( String role, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( ProxyPentahoUser user, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IUser convertFromNonPentahoUser( String user, IRoleSupportSecurityManager rsm ); static Set<IUser> getUsersForRole( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static Set<IRole> getRolesForUser( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); }
|
@Test public void convertFromProxyPentahoUsers_ReturnsEmptyList_WhenUsersAreAbsent() throws Exception { UserRoleSecurityInfo info = new UserRoleSecurityInfo(); info.setUsers( null ); IRoleSupportSecurityManager manager = mockSecurityManager( false ); List<IUser> users = convertFromProxyPentahoUsers( info, manager ); assertNotNull( users ); assertTrue( users.isEmpty() ); }
|
public static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ) { List<ProxyPentahoUser> users = info.getUsers(); if ( users == null || users.isEmpty() ) { return Collections.emptyList(); } List<UserToRoleAssignment> assignments = info.getAssignments(); List<IUser> userList = new ArrayList<IUser>( users.size() ); for ( ProxyPentahoUser user : users ) { userList.add( convertFromProxyPentahoUser( user, assignments, rsm ) ); } return userList; }
|
UserRoleHelper implements java.io.Serializable { public static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ) { List<ProxyPentahoUser> users = info.getUsers(); if ( users == null || users.isEmpty() ) { return Collections.emptyList(); } List<UserToRoleAssignment> assignments = info.getAssignments(); List<IUser> userList = new ArrayList<IUser>( users.size() ); for ( ProxyPentahoUser user : users ) { userList.add( convertFromProxyPentahoUser( user, assignments, rsm ) ); } return userList; } }
|
UserRoleHelper implements java.io.Serializable { public static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ) { List<ProxyPentahoUser> users = info.getUsers(); if ( users == null || users.isEmpty() ) { return Collections.emptyList(); } List<UserToRoleAssignment> assignments = info.getAssignments(); List<IUser> userList = new ArrayList<IUser>( users.size() ); for ( ProxyPentahoUser user : users ) { userList.add( convertFromProxyPentahoUser( user, assignments, rsm ) ); } return userList; } }
|
UserRoleHelper implements java.io.Serializable { public static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ) { List<ProxyPentahoUser> users = info.getUsers(); if ( users == null || users.isEmpty() ) { return Collections.emptyList(); } List<UserToRoleAssignment> assignments = info.getAssignments(); List<IUser> userList = new ArrayList<IUser>( users.size() ); for ( ProxyPentahoUser user : users ) { userList.add( convertFromProxyPentahoUser( user, assignments, rsm ) ); } return userList; } static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static List<IUser> convertFromNonPentahoUsers( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromNonPentahoRoles( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoDefaultRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static ProxyPentahoRole getProxyPentahoRole( IUserRoleWebService userRoleWebService, String name ); static List<IUser> convertToListFromProxyPentahoUsers( ProxyPentahoUser[] users,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( ProxyPentahoRole[] roles,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static ProxyPentahoUser[] convertToPentahoProxyUsers( Set<IUser> users ); static ProxyPentahoUser[] convertToPentahoProxyUsers( List<IUser> users ); static ProxyPentahoUser convertToPentahoProxyUser( IUser userInfo ); static ProxyPentahoRole[] convertToPentahoProxyRoles( Set<IRole> roles ); static ProxyPentahoRole[] convertToPentahoProxyRoles( List<IRole> roles ); static ProxyPentahoRole convertToPentahoProxyRole( IRole roleInfo ); static IRole convertFromProxyPentahoRole( IUserRoleWebService userRoleWebService, ProxyPentahoRole role,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertToUserInfo( ProxyPentahoUser user, ProxyPentahoRole[] roles,
IRoleSupportSecurityManager rsm ); static IRole convertFromProxyPentahoRole( ProxyPentahoRole role, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IRole convertFromNonPentahoRole( String role, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( ProxyPentahoUser user, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IUser convertFromNonPentahoUser( String user, IRoleSupportSecurityManager rsm ); static Set<IUser> getUsersForRole( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static Set<IRole> getRolesForUser( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); }
|
UserRoleHelper implements java.io.Serializable { public static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ) { List<ProxyPentahoUser> users = info.getUsers(); if ( users == null || users.isEmpty() ) { return Collections.emptyList(); } List<UserToRoleAssignment> assignments = info.getAssignments(); List<IUser> userList = new ArrayList<IUser>( users.size() ); for ( ProxyPentahoUser user : users ) { userList.add( convertFromProxyPentahoUser( user, assignments, rsm ) ); } return userList; } static List<IUser> convertFromProxyPentahoUsers( UserRoleSecurityInfo info, IRoleSupportSecurityManager rsm ); static List<IUser> convertFromNonPentahoUsers( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromNonPentahoRoles( UserRoleInfo info, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoDefaultRoles( UserRoleSecurityInfo info,
IRoleSupportSecurityManager rsm ); static ProxyPentahoRole getProxyPentahoRole( IUserRoleWebService userRoleWebService, String name ); static List<IUser> convertToListFromProxyPentahoUsers( ProxyPentahoUser[] users,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static List<IRole> convertToListFromProxyPentahoRoles( ProxyPentahoRole[] roles,
IUserRoleWebService userRoleWebService, UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static ProxyPentahoUser[] convertToPentahoProxyUsers( Set<IUser> users ); static ProxyPentahoUser[] convertToPentahoProxyUsers( List<IUser> users ); static ProxyPentahoUser convertToPentahoProxyUser( IUser userInfo ); static ProxyPentahoRole[] convertToPentahoProxyRoles( Set<IRole> roles ); static ProxyPentahoRole[] convertToPentahoProxyRoles( List<IRole> roles ); static ProxyPentahoRole convertToPentahoProxyRole( IRole roleInfo ); static IRole convertFromProxyPentahoRole( IUserRoleWebService userRoleWebService, ProxyPentahoRole role,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( IUserRoleWebService userRoleWebService, ProxyPentahoUser user,
UserRoleLookupCache lookupCache, IRoleSupportSecurityManager rsm ); static IUser convertToUserInfo( ProxyPentahoUser user, ProxyPentahoRole[] roles,
IRoleSupportSecurityManager rsm ); static IRole convertFromProxyPentahoRole( ProxyPentahoRole role, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IRole convertFromNonPentahoRole( String role, IRoleSupportSecurityManager rsm ); static IUser convertFromProxyPentahoUser( ProxyPentahoUser user, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static IUser convertFromNonPentahoUser( String user, IRoleSupportSecurityManager rsm ); static Set<IUser> getUsersForRole( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); static Set<IRole> getRolesForUser( String name, List<UserToRoleAssignment> assignments,
IRoleSupportSecurityManager rsm ); }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.