top-image

LATEST ARTICLES

Hi,

Here is a simple example using the Samba Java CIFS Client Library. JCIFS is an Open Source client library that implements the CIFS/SMB networking protocol in 100% Java. CIFS is the standard file sharing protocol on the Microsoft Windows platform (e.g. Map Network Drive …). This client is used extensively in production on large Intranets.

Some resources:
https://jcifs.samba.org/
http://blog.icodejava.com/875/java-tutorial-using-jcifs-to-copy-files-to-shared-network-drive-using-username-and-password/

package com.huo.test.javacifs;

import java.io.File;
import java.text.MessageFormat;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

import jcifs.smb.NtlmPasswordAuthentication;
import jcifs.smb.SmbException;
import jcifs.smb.SmbFile;
import jcifs.smb.SmbFileFilter;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;

/**
 * Class Test SAMBA for CIFS library
 * @author huseyino001
 */
/**
 * Example class using the JCIFS (CIFS/SMB) client library: scans an SMB "input"
 * folder and deletes every sub-folder named "Folder_TO_DELETE_*" whose
 * last-modified date is older than a configurable retention period (in days).
 *
 * @author huseyino001
 */
public class TestCIFS implements Runnable {
	
	// NTLM credentials used for every SMB access.
	private NtlmPasswordAuthentication ntlmPasswordAuthentication = null;
	// SMB folder whose sub-folders are candidates for deletion.
	private SmbFile inputFolder;
	// Sub-folders older than this number of days are deleted.
	private int nbOfRetentionDays;

	public static void main(String[] args) {
		// Single-threaded scheduler used to run one scan asynchronously.
		final ScheduledExecutorService executorServiceOnceScanning = Executors.newSingleThreadScheduledExecutor();

		try {
			System.out.println(" ############################## EXECUTION "+TestCIFS.class+" - START ############################## ");
			final TestCIFS testCIFS = new TestCIFS();
			// NOTE: credentials are hard-coded for demo purposes only — never ship real
			// passwords in source code.
			NtlmPasswordAuthentication ntlmPasswordAuthentication = new NtlmPasswordAuthentication("LU", "huseyin", "mypassword");
			testCIFS.ntlmPasswordAuthentication = ntlmPasswordAuthentication;
			// TODO: supply a real SMB URL (e.g. "smb://host/share/folder/"); an empty
			// URL makes the SmbFile constructor throw MalformedURLException.
			testCIFS.inputFolder = new SmbFile("", testCIFS.ntlmPasswordAuthentication);
			System.out.println("Input folder = "+ testCIFS.inputFolder.getCanonicalPath());
			//
			testCIFS.nbOfRetentionDays = 7;
			System.out.println("Nb of retention days = "+ testCIFS.nbOfRetentionDays);
			
			// Once scanning execution
			executorServiceOnceScanning.submit(testCIFS);

		} catch (Throwable e) {
			e.printStackTrace();
		}finally{
			// Fix: shutdown() was previously called both here and in the try block; a
			// single call in finally is sufficient (already-submitted tasks still run).
			executorServiceOnceScanning.shutdown();
		}
	}

	
	/** Runnable entry point: performs one scan-and-delete pass over the input folder. */
	@Override
	public void run() {
		try {
			processDeletingSubFolders(this);
			
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			System.out.println(" ############################## EXECUTION "+TestCIFS.class+" - END ############################## ");
		}
	}


	
	/**
	 * Scans the input folder and deletes every readable/writable sub-folder whose name
	 * starts with "Folder_TO_DELETE_" and whose last-modified timestamp is older than
	 * the retention period.
	 *
	 * @param testCIFS holder of the input folder, credentials and retention period
	 */
	private void processDeletingSubFolders(final TestCIFS testCIFS) {
		try {
			System.out.println(MessageFormat.format("Scanning \"input\" directory for deleting folders : \"{0}\" - START" + IOUtils.LINE_SEPARATOR, testCIFS.inputFolder.getCanonicalPath()));

			// List only the sub-folders matching the deletion criteria (name, rights, age).
			for (final SmbFile aFolderInINPUT_SmbFile : testCIFS.inputFolder.listFiles(new SmbFileFilter() {
				@Override
				public boolean accept(SmbFile aFolderInINPUT_SmbFile) throws SmbException {
					boolean ret = false;
					
					if(aFolderInINPUT_SmbFile.isDirectory() 
						&& aFolderInINPUT_SmbFile.canRead() 
						&& aFolderInINPUT_SmbFile.canWrite() 
						&& aFolderInINPUT_SmbFile.getName().startsWith("Folder_TO_DELETE_")){
						
						// getLastModified()==0 means the date is unknown: never delete then.
						if(aFolderInINPUT_SmbFile.getLastModified()>0){
							Calendar calLastModified = GregorianCalendar.getInstance();
							calLastModified.setTimeInMillis(aFolderInINPUT_SmbFile.getLastModified());

							// Retention limit = now minus nbOfRetentionDays.
							Calendar calRetention = GregorianCalendar.getInstance();
							calRetention.add(Calendar.DAY_OF_MONTH, -testCIFS.nbOfRetentionDays);

							ret = calLastModified.compareTo(calRetention)<0;
						}
					}
					return ret; 
				}
			})) {
					System.out.println(MessageFormat.format("Deleting \"{0}\" - START", aFolderInINPUT_SmbFile));
					try {
						System.out.println(MessageFormat.format("{0}={1}", "Folder to delete", aFolderInINPUT_SmbFile.getCanonicalPath()));
						deleteDirectory(aFolderInINPUT_SmbFile);
			
						
					} catch (Throwable e) {
						// A single failed deletion must not stop the whole scan.
						StringBuilder sb = new StringBuilder(MessageFormat.format("Unable to delete folder {0}", aFolderInINPUT_SmbFile));
						sb.append(IOUtils.LINE_SEPARATOR);
						sb.append(e.getMessage());
						System.out.println(sb.toString());
						e.printStackTrace();
					}
					System.out.println(MessageFormat.format("Deleting \"{0}\" - END", aFolderInINPUT_SmbFile));
			}// FOR-END
			System.out.println(MessageFormat.format("Scanning \"input\" directory for deleting folders : \"{0}\" - END" + IOUtils.LINE_SEPARATOR, testCIFS.inputFolder.getCanonicalPath()));

		} catch (Exception e) {
			System.out.println("Listing of \""+testCIFS.inputFolder.getCanonicalPath()+"\" directory failed ");
			e.printStackTrace();
		}
	}
	
	
	/**
	 * Deletes the given directory through SMB; if it still exists afterwards, retries
	 * locally through its UNC path with commons-io.
	 *
	 * @param aSmbFile directory to delete; ignored when null
	 * @throws Exception propagated from the SMB or local deletion
	 */
	private void deleteDirectory(SmbFile aSmbFile) throws Exception{
		if(aSmbFile!=null){
			aSmbFile.delete();
			if(aSmbFile.exists()){
				// SMB deletion left the folder behind: fall back to a local delete via UNC.
				FileUtils.deleteDirectory(new File(aSmbFile.getUncPath()));
			}
		}
	}
	
	/**
	 * Deletes the given file, first as a local file (canonical path), then through SMB
	 * if the local deletion failed.
	 *
	 * @param aSmbFile file to delete; ignored when null
	 * @throws Exception propagated from the SMB deletion
	 */
	private void deleteFile(SmbFile aSmbFile) throws Exception{
		if(aSmbFile!=null){
			final File fileToDelete = new File(aSmbFile.getCanonicalPath());
			if (!fileToDelete.delete()) {
				aSmbFile.delete();
			}
		}
	}
	
}

Best regards,

Huseyin

Hi,

Here is a simple Java example using the Apache Commons Configuration library. The Apache Commons Configuration software library provides a generic configuration interface which enables a Java application to read configuration data from a variety of sources.

Some resources:
https://commons.apache.org/proper/commons-configuration/
https://commons.apache.org/proper/commons-configuration/userguide/quick_start.html

A simple file properties myfile.properties:

MYPARAMETER1=VALUE1
MYPARAMETER2=VALUE2
MYPARAMETER3=VALUE3
MYPARAMETER4_INT=123
MYPARAMETER5_DOUBLE=123.45
MYPARAMETER6_BOOL=true

We need the libraries commons-configuration-1.5.jar and commons-collections-3.2.jar:

package com.huo.test.javaconfig;

import java.text.MessageFormat;

import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.configuration.reloading.FileChangedReloadingStrategy;

/**
 * Class Test for test loading configuration
 */
/**
 * Demo of the Apache Commons Configuration library: loads a properties file,
 * enables hot-reloading, and reads string/int/double/boolean parameters.
 */
public class TestConfig  {

	public static void main(String[] args) {
		try {
			System.out.println(" ############################## EXECUTION "+TestConfig.class+" - START ############################## ");

			// Build the absolute path of the properties file from the working directory.
			final String propertiesPath = System.getProperty("user.dir") + "/src/com/huo/test/javaconfig/myfile.properties";
			final PropertiesConfiguration config = new PropertiesConfiguration(propertiesPath);
			config.setListDelimiter(',');
			// Re-read the file automatically whenever it changes on disk.
			config.setReloadingStrategy(new FileChangedReloadingStrategy());

			// String parameters (MYPARAMETER1 has no default; the others default to "").
			System.out.println("The value of MYPARAMETER1 is :" + config.getString("MYPARAMETER1"));
			System.out.println("The value of MYPARAMETER2 is :" + config.getString("MYPARAMETER2", ""));
			System.out.println("The value of MYPARAMETER3 is :" + config.getString("MYPARAMETER3", ""));
			// Typed parameters.
			System.out.println("The value of MYPARAMETER4_INT is :" + config.getInt("MYPARAMETER4_INT"));
			System.out.println("The value of MYPARAMETER5_DOUBLE is :" + config.getDouble("MYPARAMETER5_DOUBLE"));
			System.out.println("The value of MYPARAMETER6_BOOL is :" + config.getBoolean("MYPARAMETER6_BOOL"));

		} catch (Throwable e) {
			e.printStackTrace();
		} finally {
			System.out.println(" ############################## EXECUTION "+TestConfig.class+" - END ############################## ");
		}
	}
}

The outputs are:

 ############################## EXECUTION class com.huo.test.javaconfig.TestConfig - START ############################## 
The value of MYPARAMETER1 is :VALUE1
The value of MYPARAMETER2 is :VALUE2
The value of MYPARAMETER3 is :VALUE3
The value of MYPARAMETER4_INT is :123
The value of MYPARAMETER5_DOUBLE is :123.45
The value of MYPARAMETER6_BOOL is :true
 ############################## EXECUTION class com.huo.test.javaconfig.TestConfig - END ############################## 

That’s all!!

Huseyin OZVEREN

Hi,

After my previous posts concerning the Documentum audit trail entries (dm_audittrail) http://www.javablog.fr/documentum-creation-of-audit-trail-entries-dm_audittrail.html and http://www.javablog.fr/deleting-of-audit-trail-entries-dm_audittrail.html, here, I would like to expose a solution in order to force content server to store some extra information in dm_audittrail.application_code. This information stored in the SessionConfig allows the identification of dm_audittrail / dm_audittrail_acl created through the current session.

Here, DQL example:

begintran,c
#
set,c,sessionconfig,application_code
my_value_of_app_code
#.... actions generating dm_audittrail  entries
#....
save,c,l
#
commit,c

Here, JAVA DFC example:

/**
 * DFC session-listener callback: registers the application code "my_value" on the
 * freshly created session, so that subsequent audited actions carry it in
 * dm_audittrail.application_code.
 */
@Override
public void onSessionCreate(IDfSession session) throws DfException {
    setApplicationCode(session, "my_value");

    //.... actions generating dm_audittrail  entries

}
         
/**
 * Appends {@code value} to the repeating "application_code" attribute of the session
 * config, unless it is blank or already present. Content Server then stores this
 * value in dm_audittrail.application_code for audit entries created through this
 * session.
 *
 * @param session current DFC session
 * @param value   application code to register; blank values are ignored
 * @throws DfException propagated from session-config access
 */
private void setApplicationCode(IDfSession session, String value) throws DfException {
    // Guard first: no need to touch the session config for a blank code.
    if (StringUtils.isBlank(value)) {
        return;
    }

    IDfTypedObject sessionConfig = session.getSessionConfig();

    // Append only if the value is not already among the repeating values.
    boolean needAdd = true;
    for (int i = sessionConfig.getValueCount("application_code") - 1; i >= 0; i--) {
        if (value.equals(sessionConfig.getRepeatingString("application_code", i))) {
            needAdd = false;
            break; // found — no need to scan the remaining values
        }
    }
    if (needAdd) {
        sessionConfig.appendString("application_code", value);
    }
}

Best regards,

Huseyin OZVEREN

Hi,

After the post on creation of audittrail (http://www.javablog.fr/documentum-creation-of-audit-trail-entries-dm_audittrail.html), I would like to present the purge of audittrail because “Dell EMC/OpenText” advises purging/archiving the audittrail data in order to improve the system’s performance.
 
So, several solutions are possible:

  • archiving the audittrail in external database, then, deleting these events in audittrail via custom jobs,
  • deleting the useless audittrail of technical events (custom indexation,…) via a DQL/API scripts or custom jobs,

 
In this post, the audittrail will be archived in a dedicated database then purged via custom generic method called by several jobs. First, we need to configure the DATASOURCE (DS) on DCTM server in order to access to archive database : http://www.javablog.fr/documentum-datasource-configuration-ds.html.
 

Below, the steps in archiving and purge of audittrails in a generic and custom dm_method :

  • STEP 1: Select the audittrail which must be removed
    		SELECT DISTINCT r_object_id, time_stamp, event_name, event_description, user_name ,user_id, owner_name, audited_obj_id, chronicle_id, object_name, version_label, 
    		host_name, attribute_list, attribute_list_id, string_1, string_2, string_3, string_4, string_5, id_1, id_2, id_3, id_4, id_5, object_type, application_code
    		FROM dm_audittrail
    		WHERE DATEDIFF(MONTH,TIME_STAMP,DATE(NOW)) >= <grace_period>
    			AND 
    			(
    				<qualification>
    
    			)
    		ORDER BY r_object_id ASC
    		ENABLE(RETURN_TOP <commit_size>)
    
    	-table MYDATA1, 
    	-qualification "event_name like 'huo\_%' ESCAPE '\' OR object_type IN (select name from dm_type where r_object_id in (select r_type_id from dmi_type_info where any r_supertype LIKE 'huo\_%' ESCAPE '\'))", 
    	-grace_period 3, 
    	-commit_size 2000  
    

     

  • STEP 2 : Insert audittrail in a database table for docbase and year (AUDIT_TRAIL.AUDIT_TRAILS_MYDATA1_2017)
     --- insertAuditTrails
    INSERT INTO AUDIT_TRAIL.AUDIT_TRAILS_<table>_<year> 
     (ATTRIBUTE_LIST, AUDITED_OBJ_ID, CHRONICLE_ID, EVENT_DESCRIPTION, 
    EVENT_NAME, HOST_NAME, ID_1, ID_2, ID_3, ID_4, ID_5, OBJECT_NAME, 
    OWNER_NAME, STRING_1, STRING_2, STRING_3, STRING_4, STRING_5, 
    TIME_STAMP, USER_ID, USER_NAME, VERSION_LABEL, OBJECT_TYPE, 
    APPLICATION_CODE)
     VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
    

     

  • STEP 3 : Purge the audittrail entries via the administrative method “PURGE_AUDIT” which removes an audit trail entry from the Docbase.
    Use and arguments: see p.252 in Content Server DQL Reference Manual at http://ec.europa.eu/ipg/tools/wcm-portal/documentation/assets/dql_reference_guide_en.pdf and my previous
    post concerning the use of Documentum administration methods http://www.javablog.fr/documentum-administration-methods-presentation-and-examples-migrate_content-get_path-do_method-check_security-get_file_url.html.
     
    Examples from “Content Server DQL Reference Manual”:
    o This example deletes all audit trail entries generated from January 1, 2003 to January 1, 2004, including unarchived entries. The number of entries deleted in each transaction is set to 500:

    EXECUTE purge_audit WITH delete_mode=’DATE_RANGE’, date_start=’01/01/2003 00:00:00 AM’, date_end=’01/01/2004 00:00:00 AM’ purge_non_archived=T,commit_size=500

    o This example deletes all archived audit trail entries that identify the document 090xxxxxxxxxx94ef as the audited object:

    EXECUTE purge_audit WITH delete_mode=’ALL_VERSIONS’, object_id=’090xxxxxxxxxx94ef’

    o This example deletes the single audit trail entry whose object ID is 5f0000021372ac6f:

    EXECUTE purge_audit WITH delete_mode=’AUDIT_RECORD’, object_id=’5f0000021372ac6f’

    o This example deletes all audit trail entries whose object IDs range from 5f1xxxxxxxx901 to 5f1xxxxxxx925, including unarchived entries:

    EXECUTE purge_audit WITH delete_mode=’ID_RANGE’, id_start=’5f1xxxxxxxx901’,id_end=’5f1xxxxxxx925’, purge_non_archived=T

    o This example deletes all audit trail entries that satisfy the specified DQL predicate:

    EXECUTE purge_audit WITH delete_mode=’PREDICATE’, dql_predicate=’dm_audittrail where event_name like ’’dcm%’’and r_gen_source=0’

     

  • STEP 4 : Deployment of method and manual creation of several jobs in DA which call the above dm_method.
     
    o Example 1 : creation of a job HuO_MyData1AuditArchiving on globalR (yes!) – Archive & Purge dm_audittrail named “huo1_*” with time_stamp > 3 months:

    + Frequency   :  1/ Day at 01:00:00
    + Method Name   :  HuO_AuditArchiving  
    + Arguments   :  -docbase_name HUO_MYDOCBASE1_DEV, -user_name huouser, -table MYDATA1, -qualification "event_name like 'huo1\_%' ESCAPE '\' OR object_type IN (select name from dm_type where r_object_id in (select r_type_id from dmi_type_info where any r_supertype LIKE 'huo\_%' ESCAPE '\'))", -grace_period 3, -commit_size 2000  
    

     
    o Example 2 : creation of a job HuO_MyData2AuditArchiving on globalR (yes!) – Archive & Purge dm_audittrail named “huo2_*” with time_stamp > 3 months:

    + Frequency   :  1/ Day at 01:00:00
    + Method Name   :  HuO_AuditArchiving  
    + Arguments   :  -docbase_name HUO_MYDOCBASE2_DEV, -user_name huouser, -table MYDATA2, -qualification "event_name like 'huo2\_%' ESCAPE '\' OR object_type IN (select name from dm_type where r_object_id in (select r_type_id from dmi_type_info where any r_supertype LIKE 'huo\_%' ESCAPE '\'))", -grace_period 3  
    

     

    Here, some DQL requests in order to check the deployment of dmc_module, dm_jar, dm_method, dm_job:

    # Last versions of Module : Definition of java class
    select r_object_id, object_name, r_creation_date, r_modify_date, r_version_label from dmc_module (all) where LOWER(object_name)  like '%auditarchiving%' ORDER BY r_modify_date desc ;
    0bxxxxxxxx9af	com.huo.lu.ecm.audit.AuditArchivingMethod	16/01/2014 14:37:04	31/10/2017 08:20:48	CURRENT,1.0
    

     

    # Last versions of JARS (Java Method):
    select r_object_id, object_name, r_creation_date, r_modify_date, r_version_label from dmc_jar (all) where LOWER(object_name)  like '%auditarchiving%' ORDER BY r_modify_date desc ;
    090xxxxxxxx8bf	AuditArchiving-Impl	31/10/2017 08:20:46	31/10/2017 08:20:47	CURRENT,1.13
    

     

    # Last versions of DAR:
    select r_object_id, object_name, r_creation_date, r_modify_date, r_version_label from dmc_dar (all) where LOWER(object_name)  like '%logbook-archiving%' ORDER BY r_modify_date desc ;
    080xxxxxxx8c1	ECM-LogBook-Archiving	31/10/2017 08:20:47	31/10/2017 08:20:48	1.13,CURRENT
    

     

    # Last versions of JARS (Java Method):
    SELECT r_object_id, object_name, r_creation_date, r_modify_date, trace_launch,  method_type, use_method_server, method_verb, r_version_label from dm_method (all) where LOWER(object_name)  like '%auditarchiving%' ORDER BY r_modify_date;
    10xxxxxxx9b9	HuO_AuditArchiving	16/01/2014 14:37:05	01/02/2016 09:53:13	0	java	1	com.huo.lu.ecm.audit.AuditArchivingMethod	CURRENT,1.0
    

     

    # Last versions of Jobs:
    SELECT r_object_id, object_name, r_creation_date, r_modify_date, r_version_label from dm_job (all) where LOWER(object_name)  like '%auditarchiving%' ORDER BY r_modify_date;
    080xxxxxxxx39ce	HuO_MyData1AuditArchiving	16/01/2014 14:37:06	24/11/2017 00:08:07	1.0,CURRENT
    080xxxxxxxx39cf	HuO_MyData2AuditArchiving	16/01/2014 14:37:06	24/11/2017 00:06:33	CURRENT,1.0
    

     

 

Here, the source code of Custom method AuditArchivingMethod:

package com.huo.lu.ecm.audit;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.lang.reflect.Method;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Time;
import java.text.DateFormat;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;

import org.apache.commons.lang.StringUtils;

import com.documentum.com.DfClientX;
import com.documentum.com.IDfClientX;
import com.documentum.fc.client.DfQuery;
import com.documentum.fc.client.IDfBusinessObject;
import com.documentum.fc.client.IDfClient;
import com.documentum.fc.client.IDfCollection;
import com.documentum.fc.client.IDfModule;
import com.documentum.fc.client.IDfPersistentObject;
import com.documentum.fc.client.IDfQuery;
import com.documentum.fc.client.IDfSession;
import com.documentum.fc.client.IDfSessionManager;
import com.documentum.fc.commands.admin.DfAdminCommand;
import com.documentum.fc.commands.admin.IDfApplyPurgeAudit;
import com.documentum.fc.common.DfException;
import com.documentum.fc.common.DfLoginInfo;
import com.documentum.fc.common.IDfId;
import com.documentum.fc.common.impl.MessageHelper;
import com.documentum.fc.methodserver.IDfMethod;

/**
 * Custom Documentum server method (dm_method) that archives dm_audittrail entries
 * into an external database reached through the "AUDIT_TRAIL" JNDI datasource, then
 * purges the archived entries from the repository with the PURGE_AUDIT
 * administration method.
 *
 * Arguments (bound by reflection, see {@link #getSetterMethods()}):
 * -docbase_name, -user_name, -table, -grace_period (required), -qualification,
 * -commit_size (optional). Each batch is processed inside a paired DFC transaction
 * and JDBC transaction; both are committed only when the purge reports success and
 * the deleted count matches the number of archived rows.
 */
public class AuditArchivingMethod implements IDfMethod, IDfModule, IDfBusinessObject {

	// Name of the JNDI datasource pointing to the archive database.
	private static final String DS = "AUDIT_TRAIL";
	// JNDI prefixes tried in turn when looking up the datasource.
	private static final String JNDI_PREFIX[] = { "java:/", "java:", "jdbc/" };

	/**
	 * Copies the whole input stream into the given SQL Blob.
	 *
	 * @return the total number of bytes copied
	 */
	private static int copy(InputStream input, Blob output) throws SQLException, IOException {
		byte buffer[] = new byte[4096];
		int count = 0;
		// Blob positions are 1-based, hence "count + 1".
		for (int n = 0; -1 != (n = input.read(buffer));) {
			output.setBytes(count + 1, buffer, 0, n);
			count += n;
		}

		return count;
	}

	// Output writer supplied by the method server; null until execute(Map, PrintWriter) runs.
	private PrintWriter printWriter;

	// When true, arguments from the Map are validated and injected via setters.
	private boolean loadParameters = true;
	private String userName;
	private String repositoryName;
	private String tableName;
	// Extra DQL qualification restricting which dm_audittrail entries are archived.
	private String qualification;
	// Entries younger than this number of months are kept.
	private int gracePeriod;
	// Maximum number of entries archived/purged per batch.
	private int commitSize;
	// Lazily loaded from <SimpleClassName>.properties on the classpath.
	private Properties properties;
	private DataSource dataSource;

	/**
	 * Returns the value of {@code key} from the class's companion .properties file,
	 * loading the file lazily on first access, or {@code defaultValue} when absent.
	 *
	 * NOTE(review): this is double-checked locking on a non-volatile field, which is
	 * not safe under the Java memory model — confirm whether concurrent access is
	 * actually possible here.
	 */
	private String getProperty(String key, String defaultValue) {
		if (properties == null) {
			synchronized (this) {
				if (properties == null) {
					properties = new Properties();
					String propertyFile = getClass().getSimpleName() + ".properties";
					InputStream inputStream = getClass().getResourceAsStream(propertyFile);
					if (inputStream != null) {
						try {
							properties.load(inputStream);
						} catch (IOException e) {
							println("Unable to load " + propertyFile, e);
						} finally {
							try {
								inputStream.close();
							} catch (IOException e) {

							}
						}
					}
				}
			}
		}
		String result = properties.getProperty(key);
		if (result == null) {
			return defaultValue;
		}
		return result;
	}

	/**
	 * Core processing: looks up the datasource, computes a time budget from the
	 * dm_method's timeout_default (found in the global registry repository), then
	 * repeatedly archives one batch of dm_audittrail entries into the database and
	 * purges them from the repository, until no entries remain or the budget expires.
	 *
	 * @throws Exception when the datasource cannot be found or a batch fails
	 */
	protected void execute() throws Exception {
		InitialContext ic = new InitialContext();
		// Try each JNDI prefix until one resolves the datasource.
		for (int i = 0; i < JNDI_PREFIX.length && dataSource == null; i++) {
			try {
				dataSource = (DataSource) ic.lookup(JNDI_PREFIX[i] + DS);
			} catch (NamingException e) {
			}
		}
		if (dataSource == null) {
			throw new Exception("Unable to find datasource " + DS);
		}

		final String globalRegistryRepository = "GLOBALR";
		IDfClientX clientx = new DfClientX();
		IDfClient client = clientx.getLocalClient();
		final IDfSessionManager sessionManager = client.newSessionManager();
		// Register trusted-login identities (no password) for both repositories.
		for (String repository : new String[] { globalRegistryRepository, getRepositoryName() }) {
			if (sessionManager.hasIdentity(repository)) {
				sessionManager.clearIdentity(repository);
			}
			sessionManager.setIdentity(repository, new DfLoginInfo(getUserName(), null));
		}

		// Time budget: 95% of the dm_method's timeout_default, or 1 hour when the
		// method object cannot be found.
		Calendar timeOut = Calendar.getInstance();
		IDfSession globalRSession = sessionManager.getSession(globalRegistryRepository);
		try {
			IDfPersistentObject dfMethod = globalRSession.getObjectByQualification("dm_method WHERE method_verb = '" + this.getClass().getName() + "'");
			if (dfMethod == null) {
				timeOut.add(Calendar.SECOND, 3600);
			} else {
				timeOut.add(Calendar.SECOND, 95 * dfMethod.getInt("timeout_default") / 100);
			}
		} finally {
			sessionManager.release(globalRSession);
		}

		// r_object_ids archived in the current batch (and therefore to purge).
		List<String> list = new ArrayList<String>(200);
		do {
			list.clear();
			IDfSession dfSession = sessionManager.newSession(getRepositoryName());
			try {
				if (dfSession.isTransactionActive()) {
					throw new DfException("IDfSession's transaction is already active !");
				}
				// Both the DFC and the JDBC transactions commit only when this flag
				// is set after a fully successful purge.
				boolean commit = false;
				dfSession.beginTrans();
				println("IdfSession transaction has begun");
				try {
					Connection connection = dataSource.getConnection();
					try {
						connection.setAutoCommit(false);
						println("SqlConnection transaction has begun");
						try {
							// Build the DQL predicate: grace period + optional qualification.
							StringBuilder dqlPredicate = new StringBuilder();
							dqlPredicate.append("dm_audittrail");
							boolean addWhere = true;
							println(MessageFormat.format("{0,choice,0#There is no grace period.|1#There is a grace period of one month.|1<There is a grace period of {0,number,integer} months}.", gracePeriod));
							if (gracePeriod >= 0) {
								dqlPredicate.append(" WHERE DATEDIFF(MONTH,TIME_STAMP,DATE(NOW)) >= ").append(gracePeriod);
								addWhere = false;
							}
							if (StringUtils.isNotBlank(qualification)) {
								if (addWhere) {
									dqlPredicate.append(" WHERE ");
									addWhere = false;
								} else {
									dqlPredicate.append(" AND ");
								}
								dqlPredicate.append("(").append(qualification).append(")");
							}

							// Select one batch (RETURN_TOP commit_size) of entries to archive.
							StringBuilder sb = new StringBuilder();
							sb.append(" SELECT DISTINCT r_object_id, time_stamp, event_name, event_description, user_name ,user_id, owner_name, audited_obj_id, chronicle_id, object_name, version_label,");
							sb.append(" host_name, attribute_list, attribute_list_id, string_1, string_2, string_3, string_4, string_5, id_1, id_2, id_3, id_4, id_5, object_type, application_code");
							sb.append(" FROM ").append(dqlPredicate.toString());
							sb.append(" ORDER BY r_object_id ASC");
							sb.append(" ENABLE(RETURN_TOP ").append(getCommitSize()).append(")");

							IDfQuery dfQuery = new DfQuery(sb.toString());
							println(MessageFormat.format("Launching query: {0}", sb.toString()));

							IDfCollection coll = dfQuery.execute(dfSession, IDfQuery.DF_READ_QUERY);
							try {
								while (coll.next()) {
									// Skip duplicates; archive each entry and remember its id.
									if (!list.contains(coll.getId("r_object_id").getId())) {
										list.add(insertAuditTrails(connection, getTableName(), coll).getId());
									}
								}
							} finally {
								coll.close();
							}

							if (list.size() > 0) {
								println(MessageFormat.format("{0} dm_audittrail to purge", list.size()));
								IDfApplyPurgeAudit applyPurgeAudit = (IDfApplyPurgeAudit) DfAdminCommand.getCommand(DfAdminCommand.APPLY_PURGE_AUDIT);
								applyPurgeAudit.setDeletMode("PREDICATE");
								// Build the purge predicate in IN-clauses of at most 200 ids.
								StringBuilder dql = new StringBuilder();
								dql.append("dm_audittrail");
								final int size = 200;
								int i = 0;
								while (i * size < list.size()) {
									if (i == 0) {
										dql.append(" WHERE r_object_id IN ('");
									} else {
										dql.append(" OR r_object_id IN ('");
									}
									dql.append(StringUtils.join(list.subList(i * size, ((i + 1) * size > list.size()) ? list.size() : (i + 1) * size), "','"));
									dql.append("')");
									i++;
								}

								applyPurgeAudit.setDQLPredicate(dql.toString());
								applyPurgeAudit.setPurgeNonArchived(true);
								println(MessageFormat.format("Launching purge_query with predicate : {0}", dql.toString()));
								IDfCollection dfCollection = applyPurgeAudit.execute(dfSession);
								try {
									if (dfCollection.next()) {
										boolean result = dfCollection.getBoolean("result");
										int deletedObjects = dfCollection.getInt("deleted_objects");
										// Commit only when the purge succeeded AND deleted
										// exactly the rows that were archived.
										if (result && deletedObjects == list.size()) {
											println(MessageFormat.format("Purge operation has succeed : {0} object(s) deleted", deletedObjects));
											commit = true;
										} else {
											if (!result) {
												println("Error : Purge operation failed !");
											} else {
												println(MessageFormat.format("Error : Expected to delete {0} object(s) but {1} object(s) were deleted", list.size(), deletedObjects));
											}
										}
									} else {
										println("Error : Unable to retrieve purge operation result");
									}
								} finally {
									dfCollection.close();
								}
							} else {
								println("Nothing to do !");
							}
						} finally {
							// JDBC transaction outcome mirrors the purge result.
							if (commit) {
								connection.commit();
								println("SqlConnection transaction commited !");
							} else {
								connection.rollback();
								println("SqlConnection transaction rollbacked !");
							}
						}
					} finally {
						connection.close();
					}
				} finally {
					// DFC transaction outcome mirrors the purge result as well.
					if (commit) {
						dfSession.commitTrans();
						println("IdfSession transaction commited !");
					} else {
						dfSession.abortTrans();
						println("IdfSession transaction rollbacked !");
					}
				}
			} finally {
				sessionManager.release(dfSession);
			}
		} while (list.size() > 0 && timeOut.after(Calendar.getInstance()));
	}

	/**
	 * IDfMethod entry point (OutputStream variant): delegates to the PrintWriter
	 * variant.
	 */
	@SuppressWarnings("unchecked")
	public final void execute(Map arg0, OutputStream arg1) throws Exception {
		execute(arg0, new PrintWriter(arg1));
	}

	/**
	 * IDfMethod entry point: binds the method arguments onto this instance via the
	 * setter map (after validating required parameters), then runs {@link #execute()}.
	 * All output goes through the supplied writer (auto-flush enabled).
	 *
	 * @return always 0; failures are rethrown wrapped in Exception
	 */
	@SuppressWarnings("unchecked")
	public final int execute(Map arg0, PrintWriter arg1) throws Exception {
		try {
			this.printWriter = new PrintWriter(arg1, true);
			try {
				println(MessageFormat.format("Start time : {0,date,dd/MM/yyyy HH:mm:ss}", new Date()));
				println(MessageFormat.format("Method {0} launched with arguments {1}", this.getClass().getSimpleName(), arg0.keySet()));
				println("Vendor-name : " + getVendorString());
				println("Version : " + getVersion());
				if (loadParameters) {
					// Required parameters must be present and non-blank.
					for (String parameter : requiredParameters()) {
						String[] values = (String[]) arg0.get(parameter);
						String value = (values == null || values.length == 0) ? null : values[0];
						validateParam(parameter, value);
					}
					// Inject every recognized argument through its setter.
					for (Iterator iter = arg0.keySet().iterator(); iter.hasNext();) {
						String key = (String) iter.next();
						Method method = getSetterMethods().get(key);
						if (method == null) {
							println("Unknown argument " + key);
						} else {
							method.invoke(this, ((String[]) arg0.get(key))[0]);
						}
					}
				}
				execute();
				println(MessageFormat.format("End time : {0,date,dd/MM/yyyy HH:mm:ss}", new Date()));
			} catch (Throwable e) {
				println(MessageHelper.getStackTraceAsString(e));
				throw new Exception(e);
			}
		} finally {
			try {
				if (this.printWriter != null) {
					this.printWriter.close();
				}
			} catch (Exception e) {

			}
		}
		return 0;
	}

	public String getRepositoryName() {
		return repositoryName;
	}

	/**
	 * Maps method-argument names (e.g. "docbase_name") to the setters that receive
	 * their values during parameter injection.
	 */
	protected Map<String, Method> getSetterMethods() throws SecurityException, NoSuchMethodException {
		Map<String, Method> map = new HashMap<String, Method>();
		map.put("user_name", getClass().getMethod("setUserName", String.class));
		map.put("docbase_name", getClass().getMethod("setRepositoryName", String.class));
		map.put("table", getClass().getMethod("setTableName", String.class));
		map.put("grace_period", getClass().getMethod("setGracePeriod", String.class));
		map.put("qualification", getClass().getMethod("setQualification", String.class));
		map.put("commit_size", getClass().getMethod("setCommitSize", String.class));
		return map;
	}

	public String getTableName() {
		return tableName;
	}

	public String getUserName() {
		return userName;
	}

	/** IDfBusinessObject: vendor string, overridable via the "Vendor-Name" property. */
	public String getVendorString() {
		return getProperty("Vendor-Name", "PricewaterhouseCoopers (c) 2013");
	}

	/** IDfBusinessObject: version string, overridable via the "Build-Version" property. */
	public String getVersion() {
		return getProperty("Build-Version", "1.0");
	}

	/**
	 * Inserts one dm_audittrail row from the collection into the archive table
	 * AUDIT_TRAIL.AUDIT_TRAILS_&lt;table&gt;_&lt;year&gt; (year taken from
	 * time_stamp). The attribute_list content (plus overflow from
	 * attribute_list_id) is staged through a temp file and stored as a Blob.
	 *
	 * @return the r_object_id of the archived audit entry
	 */
	private IDfId insertAuditTrails(Connection connection, String table, IDfCollection coll) throws SQLException, DfException, IOException {
		IDfId result;
		StringBuilder sql = new StringBuilder();
		sql.append(" INSERT INTO AUDIT_TRAIL.AUDIT_TRAILS_{0}_{1}");
		sql.append(" (ATTRIBUTE_LIST,AUDITED_OBJ_ID,CHRONICLE_ID,EVENT_DESCRIPTION,EVENT_NAME,HOST_NAME,ID_1,ID_2,ID_3,ID_4,ID_5,OBJECT_NAME,OWNER_NAME,STRING_1,STRING_2,STRING_3,STRING_4,STRING_5,TIME_STAMP,USER_ID,USER_NAME,VERSION_LABEL,OBJECT_TYPE,APPLICATION_CODE)");
		sql.append(" VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");

		DateFormat year = new SimpleDateFormat("yyyy");
		PreparedStatement statement = connection.prepareStatement(MessageFormat.format(sql.toString(), StringUtils.upperCase(StringUtils.trimToEmpty(table)), year.format(coll.getTime("time_stamp").getDate())));
		try {
			// Temp file staging the (possibly large) attribute_list content.
			File file = new File(new File(System.getProperty("java.io.tmpdir")), "attribute_list_" + coll.getId("audited_obj_id").getId() + ".txt");
			try {
				PrintWriter printWriter = new PrintWriter(file);
				try {
					printWriter.print(coll.getString("attribute_list"));
					// attribute_list_id points to a dmi_audittrail_attrs overflow object.
					if (!coll.getId("attribute_list_id").isNull()) {
						IDfPersistentObject dmiAuditAttrs = coll.getObjectSession().getObject(coll.getId("attribute_list_id"));
						for (int i = 0; i < dmiAuditAttrs.getValueCount("attribute_list"); i++) {
							printWriter.print(dmiAuditAttrs.getRepeatingString("attribute_list", i));
						}
					}
				} finally {
					printWriter.close();
				}
				InputStream inputStream = (file.exists()) ? new FileInputStream(file) : new ByteArrayInputStream(new byte[] {});
				try {
					Blob blob = connection.createBlob();
					try {
						copy(inputStream, blob);
						statement.setBlob(1, blob);
						result = coll.getId("r_object_id");
						statement.setString(2, coll.getId("audited_obj_id").getId());
						statement.setString(3, coll.getId("chronicle_id").getId());
						statement.setString(4, coll.getString("event_description"));
						statement.setString(5, coll.getString("event_name"));
						statement.setString(6, coll.getString("host_name"));
						statement.setString(7, coll.getId("id_1").getId());
						statement.setString(8, coll.getId("id_2").getId());
						statement.setString(9, coll.getId("id_3").getId());
						statement.setString(10, coll.getId("id_4").getId());
						statement.setString(11, coll.getId("id_5").getId());
						statement.setString(12, coll.getString("object_name"));
						statement.setString(13, coll.getString("owner_name"));
						statement.setString(14, coll.getString("string_1"));
						statement.setString(15, coll.getString("string_2"));
						statement.setString(16, coll.getString("string_3"));
						statement.setString(17, coll.getString("string_4"));
						statement.setString(18, coll.getString("string_5"));
						statement.setTime(19, new Time(coll.getTime("time_stamp").getDate().getTime()));
						statement.setString(20, coll.getId("user_id").getId());
						statement.setString(21, coll.getString("user_name"));
						statement.setString(22, coll.getString("version_label"));
						statement.setString(23, coll.getString("object_type"));
						statement.setString(24, coll.getString("application_code"));
						statement.executeUpdate();
					} finally {
						blob.free();
					}
				} finally {
					inputStream.close();
				}
			} finally {
				file.delete();
			}
		} finally {
			statement.close();
		}
		return result;
	}

	/** IDfBusinessObject: this implementation accepts any version. */
	public boolean isCompatible(String arg0) {
		return true;
	}

	final public boolean isLoadParameters() {
		return loadParameters;
	}

	/** Prints without newline to the method writer, or stdout before it is set. */
	protected void print(Object... obj) {
		String message = StringUtils.join(obj, "");
		if (printWriter == null) {
			System.out.print(message);
		} else {
			printWriter.print(message);
			printWriter.flush();
		}
	}

	/** Prints with newline to the method writer, or stdout before it is set. */
	protected void println(Object... obj) {
		String message = StringUtils.join(obj, "");
		if (printWriter == null) {
			System.out.println(message);
		} else {
			printWriter.println(message);
			printWriter.flush();
		}
	}

	/** Names of arguments that must be present and non-blank. */
	protected String[] requiredParameters() {
		return new String[] { "docbase_name", "user_name", "table", "grace_period" };
	}

	/**
	 * Parses the grace period (months); parse failures are logged and leave the
	 * previous value, and negative results are clamped to 0.
	 */
	public void setGracePeriod(String gracePeriod) {
		try {
			this.gracePeriod = Integer.parseInt(gracePeriod);
		} catch (Throwable e) {
			if (printWriter != null) {
				e.printStackTrace(printWriter);
			} else {
				e.printStackTrace();
			}
		}
		if (this.gracePeriod < 0) {
			this.gracePeriod = 0;
		}
	}

	/** Parses the batch size; parse failures are logged and leave the previous value. */
	public void setCommitSize(String commitSize) {
		try {
			this.commitSize = Integer.parseInt(commitSize);
		} catch (Throwable e) {
			if (printWriter != null) {
				e.printStackTrace(printWriter);
			} else {
				e.printStackTrace();
			}
		}
	}

	/** Batch size for archiving/purging; defaults to 200 when unset or invalid. */
	public int getCommitSize() {
		return (commitSize <= 0) ? 200 : commitSize;
	}

	final public void setLoadParameters(boolean loadParameters) {
		this.loadParameters = loadParameters;
	}

	public void setQualification(String qualification) {
		this.qualification = qualification;
	}

	public void setRepositoryName(String repositoryName) {
		this.repositoryName = repositoryName;
	}

	public void setTableName(String tableName) {
		this.tableName = tableName;
	}

	public void setUserName(String userName) {
		this.userName = userName;
	}

	/** IDfBusinessObject: this implementation claims support for every feature. */
	@Override
	public boolean supportsFeature(String arg0) {
		return true;
	}

	/** Rejects blank values for a required parameter. */
	private void validateParam(String parameter, String value) throws Exception {
		if (StringUtils.isBlank(value)) {
			throw new Exception(MessageFormat.format("Non-null value is required for parameter {0}", parameter));
		}
	}

}

Best regards,

Huseyin OZVEREN

Page 1 of 53:1 2 3 4 »Last »
bottom-img
Copyright ® 2012 Huseyin Ozveren. No reproduction, even partial, can be used from this site and all its contents including text, documents, images, etc.. without the express permission of the author.