/*
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
*/
package uk.ac.sanger.artemis.util;
import uk.ac.sanger.artemis.io.ChadoCanonicalGene;
import uk.ac.sanger.artemis.io.DocumentEntry;
import uk.ac.sanger.artemis.io.PartialSequence;
import uk.ac.sanger.artemis.io.ReadFormatException;
import uk.ac.sanger.artemis.chado.ChadoTransactionManager;
import uk.ac.sanger.artemis.chado.FeatureForUpdatingResidues;
import uk.ac.sanger.artemis.chado.IBatisDAO;
import uk.ac.sanger.artemis.chado.JdbcDAO;
import uk.ac.sanger.artemis.chado.GmodDAO;
import uk.ac.sanger.artemis.chado.ChadoTransaction;
import uk.ac.sanger.artemis.components.database.DatabaseEntrySource;
import uk.ac.sanger.artemis.components.genebuilder.GeneUtils;
// import below added in a later commit (VCS blame residue: "Eric Rasche / committed")
import uk.ac.sanger.artemis.util.DatabaseLocationParser;
import org.gmod.schema.sequence.Feature;
import org.gmod.schema.sequence.FeatureProp;
import org.gmod.schema.sequence.FeatureLoc;
import org.gmod.schema.sequence.FeatureRelationship;
import org.gmod.schema.sequence.FeatureSynonym;
import org.gmod.schema.sequence.FeatureCvTerm;
import org.gmod.schema.sequence.FeatureCvTermProp;
import org.gmod.schema.cv.Cv;
import org.gmod.schema.general.DbXRef;
import org.gmod.schema.pub.PubDbXRef;
import org.gmod.schema.pub.Pub;
import org.postgresql.largeobject.LargeObjectManager;
import com.ibatis.common.jdbc.SimpleDataSource;
import java.util.Collections;
import java.util.Comparator;
/**
 * Objects of this class are Documents created from a relational database.
*/
public class DatabaseDocument extends Document
/** source feature_id */
private String srcFeatureId = "1";
/** database schema */
private String schema = "public";
private InputStreamProgressListener progress_listener;
/** JDBC DAO */
private JdbcDAO jdbcDAO = null;
/** iBatis DAO */
/** entries to split into - each is given a name and the features within the entry */
{
{ {"repeats"} , {"repeat_region", "direct_repeat"} },
{ {"EST"} , {"EST_match", "match_part"} },
{ {"contig+gap"}, {"contig", "gap"}}
};
private boolean gene_builder;
// include children in reading from the database
private boolean readChildren = true;
private boolean lazyFeatureLoad = true;
public static String EXONMODEL = "exon-model";
public static String TRANSCRIPT = "mRNA";
public static boolean CHADO_INFER_CDS = false;
private static Vector<String> cvControledCuratioNames;
// controlled vocabulary
/** controlled_curation controlled vocabulary */
/** controlled vocabulary */
public static String RILEY_TAG_CVNAME = "RILEY";
private static org.apache.log4j.Logger logger4j =
org.apache.log4j.Logger.getLogger(DatabaseDocument.class);
/**
 * Create a new Document from a database.
 *
 * @param location
 *          This should be a URL string giving:
 *          jdbc:postgresql://host:port/database_name?user=username
 */
public DatabaseDocument(String location, JPasswordField pfield)
if(location.indexOf('=') > -1)
this.schema = location.substring( location.indexOf('=')+ 1);
if(System.getProperty("ibatis") != null ||
System.getProperty("jdbc") == null)
/**
* Create a new Document from a database.
*
* @param location
* This should be a URL string giving:
* @param feature_id
* ID of a feature to be extracted.
*
*/
public DatabaseDocument(String location, JPasswordField pfield,
String srcFeatureId, String schema)
this.srcFeatureId = srcFeatureId;
if(System.getProperty("ibatis") != null ||
System.getProperty("jdbc") == null)
/**
* Create a new Document from a database.
*
* @param location
* This should be a URL string giving:
* jdbc:postgresql://host:port/database_name?user=username
* @param srcFeatureId
* ID of a feature to be extracted.
* @param splitGFFEntry
* split into separate entries based on feature types.
* @param progress_listener
* input stream progress listener
*
*/
public DatabaseDocument(String location, JPasswordField pfield,
String srcFeatureId, String schema, boolean splitGFFEntry,
InputStreamProgressListener progress_listener)
{
super(location);
this.srcFeatureId = srcFeatureId;
this.progress_listener = progress_listener;
if(System.getProperty("ibatis") != null ||
System.getProperty("jdbc") == null)
/**
* Used by the gene builder to read a database entry
* for a single gene.
* @param location
* @param pfield
* @param srcFeatureId
* @param schema
* @param gene_builder
*/
public DatabaseDocument(String location, JPasswordField pfield,
String srcFeatureId, String schema, boolean gene_builder)
{
super(location);
this.pfield = pfield;
this.srcFeatureId = srcFeatureId;
this.schema = schema;
this.gene_builder = gene_builder;
if(System.getProperty("ibatis") != null ||
System.getProperty("jdbc") == null)
{
iBatis = true;
System.setProperty("chado", location);
}
public DatabaseDocument(String location, JPasswordField pfield,
String srcFeatureId, String schema,
this.srcFeatureId = srcFeatureId;
if(System.getProperty("ibatis") != null ||
System.getProperty("jdbc") == null)
/**
* Use another DatabaseDocument to make a new document.
* @param originalDocument
* @param srcFeatureId
* @param schema
* @param gene_builder
* @param progress_listener
*/
/**
 * Use another DatabaseDocument to make a new document restricted to the
 * region around a single gene feature.
 * @param originalDocument   document supplying the location and password field
 * @param schema             database schema to read from
 * @param geneFeature        gene whose source feature / region is wanted
 * @param range              base range to extract
 * @param progress_listener  input stream progress listener
 */
public DatabaseDocument (final DatabaseDocument originalDocument,
final String schema, final Feature geneFeature,
final Range range,
final InputStreamProgressListener progress_listener)
{
// reuse the original document's location and password; "-1" marks the
// srcFeatureId as not-yet-known — it is resolved later from the gene's
// FeatureLoc when the stream is read (see getInputStream())
this((String)originalDocument.getLocation(),
originalDocument.getPfield(),
"-1", schema, false);
this.progress_listener = progress_listener;
this.range = range;
this.geneFeature = geneFeature;
}
/**
* Use another DatabaseDocument to make a new document.
* @param originalDocument
* @param srcFeatureId
* @param schema
* @param gene_builder
* @param region_grab
* @param progress_listener
*/
public DatabaseDocument (final DatabaseDocument originalDocument,
final String srcFeatureId,
final String schema,
final boolean gene_builder,
final InputStreamProgressListener progress_listener)
{
// delegate to the (location, pfield, srcFeatureId, schema, gene_builder)
// constructor, reusing the original document's connection details
this((String)originalDocument.getLocation(),
originalDocument.getPfield(),
srcFeatureId, schema, gene_builder);
// NOTE(review): the lines below are garbled in this extraction — the try
// block bodies and part of the MDC.put("host", ...) call are missing, and
// progress_listener does not appear to be stored; verify against the
// original file before relying on this text.
{
// add username & host to MDC data for logging
try
catch(NullPointerException npe)
{
// no username available from the password field — fall back to the JVM user
org.apache.log4j.MDC.put("username",System.getProperty("user.name"));
}
try
{
org.apache.log4j.MDC.put("host",
}
// best-effort only: logging context must never break document creation
catch(Exception e) {}
}
/**
 * Set whether child features are included when reading from the database.
 * @param readChildren true to read children of the requested features
 */
public void setReadChildren(final boolean readChildren)
{
this.readChildren = readChildren;
/**
* Reset the schema.
* @param location
* @param schema
*/
private void reset(String location, String schema)
{
this.schema = schema;
// only rewrite the location when it does not already end with this schema
if(!location.endsWith("="+schema))
{
// swap the trailing "...=<old schema>" suffix for the new schema name
int index = location.lastIndexOf('=');
setLocation(location.substring(0,index+1) + schema);
// drop the cached iBatis connection so the next access reconnects
if(iBatis && connIB != null)
{
try
{
connIB.close();
}
catch(SQLException e)
{
// best-effort close: log and continue with the reset
logger4j.warn(e.getMessage());
}
connIB = null;
}
jdbcDAO = null;
System.setProperty("chado", (String)getLocation());
/**
* Reset connection.
*/
/**
 * Reset connection: discard any cached DAO/connection state so the
 * next database access opens a fresh connection.
 */
public void reset() {
  // Close and forget the cached iBatis connection, if one is open.
  if(connIB != null && iBatis) {
    try {
      connIB.close();
    }
    catch(final SQLException sqle) {
      // best-effort close — log the failure and carry on resetting
      logger4j.warn(sqle.getMessage());
    }
    connIB = null;
  }
  jdbcDAO = null;
}
return new DatabaseDocument( ((String)getLocation()) + name, pfield);
* Return the name of this Document (the last element of the Document
* location).
*/
public String getName()
int ind = ((String) getLocation()).indexOf("?");
String name = ((String) getLocation()).substring(0, ind);
/**
* Set the name of this document.
*/
public void setName(String name)
{
// simple field assignment; the name is otherwise derived from the location
this.name = name;
}
public DatabaseDocument createDatabaseDocument()
return new DatabaseDocument( (String)getLocation(), pfield,
srcFeatureId, schema );
* Return true if and only if the Document refered to by this object exists
* and is readable. Always returns true.
*/
public boolean readable()
* Return true if and only if the Document refered to by this object exists
* and can be written to. Always returns false.
*/
public boolean writable()
* Create a new InputStream object from this Document. The contents of the
* Document can be read from the InputStream.
*
* @exception IOException
* Thrown if the Document can't be read from (for example if it
* doesn't exist).
*/
public InputStream getInputStream() throws IOException
ByteArrayInputStream instream;
if(gff_buff != null)
{
instream = new ByteArrayInputStream(gff_buff.getBytes());
schemaList, dao, readChildren);
else if(range != null)
{
//
// Retrieve all features within a range
final Feature srcFeature;
if(geneFeature != null)
{
Collection<FeatureLoc> featureLocs = geneFeature.getFeatureLocsForFeatureId();
Iterator<FeatureLoc> it = featureLocs.iterator();
final FeatureLoc featureLoc = it.next();
int srcfeatureid = featureLoc.getFeatureBySrcFeatureId().getFeatureId();
srcFeature = dao.getFeatureById(srcfeatureid);
this.srcFeatureId = Integer.toString(srcfeatureid);
}
else
{
srcFeature = dao.getFeatureById(Integer.parseInt(srcFeatureId));
}
final ByteBuffer entryBuffer = getFeaturesInRange(srcFeature, range, dao);
getChadoSequence(srcFeature, entryBuffer);
return new ByteArrayInputStream(entryBuffer.getBytes());
}
ByteBuffer entryBuffer = new ByteBuffer();
DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
Calendar cal = Calendar.getInstance();
entryBuffer.append("#created " + dateFormat.format(cal.getTime()) + "\n");
if(dao instanceof IBatisDAO)
((IBatisDAO) dao).startTransaction();
logger4j.debug("RETRIEVE SOURCE FEATURE FROM: "+getLocation());
entryBuffer.append("##sequence-region " + srcFeature.getUniqueName() +
}
else
{
for(int i = 0; i < gff_buffer.length; i++)
{
if(gff_buffer[i].size() > 0)
if(dao instanceof IBatisDAO)
((IBatisDAO) dao).commitTransaction();
}
finally
{
if(dao instanceof IBatisDAO)
((IBatisDAO) dao).endTransaction();
catch(RuntimeException re)
{
JOptionPane.showMessageDialog(null, "Problems Reading...\n" +
re.getMessage(),
"Problems Reading From the Database ",
JOptionPane.ERROR_MESSAGE);
re.printStackTrace();
}
JOptionPane.showMessageDialog(null, "Problems Reading...\n" +
sqlExp.getMessage(),
"Problems Reading From the Database ",
sqlExp.printStackTrace();
}
return null;
}
/**
* Called (by DatabaseEntrySource) to retrieve all the documents for each
* entry created.
*
*/
public DatabaseDocument[] getGffDocuments(String location, String id,
String schema)
{
if(gff_buffer[i].size() > 0)
nentries++;
}
DatabaseDocument[] new_docs = new DatabaseDocument[nentries];
nentries = 0;
new_docs[nentries] = new DatabaseDocument(location, pfield, id, schema,
gff_buffer[i], name);
/**
* Create an array of GFF lines.
* @param dao the data access object
* @param parentFeatureID the parent identifier for the features to
* extract
* @return the <code>ByteBuffer</code> array of GFF lines
*/
private ByteBuffer[] getGff(final GmodDAO dao,
final Feature srcFeature)
featureloc.setFeatureBySrcFeatureId(srcFeature);
// ignore match_part (BLAST HSPs)
CvTerm cvTerm;
try
{
cvTerm = getCvTermByCvAndCvTerm("match_part", "sequence");
}
catch(NullPointerException ne)
{
cvTerm = dao.getCvTermByNameAndCvName("match_part", "sequence");
}
child.setAnalysis(false);
child.setCvTerm(cvTerm);
final List<Feature> featList = dao.getFeaturesByLocatedOnFeature(child);
final Hashtable<String, Feature> id_store = new Hashtable<String, Feature>(feature_size);
for(int i = 0; i < feature_size; i++)
{
id_store.put(Integer.toString(feat.getFeatureId()), feat);
if(lazyFeatureLoad)
idFeatureStore = id_store;
final Hashtable<Integer, List<String>> dbxrefs;
final Hashtable<Integer, List<FeatureSynonym>> synonym;
final Hashtable<Integer, List<FeatureCvTerm>> featureCvTerms;
final Hashtable<Integer, List<FeatureCvTermDbXRef>> featureCvTermDbXRefs;
Hashtable<Integer, List<FeatureCvTermPub>> featureCvTermPubs = null;
final Hashtable<Integer, List<FeaturePub>> featurePubs;
final List<PubDbXRef> pubDbXRefs;
if(lazyFeatureLoad)
{
dbxrefs = null;
synonym = null;
featureCvTerms = null;
featureCvTermDbXRefs = null;
featureCvTermPubs = null;
featurePubs = null;
pubDbXRefs = null;
}
else
{
dbxrefs= IBatisDAO.mergeDbXRef(
featureCvTerms = getFeatureCvTermsByFeature(dao,
featureCvTermDbXRefs = getFeatureCvTermDbXRef(dao,
try
{
featureCvTermPubs = getFeatureCvTermPub(dao,
dao.getFeatureCvTermPubBySrcFeature(srcFeature));
}
catch(Exception e)
{
e.printStackTrace();
if(dao instanceof IBatisDAO)
{
try
{
((IBatisDAO) dao).endTransaction();
((IBatisDAO) dao).startTransaction();
}
catch(SQLException sqle){}
}
}
dao.getFeaturePubsBySrcFeature(srcFeature));
String typeName = getCvtermName(type_id, dao, gene_builder);
for(int k=0; k<TYPES[j][1].length; k++)
if(TYPES[j][1][k].equals(typeName))
dbxrefs, synonym, featureCvTerms,
feat.getFeatureLoc(), this_buff, gene_builder);
if( i%10 == 0 || i == feature_size-1)
progress_listener.progressMade("Read from database: " +
/**
* Get a <code>Hashtable</code> of feature_id keys and their corresponding
* feature_synonym
*/
private Hashtable<Integer, List<FeatureSynonym>> getAllFeatureSynonyms(final List<FeatureSynonym> list)
Hashtable<Integer, List<FeatureSynonym>> synonym = new Hashtable<Integer, List<FeatureSynonym>>();
Integer featureId;
featureId = new Integer(alias.getFeature().getFeatureId());
if(synonym.containsKey(featureId))
synonym.put(featureId, value);
/**
* Get FeaturePub's (i.e. /literature qualifiers).
* @param dao
*/
private Hashtable<Integer, List<FeaturePub>> getFeaturePubs(final GmodDAO dao,
final List<FeaturePub> list)
final Hashtable<Integer, List<FeaturePub>> featurePubs = new Hashtable<Integer, List<FeaturePub>>();
FeaturePub featurePub;
for(int i=0; i<list.size(); i++)
{
featureId = new Integer(featurePub.getFeature().getFeatureId());
if(featurePubs.containsKey(featureId))
value.add(featurePub);
featurePubs.put(featureId, value);
}
return featurePubs;
}
/**
* @param dao
* @param chadoFeature null if we want them all
* @return
*/
private Hashtable<Integer, List<FeatureCvTerm>> getFeatureCvTermsByFeature(
final GmodDAO dao,
final List<FeatureCvTerm> list)
Hashtable<Integer, List<FeatureCvTerm>> featureCvTerms = new Hashtable<Integer, List<FeatureCvTerm>>();
Integer featureId;
FeatureCvTerm feature_cvterm;
for(int i=0; i<list.size(); i++)
{
featureId = new Integer(feature_cvterm.getFeature().getFeatureId());
if(featureCvTerms.containsKey(featureId))
value.add(feature_cvterm);
featureCvTerms.put(featureId, value);
}
return featureCvTerms;
}
/**
*
* @param dao
* @param chadoFeature null if we want all
* @return
*/
private Hashtable<Integer, List<FeatureCvTermDbXRef>> getFeatureCvTermDbXRef(
final GmodDAO dao, final List<FeatureCvTermDbXRef> list)
Hashtable<Integer, List<FeatureCvTermDbXRef>> featureCvTermDbXRefs =
new Hashtable<Integer, List<FeatureCvTermDbXRef>>(list.size());
for(int i=0; i<list.size(); i++)
{
FeatureCvTermDbXRef featureCvTermDbXRef =
featureCvTermDbXRefId = new Integer(
featureCvTermDbXRef.getFeatureCvTerm().getFeatureCvTermId());
if(featureCvTermDbXRefs.containsKey(featureCvTermDbXRefId))
value = featureCvTermDbXRefs.get(featureCvTermDbXRefId);
value.add(featureCvTermDbXRef);
featureCvTermDbXRefs.put(featureCvTermDbXRefId, value);
private Hashtable<Integer, List<FeatureCvTermPub>> getFeatureCvTermPub(
final GmodDAO dao,
final List<FeatureCvTermPub> list)
List<FeatureCvTermPub> value;
Hashtable<Integer, List<FeatureCvTermPub>> featureCvTermPubs =
new Hashtable<Integer, List<FeatureCvTermPub>>(list.size());
for(int i=0; i<list.size(); i++)
{
FeatureCvTermPub featureCvTermPub =
featureCvTermId = new Integer(
featureCvTermPub.getFeatureCvTerm().getFeatureCvTermId());
if(featureCvTermPubs.containsKey(featureCvTermId))
value.add(featureCvTermPub);
featureCvTermPubs.put(featureCvTermId, value);
}
return featureCvTermPubs;
}
/**
* Retrieve the features in a given range
* @param srcFeature
* @param range
* @param dao
* @return
*/
private ByteBuffer getFeaturesInRange(final Feature srcFeature,
final Range range,
final GmodDAO dao)
{
ByteBuffer buff = new ByteBuffer();
List featuresInRange = dao.getFeaturesByRange(range.getStart()-1,
range.getEnd(), 0, srcFeature, null);
List<Integer> featureIds = new Vector<Integer>(featuresInRange.size());
for(int i=0; i<featuresInRange.size(); i++)
{
Feature thisFeature = (Feature)featuresInRange.get(i);
featureIds.add(new Integer(thisFeature.getFeatureId()));
}
FeatureLoc featureLoc = new FeatureLoc();
featureLoc.setFmin(new Integer(range.getStart()));
featureLoc.setFmax(new Integer(range.getEnd()));
srcFeature.setFeatureLoc(featureLoc);
Hashtable<Integer, List<String>> dbxrefs = IBatisDAO.mergeDbXRef(
Hashtable<Integer, List<FeatureSynonym>> synonym = getAllFeatureSynonyms(
Hashtable<Integer, List<FeatureCvTerm>> featureCvTerms = getFeatureCvTermsByFeature(dao,
Hashtable<Integer, List<FeatureCvTermDbXRef>> featureCvTermDbXRefs = getFeatureCvTermDbXRef(dao,
Hashtable<Integer, List<FeatureCvTermPub>> featureCvTermPubs = getFeatureCvTermPub(dao,
Hashtable<Integer, List<FeaturePub>> featurePubs = getFeaturePubs(dao,
Hashtable<String, Feature> id_store = new Hashtable<String, Feature>(featuresInRange.size());
// build feature name store
for(int i = 0; i < featuresInRange.size(); i++)
{
Feature chadoFeature = (Feature)featuresInRange.get(i);
String featureId = Integer.toString(chadoFeature.getFeatureId());