Newer
Older
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
*/
package uk.ac.sanger.artemis.util;
import com.ibatis.sqlmap.client.SqlMapClient;
import uk.ac.sanger.ibatis.*;
import java.sql.*;
import java.io.*;
import java.util.Hashtable;
import javax.swing.JOptionPane;
/**
* Objects of this class are Documents created from a relational database.
*
**/
public class DatabaseDocument extends Document
{
private String name = null;
private String feature_id = "1";
private InputStreamProgressListener progress_listener;
private Hashtable db;
private Vector organism;
System.getProperty("file.separator")+"art_sql_debug.log";
private ByteBuffer[] gff_buffer;
private ByteBuffer gff_buff;
private String[] types = { "exon", "gene", "CDS", "transcript" };
private boolean splitGFFEntry;
/**
 * Create a new Document from a database.
 *
 * @param location This should be a URL string giving:
 *        jdbc:postgresql://host:port/database_name?user=username
 **/
public DatabaseDocument(String location)
{
  super(location);
}
/**
*
* Create a new Document from a database.
* @param location This should be a URL string giving:
* jdbc:postgresql://host:port/database_name?user=username
* @param feature_id ID of a feature to be extracted.
*
**/
public DatabaseDocument(String location, String feature_id)
{
super(location);
this.feature_id = feature_id;
/**
*
* Create a new Document from a database.
* @param location This should be a URL string giving:
* jdbc:postgresql://host:port/database_name?user=username
* @param feature_id ID of a feature to be extracted.
* @param splitGFFEntry split into separate entries based on feature
* types.
* @param progress_listener input stream progress listener
*
**/
public DatabaseDocument(String location, String feature_id,
InputStreamProgressListener progress_listener)
{
super(location);
this.feature_id = feature_id;
this.progress_listener = progress_listener;
public DatabaseDocument(String location, String feature_id,
{
super(location);
this.feature_id = feature_id;
this.gff_buff = gff_buff;
this.name = name;
/**
 * Append a name to this Document's location and return the result as a
 * new DatabaseDocument.
 *
 * @param name The name to append.
 * @return a new Document for the extended location
 **/
public Document append(String name) throws IOException
{
  final String extendedLocation = ((String)getLocation()) + name;
  return new DatabaseDocument(extendedLocation);
}
/**
*
* Return the name of this Document (the last element of the Document
* location).
*
**/
public String getName()
{
if(name == null)
{
int ind = ((String)getLocation()).indexOf("?");
String name = ((String)getLocation()).substring(0,ind);
ind = name.lastIndexOf("/");
/**
*
* Return a Document with the last element stripped off.
*
**/
public Document getParent()
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
/**
 *
 * Return true if and only if the Document referred to by this object exists
 * and is readable. Always returns true.
 *
 **/
public boolean readable ()
{
return true;
}
/**
 *
 * Return true if and only if the Document referred to by this object exists
 * and can be written to. Always returns true.
 *
 * NOTE(review): the original comment claimed "Always returns false" but the
 * code returns true; comment corrected to match the code — confirm which
 * was intended, since getOutputStream() always throws ReadOnlyException.
 *
 **/
public boolean writable()
{
return true;
}
/**
* Create a new InputStream object from this Document. The contents of the
* Document can be read from the InputStream.
* @exception IOException Thrown if the Document can't be read from
* (for example if it doesn't exist).
**/
public InputStream getInputStream() throws IOException
{
ByteArrayInputStream instream;
if(gff_buff != null)
{
instream = new ByteArrayInputStream(gff_buff.getBytes());
if(!iBatis)
conn = getConnection();
if(iBatis)
gff_buffer = getGFFiBatis(feature_id);
else
gff_buffer = getGFFJdbc(conn,feature_id);
{
if(gff_buffer[0].size() > 0)
entry.append(gff_buffer[0]);
if(iBatis)
getSequenceIbatis(entry);
else
getSequence(conn, entry);
else
{
for(int i=0; i<gff_buffer.length; i++)
{
if(gff_buffer[i].size() > 0)
entry.append(gff_buffer[i]);
}
if(iBatis)
getSequenceIbatis(entry);
else
getSequence(conn, entry);
if(System.getProperty("debug") != null)
appendToLogFile(new String(entry.getBytes()),sqlLog);
instream = new ByteArrayInputStream(entry.getBytes());
return instream;
}
catch(java.sql.SQLException sqlExp)
{
System.out.println("Problems connecting...");
sqlExp.printStackTrace();
}
return null;
}
/**
*
* Called (by DatabaseEntrySource) to retrieve all the documents for each
* entry created.
*
*/
public DatabaseDocument[] getGffDocuments(String location, String id)
{
int nentries = 0;
for(int i=1; i<gff_buffer.length; i++)
{
if(gff_buffer[i].size() > 0)
nentries++;
}
DatabaseDocument[] new_docs = new DatabaseDocument[nentries];
nentries = 0;
String name;
if(i >= types.length)
name = "other";
else
name = types[i];
new_docs[nentries] = new DatabaseDocument(location, id, gff_buffer[i], name);
nentries++;
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
/**
 * Look up a feature name from its feature_id, via iBatis.
 *
 * @param feature_id identifier of the feature
 * @return the feature name
 * @throws java.sql.SQLException if the query fails
 **/
private String getFeatureNameIbatis(String feature_id)
                throws java.sql.SQLException
{
  final SqlMapClient sqlMap = DbSqlConfig.getSqlMapInstance();
  final Object featureName = sqlMap.queryForObject("getFeatureName", feature_id);
  return (String)featureName;
}
/**
 * Look up a feature name from its feature_id, via JDBC.
 *
 * Uses a PreparedStatement instead of concatenating feature_id into the
 * SQL (avoids injection / malformed queries), closes the statement in a
 * finally block (the previous version leaked it), and raises a clear
 * SQLException when no row matches instead of calling getString() on an
 * empty result set.
 *
 * @param feature_id identifier of the feature
 * @param conn open database connection
 * @return the feature name
 * @throws java.sql.SQLException if the query fails or no feature matches
 **/
private String getFeatureNameJdbc(String feature_id, Connection conn)
                throws java.sql.SQLException
{
  String sql = "SELECT name FROM feature WHERE feature_id= ?";
  appendToLogFile(sql, sqlLog);
  PreparedStatement st = conn.prepareStatement(sql);
  try
  {
    st.setInt(1, Integer.parseInt(feature_id));
    ResultSet rs = st.executeQuery();
    if(!rs.next())
      throw new SQLException("No feature with feature_id = " + feature_id);
    return rs.getString("name");
  }
  finally
  {
    st.close();  // also closes the ResultSet
  }
}
/**
 * Retrieve the features located on the given parent feature (chromosome,
 * contig, supercontig) as GFF lines, via iBatis.
 *
 * One ByteBuffer is filled per entry of the types array, plus a final
 * catch-all buffer for features whose type is not listed there.
 *
 * NOTE(review): the query is keyed on this.feature_id while the seqid
 * column is named from parentFeatureID — confirm callers always pass the
 * same id for both.
 *
 * @param parentFeatureID feature_id of the parent feature
 * @return array of GFF buffers, one per known type plus one for the rest
 * @throws java.sql.SQLException if a database query fails
 */
private ByteBuffer[] getGFFiBatis(String parentFeatureID)
                     throws java.sql.SQLException
{
  SqlMapClient sqlMap = DbSqlConfig.getSqlMapInstance();
  List featList = sqlMap.queryForList("getGffLine",
                                      new Integer(feature_id));
  // one buffer per feature type in types[], plus a final catch-all buffer
  ByteBuffer[] buffers = new ByteBuffer[types.length+1];
  for(int i=0; i<buffers.length; i++)
    buffers[i] = new ByteBuffer();
  String parentFeature = getFeatureNameIbatis(parentFeatureID);
  // maps feature_id -> uniquename, so Parent= ids can be resolved to names
  Hashtable hstore = new Hashtable();
  ByteBuffer this_buff;
  int feature_size = featList.size();
  for(int i=0; i<feature_size; i++)
  {
    Feature feat = (Feature)featList.get(i);
    int fmin = feat.getFmin()+1;   // chado fmin is 0-based; GFF start is 1-based
    int fmax = feat.getFmax();
    long type_id = feat.getType_id();
    long prop_type_id = feat.getProp_type_id();
    int strand = feat.getStrand();
    String name = feat.getUniquename();
    String typeName = getCvtermName(null,type_id);
    String propTypeName = getCvtermName(null,prop_type_id);
    String timelastmodified = feat.getTimelastmodified().toString();
    String feature_id = Integer.toString(feat.getId());
    hstore.put(feature_id, name);
    // replace the parent's feature_id with its uniquename if already seen
    String parent_id = feat.getObject_id();
    if(parent_id != null && hstore.containsKey(parent_id))
      parent_id = (String)hstore.get(parent_id);
    // make gff format
    // select buffer: catch-all unless typeName matches an entry in types[]
    this_buff = buffers[types.length];
    for(int j=0; j<types.length; j++)
    {
      if(types[j].equals(typeName))
        this_buff = buffers[j];
    }
    this_buff.append(parentFeature+"\t"); // seqid
    this_buff.append("chado\t");          // source
    this_buff.append(typeName+"\t");      // type
    this_buff.append(fmin+"\t");          // start
    this_buff.append(fmax+"\t");          // end
    this_buff.append(".\t");              // score
    if(strand == -1)                      // strand
      this_buff.append("-\t");
    else if(strand == 1)
      this_buff.append("+\t");
    else
      this_buff.append(".\t");
    this_buff.append(".\t");              // phase
    this_buff.append("ID="+name+";");
    if(parent_id != null)
      this_buff.append("Parent="+parent_id+";");
    this_buff.append("timelastmodified="+timelastmodified+";");
    String value = "";
    if(feat.getValue() != null)
      value = GFFStreamFeature.encode(feat.getValue());
    this_buff.append(propTypeName+"="+value); // attributes
    // is the next line part of the same feature, if so merge
    boolean rewind = false;
    Feature featNext = null;
    if(i < feature_size-1)
      featNext = (Feature)featList.get(i+1);
    // merge next line if part of the same feature
    // NOTE(review): propTypeName is not refreshed from featNext here,
    // although the JDBC variant re-reads prop_type_id per merged row —
    // confirm this is intentional.
    while(featNext != null &&
          featNext.getUniquename().equals(name))
    {
      value = GFFStreamFeature.encode(featNext.getValue());
      this_buff.append(";"+propTypeName+"="+value);
      i++;
      if(i < feature_size-1)
        featNext = (Feature)featList.get(i+1);
      else
        break;
    }
    this_buff.append("\n");
    progress_listener.progressMade("Read from database: "+name);
  }
  return buffers;
}
/**
*
* Given a parent (chromosome, contig, supercontig) retrieve the features
* in the form of a GFF stream.
*
*/
private ByteBuffer[] getGFFJdbc(Connection conn, String parentFeatureID)
Statement st = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
String sql =
"SELECT timelastmodified, feature.feature_id, object_id, strand, fmin, fmax, uniquename,"+
" feature.type_id, featureprop.type_id AS prop_type_id, featureprop.value"+
" FROM featureloc, feature"+
" LEFT JOIN feature_relationship ON feature_relationship.subject_id=feature.feature_id"+
" LEFT JOIN featureprop ON featureprop.feature_id=feature.feature_id"+
" WHERE srcfeature_id = "+parentFeatureID+" and featureloc.feature_id=feature.feature_id"+
" and (featureloc.rank=feature_relationship.rank OR feature_relationship.rank IS NULL)"+
" ORDER BY feature.type_id, uniquename";
Hashtable hstore = new Hashtable();
int fmin = rs.getInt("fmin")+1;
long type_id = rs.getLong("type_id");
long prop_type_id = rs.getLong("prop_type_id");
int strand = rs.getInt("strand");
String name = rs.getString("uniquename");
String typeName = getCvtermName(conn,type_id);
String propTypeName = getCvtermName(conn,prop_type_id);
String timelastmodified = rs.getString("timelastmodified");
String feature_id = rs.getString("feature_id");
hstore.put(feature_id, name);
String parent_id = rs.getString("object_id");
if(parent_id != null && hstore.containsKey(parent_id))
parent_id = (String)hstore.get(parent_id);
// select buffer
this_buff = buffers[types.length];
for(int i=0; i<types.length; i++)
{
if(types[i].equals(typeName))
this_buff = buffers[i];
}
this_buff.append(parentFeature+"\t"); // seqid
this_buff.append("chado\t"); // source
this_buff.append(typeName+"\t"); // type
this_buff.append(fmin+"\t"); // start
this_buff.append(fmax+"\t"); // end
this_buff.append(".\t"); // score
this_buff.append(".\t"); // phase
this_buff.append("ID="+name+";");
this_buff.append("timelastmodified="+timelastmodified+";");
String value = "";
if(rs.getString("value") != null)
value = GFFStreamFeature.encode(rs.getString("value"));
this_buff.append(propTypeName+"="+value); // attributes
boolean rewind = false;
while( (rewind = rs.next()) && rs.getString("uniquename").equals(name))
{
prop_type_id = rs.getLong("prop_type_id");
propTypeName = getCvtermName(conn,prop_type_id);
progress_listener.progressMade("Read from database: "+name);
{
Enumeration enum_cvterm = cvterm.keys();
while(enum_cvterm.hasMoreElements())
{
Long key = (Long)enum_cvterm.nextElement();
if(name.equals(cvterm.get(key)))
private String getCvtermName(Connection conn, long id)
{
if(cvterm == null)
{
if(iBatis)
getCvtermIbatis(null);
else
getCvterm(conn,null);
}
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
/**
 * Load cvterm ids and names into a hashtable, via iBatis.
 * The result is also cached in the cvterm field.
 *
 * @param cv_name controlled-vocabulary name (unused by this query)
 * @return hashtable mapping Long cvterm_id to String name
 */
private Hashtable getCvtermIbatis(String cv_name)
{
  cvterm = new Hashtable();
  try
  {
    SqlMapClient sqlMap = DbSqlConfig.getSqlMapInstance();
    List termList = sqlMap.queryForList("getCvterm", null);
    for(Iterator iter = termList.iterator(); iter.hasNext(); )
    {
      Cvterm term = (Cvterm)iter.next();
      cvterm.put(new Long(term.getId()), term.getName());
    }
  }
  catch (SQLException sqle)
  {
    System.err.println(this.getClass() + ": SQLException retrieving CvTerms");
    System.err.println(sqle);
  }
  return cvterm;
}
/**
*
* Look up cvterms names and id and return in a hashtable.
*
*/
private Hashtable getCvterm(Connection conn, String cv_name)
{
String sql = "SELECT cvterm.cvterm_id, cvterm.name " +
"FROM cvterm, cv WHERE cv.cv_id = cvterm.cv_id";
if(cv_name != null)
sql = sql + " AND cv.name='"+cv_name+"'";
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
cvterm = new Hashtable();
try
{
Statement s = conn.createStatement();
ResultSet rs = s.executeQuery(sql);
while(rs.next())
{
long id = rs.getLong("cvterm_id");
String name = rs.getString("name");
if(cvterm.get(name) != null)
System.err.println(this.getClass() + ": WARNING - read multiple CvTerms with name = '"
+ name + "'");
cvterm.put(new Long(id),name);
}
}
catch (SQLException sqle)
{
System.err.println(this.getClass() + ": SQLException retrieving CvTerms");
System.err.println(sqle);
}
return cvterm;
}
/**
 * Append the FASTA-format sequence of this document's feature to the
 * given buffer, via iBatis.
 *
 * @param buff buffer to append the sequence to
 * @return the same buffer, for chaining
 * @throws java.sql.SQLException if the query fails
 */
public ByteBuffer getSequenceIbatis(ByteBuffer buff) throws java.sql.SQLException
{
  final SqlMapClient sqlMap = DbSqlConfig.getSqlMapInstance();
  final Feature feat =
      (Feature)sqlMap.queryForObject("getSequence", new Integer(feature_id));
  // FASTA header line, then the residues
  buff.append("##FASTA\n>");
  buff.append(feat.getName());
  buff.append("\n");
  buff.append(feat.getResidues());
  return buff;
}
public ByteBuffer getSequence(Connection conn, ByteBuffer buff) throws java.sql.SQLException
{
Statement st = conn.createStatement();
String sql = "SELECT name, residues from feature where feature_id = '"+
feature_id+"'";
buff.append("##FASTA\n>");
buff.append(rs.getBytes("name"));
buff.append("\n");
buff.append(rs.getBytes("residues"));
return buff;
// return "##FASTA\n>" + name + "\n" + rs.getString("residues");
/**
 * Create a hashtable of the available entries, dispatching to the
 * iBatis or JDBC implementation according to the iBatis flag.
 */
public Hashtable getDatabaseEntries()
{
  return iBatis ? getDatabaseEntriesIbatis()
                : getDatabaseEntriesJdbc();
}
/**
*
* Create a hashtable of the available entries.
*
**/
db = new Hashtable();
organism = new Vector();
try
{
Connection conn = getConnection();
System.out.println("Connected");
Statement st = conn.createStatement();
String sql = "SELECT DISTINCT type_id FROM feature WHERE residues notnull";
Vector cvterm_id = new Vector();
while(rs.next())
cvterm_id.add(rs.getString("type_id"));
sql = new String("SELECT abbreviation, name, feature_id, type_id FROM organism, feature WHERE (");
for(int i=0; i<cvterm_id.size(); i++)
{
sql = sql + " type_id = "+ (String)cvterm_id.get(i);
if(i<cvterm_id.size()-1)
sql = sql + " OR ";
}
sql = sql + ") and organism.organism_id=feature.organism_id "+
"and residues notnull "+
String org = rs.getString("abbreviation");
String typeName = getCvtermName(conn,rs.getLong("type_id"));
db.put(org+" - "+typeName+" - "+rs.getString("name"),
rs.getString("feature_id"));
if(!organism.contains(org))
organism.add(org);
}
JOptionPane.showMessageDialog(null, "SQL Problems...",
"SQL Error",
JOptionPane.ERROR_MESSAGE);
sqlExp.printStackTrace();
}
catch(java.net.ConnectException conn)
{
JOptionPane.showMessageDialog(null, "Problems connecting...",
"Database Connection Error - Check Server",
JOptionPane.ERROR_MESSAGE);
conn.printStackTrace();
}
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
/**
 * Create a hashtable of the available entries, via iBatis.
 *
 * Keys have the form "organism - type - name" and values are feature_id
 * strings. As a side effect the organism vector is (re)filled with the
 * distinct organism abbreviations encountered.
 *
 * @return the hashtable of available entries
 **/
private Hashtable getDatabaseEntriesIbatis()
{
  db = new Hashtable();
  organism = new Vector();
  try
  {
    SqlMapClient sqlMap = DbSqlConfig.getSqlMapInstance();
    List residueTypes = sqlMap.queryForList("getResidueType", null);
    List residueFeatures = sqlMap.queryForList("getResidueFeatures", residueTypes);
    for(Iterator iter = residueFeatures.iterator(); iter.hasNext(); )
    {
      Feature feat = (Feature)iter.next();
      String org = feat.getAbbreviation();
      String typeName = getCvtermName(null, feat.getType_id());
      db.put(org + " - " + typeName + " - " + feat.getName(),
             Integer.toString(feat.getId()));
      if(!organism.contains(org))
        organism.add(org);
    }
  }
  catch(java.sql.SQLException sqlExp)
  {
    JOptionPane.showMessageDialog(null, "SQL Problems...",
                                  "SQL Error",
                                  JOptionPane.ERROR_MESSAGE);
    sqlExp.printStackTrace();
  }
  return db;
}
/**
 * Return the vector of organism abbreviations filled in by
 * getDatabaseEntries(); null if the entries have not been read yet.
 */
public Vector getOrganism()
{
return organism;
}
/**
 * Open a JDBC connection using this Document's location string as the
 * database URL (e.g. jdbc:postgresql://host:port/database_name?user=username).
 *
 * @throws java.sql.SQLException if the connection cannot be established
 * @throws java.net.ConnectException if the server cannot be reached
 */
public Connection getConnection() throws java.sql.SQLException,
java.net.ConnectException
{
return DriverManager.getConnection((String)getLocation());
}
/**
 * Create a new OutputStream object from this Document. The contents of the
 * Document can be written from the stream.
 *
 * @exception IOException Thrown if the Document can't be written.
 *            Always thrown here: database documents are read-only.
 *
 * NOTE(review): writable() reports true even though this method always
 * throws ReadOnlyException — confirm which behaviour is intended.
 **/
public OutputStream getOutputStream() throws IOException
{
System.out.println("DatabaseDocument - ReadOnlyException");
throw new ReadOnlyException ("this Database Document can not be written to");
}
/**
 * Appends a time-stamped log entry to the given log file.
 *
 * Logging is best-effort: I/O failures are reported to stdout rather than
 * propagated. The catch has been narrowed from Exception to IOException —
 * everything in the try block can only fail with an I/O error, and the
 * broad catch would have hidden programming errors.
 *
 * @param logEntry entry to add to log file
 * @param logFileName log file name
 */
private void appendToLogFile(String logEntry, String logFileName)
{
  BufferedWriter bw = null;
  try
  {
    String dat = new java.util.Date().toString();
    bw = new BufferedWriter(new FileWriter(logFileName, true));
    bw.write(dat + ":: " + logEntry);
    bw.newLine();
    bw.flush();
  }
  catch (IOException ioe)
  {
    System.out.println("Error writing to log file " + logFileName);
    ioe.printStackTrace();
  }
  finally // always close the file
  {
    if(bw != null)
      try
      {
        bw.close();
      }
      catch (IOException ioe2) {}
  }
}
/**
 * Commit a set of ChadoTransaction updates to the database.
 *
 * Fixes over the previous version: a single Statement is reused instead of
 * creating (and leaking) one per query inside the inner loop, and both the
 * Statement and the Connection are closed in finally blocks — previously
 * the connection leaked whenever an exception was thrown mid-commit.
 *
 * @param sql vector of ChadoTransaction objects whose queries are executed
 */
public void commit(Vector sql)
{
  Connection conn = null;
  try
  {
    conn = getConnection();
    Statement st = conn.createStatement();
    try
    {
      int row = 0;
      for(int i = 0; i < sql.size(); i++)
      {
        ChadoTransaction tsn = (ChadoTransaction)sql.get(i);
        String[] sql_array = tsn.getSqlQuery();
        for(int j = 0; j < sql_array.length; j++)
        {
          System.out.println(sql_array[j]);
          row += st.executeUpdate(sql_array[j]);
        }
      }
    }
    finally
    {
      st.close();
    }
  }
  catch(java.sql.SQLException sqlExp)
  {
    sqlExp.printStackTrace();
  }
  catch(java.net.ConnectException connExp)
  {
    JOptionPane.showMessageDialog(null, "Problems connecting...",
                                  "Database Connection Error - Check Server",
                                  JOptionPane.ERROR_MESSAGE);
    connExp.printStackTrace();
  }
  finally
  {
    if(conn != null)
      try
      {
        conn.close();
      }
      catch(java.sql.SQLException e) {}
  }
}