/* DatabaseDocument.java
 *
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
*/
package uk.ac.sanger.artemis.util;
import uk.ac.sanger.artemis.chado.*;
/**
 * Objects of this class are Documents created from a relational database.
 *
 */
/** database schema to query; defaults to the postgres "public" schema */
private String schema = "public";

/** listener notified of read progress; may be null (set by a constructor) */
private InputStreamProgressListener progress_listener;

/** JDBC DAO */
private JdbcDAO jdbcDAO = null;

/** iBatis DAO */
private IBatisDAO connIB = null;

/** feature types that get their own GFF entry when splitting;
    any other type is grouped into an "other" entry (see getGffDocuments) */
private String[] types = { "exon", "gene", "CDS", "transcript" };
/**
 *
 * Create a new Document from a database.
 *
 * @param location
 *          This should be a URL string giving:
 *          jdbc:postgresql://host:port/database_name?user=username
 *
 */
public DatabaseDocument(String location, JPasswordField pfield)
// NOTE(review): constructor body elided in this excerpt.

/**
 *
 * Create a new Document from a database.
 *
 * @param location
 *          This should be a URL string giving:
 *          jdbc:postgresql://host:port/database_name?user=username
 * @param feature_id
 *          ID of a feature to be extracted.
 *
 */
public DatabaseDocument(String location, JPasswordField pfield,
// NOTE(review): remaining parameters and constructor body elided in this excerpt.

/**
 *
 * Create a new Document from a database.
 *
 * @param location
 *          This should be a URL string giving:
 *          jdbc:postgresql://host:port/database_name?user=username
 * @param feature_id
 *          ID of a feature to be extracted.
 * @param splitGFFEntry
 *          split into separate entries based on feature types.
 * @param progress_listener
 *          input stream progress listener
 *
 */
public DatabaseDocument(String location, JPasswordField pfield,
                        InputStreamProgressListener progress_listener)
{
super(location);
this.progress_listener = progress_listener;
// NOTE(review): the lines below belong to other, elided members (a further
// constructor signature and a return statement from an elided method).
public DatabaseDocument(String location, JPasswordField pfield,
return new DatabaseDocument( ((String)getLocation()) + name, pfield);
/**
 * Return the name of this Document (the last element of the Document
 * location).
 */
public String getName()
// NOTE(review): body partially elided. The visible lines truncate the JDBC
// URL location at the '?' (dropping "?user=..."); the rest of the name
// derivation is not visible in this excerpt.
int ind = ((String) getLocation()).indexOf("?");
String name = ((String) getLocation()).substring(0, ind);
/**
 * Set the name of this document.
 *
 * @param name the new name for this document
 */
public void setName(final String name)
{
  this.name = name;
}
/**
 * Return a Document with the last element stripped off.
 */
// NOTE(review): the method for the javadoc above is elided in this excerpt.
/**
 * Return true if and only if the Document referred to by this object exists
 * and is readable. Always returns true.
 */
public boolean readable()
// NOTE(review): body elided (documented above to always return true).
/**
 * Return true if and only if the Document referred to by this object exists
 * and can be written to. Always returns false.
 */
public boolean writable()
// NOTE(review): body elided (documented above to always return false).
/**
 * Create a new InputStream object from this Document. The contents of the
 * Document can be read from the InputStream.
 *
 * @exception IOException
 *              Thrown if the Document can't be read from (for example if it
 *              doesn't exist).
 */
public InputStream getInputStream() throws IOException
ByteArrayInputStream instream;
// serve the stream straight from the cached GFF buffer if one exists
if(gff_buff != null)
{
instream = new ByteArrayInputStream(gff_buff.getBytes());
// NOTE(review): lines elided here — the code below instead reads the
// feature's GFF lines from the database via the DAO.
ChadoDAO dao = getDAO();
gff_buffer = getGff(dao, feature_id);
{
if(gff_buffer[0].size() > 0)
entry.append(gff_buffer[0]);
{
// append every non-empty per-type buffer into one entry
if(gff_buffer[i].size() > 0)
entry.append(gff_buffer[i]);
}
}
instream = new ByteArrayInputStream(entry.getBytes());
return instream;
}
catch(java.sql.SQLException sqlExp)
{
// report database read problems to the user; null is returned below
JOptionPane.showMessageDialog(null, "Problems Reading...\n" +
sqlExp.getMessage(),
"Problems Reading From the Database ",
// NOTE(review): the dialog's message-type argument is elided here.
sqlExp.printStackTrace();
}
return null;
}
/**
 *
 * Called (by DatabaseEntrySource) to retrieve all the documents for each
 * entry created.
 *
 */
public DatabaseDocument[] getGffDocuments(String location, String id,
                                          String schema)
{
// NOTE(review): loop header elided — counts the non-empty GFF buffers
if(gff_buffer[i].size() > 0)
nentries++;
}
DatabaseDocument[] new_docs = new DatabaseDocument[nentries];
nentries = 0;
// NOTE(review): loop header elided — builds one document per non-empty buffer
String name;
// feature types beyond the named ones are grouped into a single "other" entry
if(i >= types.length)
name = "other";
else
name = types[i];
new_docs[nentries] = new DatabaseDocument(location, pfield, id, schema,
                                          gff_buffer[i], name);
/**
 * Create an array of GFF lines.
 * @param dao the data access object
 * @param parentFeatureID the parent identifier for the features to
 *          extract
 * @return the <code>ByteBuffer</code> array of GFF lines
 * @throws java.sql.SQLException
 */
private ByteBuffer[] getGff(ChadoDAO dao, String parentFeatureID)
         throws java.sql.SQLException
// NOTE(review): parts of this method are elided in this excerpt; the
// comments below describe the visible fragments only.
// one buffer per named feature type, plus one for everything else
ByteBuffer[] buffers = new ByteBuffer[types.length + 1];
for(int i = 0; i < buffers.length; i++)
String parentFeature = dao.getFeatureName(srcfeature_id, schema);
Hashtable id_store = new Hashtable(feature_size);

// build feature name store
for(int i = 0; i < feature_size; i++)
{
String name = feat.getUniquename();
String feature_id = Integer.toString(feat.getId());
// get all synonyms
Hashtable synonym = dao.getAlias(schema, null);
{
gff_source = null;
// +1: converts chado's fmin to GFF's 1-based start coordinate
int fmin = feat.getFeatureloc().getFmin() + 1;
int fmax = feat.getFeatureloc().getFmax();
long type_id = feat.getCvterm().getId(); //.getType_id();
int strand = feat.getFeatureloc().getStrand();
int phase = feat.getFeatureloc().getPhase();
String name = feat.getUniquename();
String typeName = getCvtermName(type_id);
String timelastmodified = Long.toString(feat.getTimelastmodified().getTime());
String parent_id = null;
if(feat.getFeature_relationship() != null)
parent_id = Integer.toString(feat.getFeature_relationship().getObject_id());
// translate a parent feature_id to its uniquename where known
if(parent_id != null && id_store.containsKey(parent_id))
parent_id = (String)id_store.get(parent_id);
// select the output buffer matching this feature's type
{
if(types[j].equals(typeName))
this_buff = buffers[j];
}
Vector dbxref = null;
// append dbxrefs
if(dbxrefs != null &&
   dbxrefs.containsKey(new Integer(feature_id)))
{
dbxref = (Vector)dbxrefs.get(new Integer(feature_id));
for(int j=0; j<dbxref.size(); j++)
{
// a GFF_source dbxref overrides the default "chado" source column
if(((String)dbxref.get(j)).startsWith("GFF_source:"))
{
gff_source = ((String)dbxref.get(j)).substring(11);
dbxref.removeElementAt(j);
}
}
}
if(gff_source != null)
this_buff.append(gff_source+"\t"); // source
else
this_buff.append("chado\t");
this_buff.append(typeName + "\t"); // type
this_buff.append(fmin + "\t");     // start
this_buff.append(fmax + "\t");     // end
this_buff.append(".\t");           // score
// strand column; NOTE(review): the preceding strand == -1 test is elided
this_buff.append("-\t");
else if(strand == 1)
this_buff.append("+\t");
else
this_buff.append(".\t");
if(phase > 3)
this_buff.append(".\t"); // phase
else
this_buff.append(phase+"\t");
this_buff.append("timelastmodified=" + timelastmodified + ";");

// attributes
Hashtable qualifiers = feat.getQualifiers();
if(qualifiers != null)
Enumeration e_qualifiers = qualifiers.keys();
while(e_qualifiers.hasMoreElements())
{
Long qualifier_type_id = (Long)e_qualifiers.nextElement();
String qualifier_name = getCvtermName(qualifier_type_id.longValue());
// skip qualifiers whose cvterm name cannot be resolved
if(qualifier_name == null)
continue;
Vector qualifier_value = (Vector)qualifiers.get(qualifier_type_id);
for(int j=0; j<qualifier_value.size(); j++)
{
ChadoFeatureProp featprop = (ChadoFeatureProp)qualifier_value.get(j);
// append the dbxrefs remaining after any GFF_source entry was removed
if(dbxref != null && dbxref.size() > 0)
{
this_buff.append("Dbxref=");
for(int j=0; j<dbxref.size(); j++)
{
this_buff.append((String)dbxref.get(j));
if(j<dbxref.size()-1)
this_buff.append(",");
}
}
// append synonyms as cvterm_name=alias_name attributes
if(synonym != null &&
   synonym.containsKey(new Integer(feature_id)))
{
this_buff.append(";");
Alias alias;
Vector v_synonyms = (Vector)synonym.get(new Integer(feature_id));
for(int j=0; j<v_synonyms.size(); j++)
{
alias = (Alias)v_synonyms.get(j);
this_buff.append(alias.getCvterm_name()+"=");
this_buff.append(alias.getName());
// notify the listener (if any) that this feature's lines were read
progress_listener.progressMade("Read from database: " + name);
/**
 * Look up the cvterm_id for a controlled vocabulary name.
 * @param name
 * @return
 */
// NOTE(review): the method signature for the javadoc above is elided.
{
// linear scan of the cached cvterm map for a matching name
Enumeration enum_cvterm = cvterm.keys();
while(enum_cvterm.hasMoreElements())
{
Long key = (Long)enum_cvterm.nextElement();
if(name.equals(cvterm.get(key)))
/**
 * Look up a cvterm name from the collection of cvterms.
 * @param id a cvterm_id
 * @return the cvterm name
 */
// NOTE(review): signature elided; lazily populates the cvterm cache on
// first use via getCvterm(getDAO()).
try
{
getCvterm(getDAO());
}
catch(ConnectException ce)
{
ce.printStackTrace();
}
catch(SQLException sqle)
{
JOptionPane.showMessageDialog(null,
"Problems Looking Up cvterm Name (cvterm_id="+
Long.toString(id)+") ...\n" +
sqle.getMessage(),
"Cvterm Name Look Up",
JOptionPane.ERROR_MESSAGE);
/**
 * Build the cvterm_id to name lookup from the database.
 * @param dao the data access object
 * @return the cvterm <code>Hashtable</code>
 */
// NOTE(review): signature and list retrieval elided in this excerpt.
Iterator it = cvtem_list.iterator();
while(it.hasNext())
{
Cvterm cv = (Cvterm)it.next();
cvterm.put(new Long(cv.getId()), cv.getName());
}
}
{
System.err.println(this.getClass() + ": SQLException retrieving CvTerms");
System.err.println(sqle);
}
return cvterm;
}
/**
 * Get the sequence for a feature.
 * @param dao the data access object
 * @param buff the buffer to add the sequence to
 * @return the resulting buffer
 * @throws java.sql.SQLException
 */
private ByteBuffer getSequence(ChadoDAO dao, ByteBuffer buff)
        throws java.sql.SQLException
// NOTE(review): body mostly elided; only the DAO sequence lookup is visible.
ChadoFeature feature = dao.getSequence(Integer.parseInt(feature_id),
/**
 * Get the <code>List</code> of available database schemas.
 *
 * @return the <code>List</code> of available schemas
 */
public List getSchema()
{
  return this.schema_list;
}
/**
 * Create a hashtable of the available entries with residues.
 * @return a <code>Hashtable</code> of the <code>String</code>
 *          representation (schema-type-feature_name) and the
 *          corresponding feature_id
 * @throws ConnectException
 * @throws java.sql.SQLException
 */
// NOTE(review): the method signature for the javadoc above is elided.
ChadoDAO dao = null;
try
{
dao = getDAO();
}
catch(ConnectException exp)
{
// surface connection problems to the user, then rethrow for the caller
JOptionPane.showMessageDialog(null, "Connection Problems...\n"+
exp.getMessage(),
"Connection Error",
JOptionPane.ERROR_MESSAGE);
throw exp;
}
catch(java.sql.SQLException sqlExp)
{
JOptionPane.showMessageDialog(null, "SQL Problems...\n"+
sqlExp.getMessage(),
"SQL Error",
JOptionPane.ERROR_MESSAGE);
throw sqlExp;
}
// NOTE(review): the enclosing per-schema loop is elided in this excerpt.
if(list.size() == 0) // no residues for this organism
continue;
List list_residue_features = dao.getResidueFeatures(list, schema);
Iterator it_residue_features = list_residue_features.iterator();
while(it_residue_features.hasNext())
{
ChadoFeature feature = (ChadoFeature)it_residue_features.next();
String typeName = getCvtermName(feature.getCvterm().getId());
// key format: "schema - type - feature name"
db.put(schema + " - " + typeName + " - " + feature.getName(),
// NOTE(review): the rest of this method and its catch block are elided.
JOptionPane.showMessageDialog(null, "SQL Problems...\n"+
sqlExp.getMessage(),
"SQL Error",
/**
 * Get the data access object (DAO), constructing it lazily on first use
 * and caching it for subsequent calls.
 *
 * @return the <code>IBatisDAO</code> when the iBatis flag is set,
 *         otherwise a <code>JdbcDAO</code> built from this document's
 *         location (the JDBC URL)
 * @throws java.net.ConnectException if connecting to the database fails
 * @throws SQLException if a database access error occurs
 */
private ChadoDAO getDAO()
    throws java.net.ConnectException, SQLException
{
  if(iBatis)
  {
    // iBatis DAO, created once and reused
    if(connIB == null)
      connIB = new IBatisDAO(pfield);
    return connIB;
  }

  // plain JDBC DAO, created once from the document location
  if(jdbcDAO == null)
    jdbcDAO = new JdbcDAO((String)getLocation(), pfield);
  return jdbcDAO;
}
/**
 * Create a new OutputStream object from this Document. The contents of the
 * Document can be written from the stream.
 *
 * @exception IOException
 *              Thrown if the Document can't be written.
 */
public OutputStream getOutputStream() throws IOException
{
// write to a file of the same name in the current working directory
final File write_file = new File(System.getProperty("user.dir")+
                                 System.getProperty("file.separator")+
                                 getName());

final FileOutputStream file_output_stream =
    new FileOutputStream(write_file);

if(write_file.getName().endsWith(".gz"))
{
// assume this file should be gzipped
return new java.util.zip.GZIPOutputStream (file_output_stream);
}
else
return file_output_stream;
// NOTE(review): the method's closing brace is elided in this excerpt.
/**
 * Commit the <code>ChadoTransaction</code> SQL back to the
 * database.
 * @param sql the collection of <code>ChadoTransaction</code> objects
 * @return 0 if a feature timestamp check fails (the feature changed in the
 *         database and the user declined to overwrite); the success return
 *         value is not visible in this excerpt
 */
public int commit(Vector sql)
boolean unchanged;
//
// check feature timestamps have not changed
Vector names_checked = new Vector();
// NOTE(review): the enclosing loop and the tsn declaration are elided.
// NOTE(review): this condition is always true (x != A || x != B); it was
// presumably intended to use && — verify against the full source.
if(tsn.getType() != ChadoTransaction.INSERT_FEATURE ||
   tsn.getType() != ChadoTransaction.DELETE_FEATURE)
{
// only check each uniquename once
if(names_checked.contains((String)uniquename.get(j)))
continue;
names_checked.add((String)uniquename.get(j));
unchanged = checkFeatureTimestamp(schema,
                                  (String)uniquename.get(j),
                                  tsn.getLastModified(), dao);
// abort the whole commit if any feature changed since it was read
if(!unchanged)
return 0;
}
}

//
// commit to database
for(i = 0; i < sql.size(); i++)
{
ChadoTransaction tsn = (ChadoTransaction) sql.get(i);
// dispatch each transaction type to the matching DAO operation
if(tsn.getType() == ChadoTransaction.UPDATE)
dao.updateAttributes(schema, tsn);
else if(tsn.getType() == ChadoTransaction.INSERT)
dao.insertAttributes(schema, tsn);
else if(tsn.getType() == ChadoTransaction.DELETE)
dao.deleteAttributes(schema, tsn);
else if(tsn.getType() == ChadoTransaction.INSERT_FEATURE)
dao.insertFeature(schema, tsn, feature_id);
else if(tsn.getType() == ChadoTransaction.DELETE_FEATURE)
dao.deleteFeature(schema, tsn);
else if(tsn.getType() == ChadoTransaction.DELETE_DBXREF)
dao.deleteFeatureDbxref(schema, tsn);
else if(tsn.getType() == ChadoTransaction.INSERT_DBXREF)
dao.insertFeatureDbxref(schema, tsn);
else if(tsn.getType() == ChadoTransaction.DELETE_ALIAS)
dao.deleteFeatureAlias(schema, tsn);
else if(tsn.getType() == ChadoTransaction.INSERT_ALIAS)
dao.insertFeatureAlias(schema, tsn);

//
// update timelastmodified timestamp
Timestamp ts = null;
Timestamp ts2;
names_checked = new Vector();
for(int j = 0; j < sql.size(); j++)
{
ChadoTransaction tsn = (ChadoTransaction) sql.get(j);
// NOTE(review): same always-true condition as above — presumably &&.
if(tsn.getType() != ChadoTransaction.INSERT_FEATURE ||
   tsn.getType() != ChadoTransaction.DELETE_FEATURE)
{
final List uniquename = tsn.getUniquename();

// update timelastmodified timestamp
for(int k=0; k<uniquename.size(); k++)
{
if(names_checked.contains((String)uniquename.get(k)))
continue;
names_checked.add((String)uniquename.get(k));
dao.writeTimeLastModified(schema, (String)uniquename.get(k), ts);
ts2 = dao.getTimeLastModified(schema, (String)uniquename.get(k));
// NOTE(review): lines appear elided between these conditionals.
if(ts2 == null)
if(ts == null)
ts = ts2;
// record the new timestamp on the in-memory GFF feature
GFFStreamFeature gff_feature = (GFFStreamFeature)tsn.getFeatureObject();
gff_feature.setLastModified(ts);
// NOTE(review): the error dialogs below belong to elided catch blocks.
JOptionPane.showMessageDialog(null, "Problems Writing...\n" +
JOptionPane.showMessageDialog(null, "Problems connecting..."+
conn_ex.getMessage(),
"Database Connection Error - Check Server",
JOptionPane.ERROR_MESSAGE);
/**
 * Check the <code>Timestamp</code> on a feature (for versioning).
 * @param schema the schema
 * @param uniquename the feature uniquename
 * @param timestamp the last read feature timestamp
 * @param dao the data access object
 * @return true if the user elects to overwrite a feature that changed in
 *         the database since it was read; false if the user cancels
 *         (the unchanged-feature return path is elided in this excerpt)
 * @throws SQLException
 */
public boolean checkFeatureTimestamp(final String schema,
                                     final String uniquename,
                                     final Timestamp timestamp,
                                     final ChadoDAO dao)
     throws SQLException
{
Timestamp now = dao.getTimeLastModified(schema, uniquename);
// NOTE(review): a null guard around the block below appears elided here.
{
// drop sub-second precision before comparing
now.setNanos(0);
timestamp.setNanos(0);
if(now.compareTo(timestamp) != 0)
{
SimpleDateFormat date_format =
    new SimpleDateFormat("dd.MM.yyyy hh:mm:ss z");
//System.out.println(date_format.format(now)+" "+
//                   date_format.format(timestamp));
// the feature changed since it was read — ask the user before overwriting
// NOTE(review): "Overwite" typo in the dialog string; fix in full source.
int select = JOptionPane.showConfirmDialog(null, uniquename +
                                           date_format.format(now)+"\nOverwite?",
                                           "Feature Changed",
                                           JOptionPane.OK_CANCEL_OPTION);
if(select == JOptionPane.OK_OPTION)
return true;
else
return false;
/**
 * Simple command-line test: queries features via the iBatis SqlMapClient
 * and prints their locations. (Parts of this method are elided in this
 * excerpt — notably the construction of the query feature and the loop
 * over the returned list.)
 */
public static void main(String args[])
{
try
{
DbSqlConfig.init(new JPasswordField());
SqlMapClient sqlMap = DbSqlConfig.getSqlMapInstance();
List featureList = sqlMap.queryForList("getFeature", feature);
// +1 on fmin presumably converts to a 1-based start — confirm in full source
int fmin = feature.getFeatureloc().getFmin() + 1;
int fmax = feature.getFeatureloc().getFmax();
//System.out.print(" "+feature.getProp_cvterm().getId());
System.out.print(" "+feature.getFeatureloc().getStrand());
System.out.println(" "+Integer.toString(feature.getId()));
}
}
catch(SQLException sqle)
{
sqle.printStackTrace();
}
}