/*
 *
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
*/
package uk.ac.sanger.artemis.util;

import com.ibatis.sqlmap.client.SqlMapClient;

import java.sql.*;
import java.io.*;
import java.net.ConnectException;
import java.util.Hashtable;
import java.util.Vector;
import java.util.Enumeration;
import java.util.List;
import java.util.Iterator;

import javax.swing.JOptionPane;
import javax.swing.JPasswordField;

import uk.ac.sanger.artemis.chado.*;
import uk.ac.sanger.artemis.io.GFFStreamFeature;
/**
 * Objects of this class are Documents created from a relational database.
 */
public class DatabaseDocument extends Document
{
  /** database schema */
  private String schema = "public";

  private InputStreamProgressListener progress_listener;

  /** password field used to connect to the database */
  private JPasswordField pfield = null;

  /** JDBC DAO */
  private JdbcDAO jdbcDAO = null;

  /** iBatis DAO */
  private IBatisDAO connIB = null;

  /** true if the iBatis DAO is used rather than JDBC */
  private boolean iBatis = false;

  // fields referenced by the constructors and methods below
  private String feature_id;
  private String name;
  private boolean splitGFFEntry;
  private ByteBuffer gff_buff;
  private ByteBuffer[] gff_buffer;
  private static Hashtable cvterm;
  private Vector organism;
  private Hashtable org2schema;

  /** feature types that are split into separate entries */
  private String[] types = { "exon", "gene", "CDS", "transcript" };
  /**
   * Create a new Document from a database.
   *
   * @param location
   *          This should be a URL string giving:
   *          jdbc:postgresql://host:port/database_name?user=username
   */
  public DatabaseDocument(String location, JPasswordField pfield)
  {
    super(location);
    this.pfield = pfield;
  }
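  // A minimal usage sketch (hypothetical host, database and user name): the
  // whole connection string, including the "?user=" suffix, is the Document
  // location, which getConnection() later splits apart.
  //
  //   JPasswordField pfield = new JPasswordField("secret");
  //   DatabaseDocument doc = new DatabaseDocument(
  //       "jdbc:postgresql://localhost:5432/chado?user=fred", pfield);
  //   InputStream in = doc.getInputStream();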
  /**
   * Create a new Document from a database.
   *
   * @param location
   *          This should be a URL string giving:
   *          jdbc:postgresql://host:port/database_name?user=username
   * @param feature_id
   *          ID of a feature to be extracted.
   */
  public DatabaseDocument(String location, JPasswordField pfield,
                          String feature_id, String schema)
  {
    super(location);
    this.pfield = pfield;
    this.feature_id = feature_id;
    this.schema = schema;
  }
  /**
   * Create a new Document from a database.
   *
   * @param location
   *          This should be a URL string giving:
   *          jdbc:postgresql://host:port/database_name?user=username
   * @param feature_id
   *          ID of a feature to be extracted.
   * @param splitGFFEntry
   *          split into separate entries based on feature types.
   * @param progress_listener
   *          input stream progress listener
   */
  public DatabaseDocument(String location, JPasswordField pfield,
                          String feature_id, String schema,
                          boolean splitGFFEntry,
                          InputStreamProgressListener progress_listener)
  {
    super(location);
    this.pfield = pfield;
    this.feature_id = feature_id;
    this.schema = schema;
    this.splitGFFEntry = splitGFFEntry;
    this.progress_listener = progress_listener;
  }

  /**
   * Create a new Document from a previously read GFF buffer.
   */
  public DatabaseDocument(String location, JPasswordField pfield,
                          String feature_id, String schema,
                          ByteBuffer gff_buff, String name)
  {
    super(location);
    this.pfield = pfield;
    this.feature_id = feature_id;
    this.schema = schema;
    this.gff_buff = gff_buff;
    this.name = name;
  }
  /**
   * Append a String to the Document location.
   *
   * @param name The name to append.
   */
  public Document append(String name) throws IOException
  {
    return new DatabaseDocument( ((String)getLocation()) + name, pfield);
  }
  /**
   * Return the name of this Document (the last element of the Document
   * location).
   */
  public String getName()
  {
    if(name == null)
    {
      int ind = ((String)getLocation()).indexOf("?");
      String name = ((String)getLocation()).substring(0, ind);
      ind = name.lastIndexOf("/");
      return name.substring(ind + 1);
    }
    return name;
  }
/**
*
* Set the name of this document.
*
*/
public void setName(String name)
{
this.name = name;
}
  /**
   * Return a Document with the last element stripped off.
   */
  public Document getParent()
  {
    return null;
  }

  /**
   * Return true if and only if the Document referred to by this object exists
   * and is readable. Always returns true.
   */
  public boolean readable()
  {
    return true;
  }
  /**
   * Return true if and only if the Document referred to by this object exists
   * and can be written to. Always returns false.
   */
  public boolean writable()
  {
    return false;
  }
  /**
   * Create a new InputStream object from this Document. The contents of the
   * Document can be read from the InputStream.
   *
   * @exception IOException
   *              Thrown if the Document can't be read from (for example if it
   *              doesn't exist).
   */
  public InputStream getInputStream() throws IOException
  {
    ByteArrayInputStream instream;

    if(gff_buff != null)
    {
      instream = new ByteArrayInputStream(gff_buff.getBytes());
      return instream;
    }

    try
    {
      ChadoDAO dao = getDAO();
      gff_buffer = getGff(dao, feature_id);

      ByteBuffer entry = new ByteBuffer();
      if(splitGFFEntry)
      {
        // only the first buffer goes into this entry; the rest are
        // retrieved by getGffDocuments()
        if(gff_buffer[0].size() > 0)
          entry.append(gff_buffer[0]);
      }
      else
      {
        for(int i = 0; i < gff_buffer.length; i++)
        {
          if(gff_buffer[i].size() > 0)
            entry.append(gff_buffer[i]);
        }
      }
      getSequence(dao, entry);

      instream = new ByteArrayInputStream(entry.getBytes());
      return instream;
    }
    catch(java.sql.SQLException sqlExp)
    {
      System.out.println("Problems connecting...");
      sqlExp.printStackTrace();
    }
    catch(java.net.ConnectException ce)
    {
      ce.printStackTrace();
    }
    return null;
  }
  /**
   * Called (by DatabaseEntrySource) to retrieve all the documents for each
   * entry created.
   */
  public DatabaseDocument[] getGffDocuments(String location, String id,
                                            String schema)
  {
    int nentries = 0;
    for(int i = 1; i < gff_buffer.length; i++)
    {
      if(gff_buffer[i].size() > 0)
        nentries++;
    }

    DatabaseDocument[] new_docs = new DatabaseDocument[nentries];
    nentries = 0;

    for(int i = 1; i < gff_buffer.length; i++)
    {
      if(gff_buffer[i].size() == 0)
        continue;

      String name;
      if(i >= types.length)
        name = "other";
      else
        name = types[i];

      new_docs[nentries] = new DatabaseDocument(location, pfield, id, schema,
                                                gff_buffer[i], name);
      nentries++;
    }

    return new_docs;
  }
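  // The documents produced above are named after the types array ("exon",
  // "gene", "CDS", "transcript"); buffers for any remaining feature types are
  // collected into a single entry named "other".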
  /**
   * Read the GFF for a feature and its children, splitting the lines
   * between buffers according to feature type.
   */
  private ByteBuffer[] getGff(ChadoDAO dao, String parentFeatureID)
          throws java.sql.SQLException
  {
    final int srcfeature_id = Integer.parseInt(parentFeatureID);

    ByteBuffer[] buffers = new ByteBuffer[types.length + 1];
    for(int i = 0; i < buffers.length; i++)
      buffers[i] = new ByteBuffer();

    List featList = dao.getGff(srcfeature_id, schema);
    String parentFeature = dao.getFeatureName(srcfeature_id, schema);

    final int feature_size = featList.size();
    Hashtable id_store = new Hashtable(feature_size);

    // build feature name store
    for(int i = 0; i < feature_size; i++)
    {
      ChadoFeature feat  = (ChadoFeature)featList.get(i);
      String name        = feat.getUniquename();
      String feature_id  = Integer.toString(feat.getId());
      id_store.put(feature_id, name);
    }

    for(int i = 0; i < feature_size; i++)
    {
      ChadoFeature feat = (ChadoFeature)featList.get(i);
      int fmin          = feat.getFmin() + 1;  // chado is interbase, GFF is 1-based
      int fmax          = feat.getFmax();
      long type_id      = feat.getType_id();
      long prop_type_id = feat.getProp_type_id();
      int strand        = feat.getStrand();
      int phase         = feat.getPhase();
      String name       = feat.getUniquename();
      String typeName   = getCvtermName(type_id);
      String propTypeName = getCvtermName(prop_type_id);
      String timelastmodified = feat.getTimelastmodified().toString();

      String parent_id = feat.getObject_id();
      if(parent_id != null && id_store.containsKey(parent_id))
        parent_id = (String)id_store.get(parent_id);

      // select the buffer by feature type; unknown types go in the last buffer
      ByteBuffer this_buff = buffers[types.length];
      for(int j = 0; j < types.length; j++)
      {
        if(types[j].equals(typeName))
          this_buff = buffers[j];
      }

      this_buff.append(parentFeature + "\t"); // seqid
      this_buff.append("chado\t");            // source
      this_buff.append(typeName + "\t");      // type
      this_buff.append(fmin + "\t");          // start
      this_buff.append(fmax + "\t");          // end
      this_buff.append(".\t");                // score

      if(strand == -1)                        // strand
        this_buff.append("-\t");
      else if(strand == 1)
        this_buff.append("+\t");
      else
        this_buff.append(".\t");

      if(phase > 3)                           // phase
        this_buff.append(".\t");
      else
        this_buff.append(phase + "\t");

      this_buff.append("ID=" + name + ";");
      if(parent_id != null)
        this_buff.append("Parent=" + parent_id + ";");
      this_buff.append("timelastmodified=" + timelastmodified + ";");

      String value = "";
      if(feat.getValue() != null)
        value = GFFStreamFeature.encode(feat.getValue());
      this_buff.append(propTypeName + "=" + value);

      // attributes
      Hashtable qualifiers = feat.getQualifiers();
      if(qualifiers != null)
      {
        Enumeration e_qualifiers = qualifiers.keys();
        while(e_qualifiers.hasMoreElements())
        {
          Long qualifier_type_id = (Long)e_qualifiers.nextElement();
          String qualifier_name = getCvtermName(qualifier_type_id.longValue());
          if(qualifier_name == null)
            continue;

          Vector qualifier_value = (Vector)qualifiers.get(qualifier_type_id);
          for(int j = 0; j < qualifier_value.size(); j++)
          {
            this_buff.append(qualifier_name + "=" +
                GFFStreamFeature.encode((String)qualifier_value.get(j)) + ";");
          }
        }
      }

      this_buff.append("\n");

      progress_listener.progressMade("Read from database: " + name);
    }

    return buffers;
  }
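  // Each record appended above is one 9-column GFF line; a sketch of the
  // output format, with made-up coordinates and ids:
  //
  //   chr1  chado  gene  1201  2500  .  +  .  ID=gene00001;timelastmodified=...;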
  /**
   * Lookup a cvterm id from its name.
   */
  public static Long getCvtermID(String name)
  {
    Enumeration enum_cvterm = cvterm.keys();
    while(enum_cvterm.hasMoreElements())
    {
      Long key = (Long)enum_cvterm.nextElement();
      if(name.equals(cvterm.get(key)))
        return key;
    }
    return null;
  }

  /**
   * Lookup a cvterm name from the collection of cvterms.
   */
  private String getCvtermName(long id)
  {
    if(cvterm == null)
    {
      try
      {
        getCvterm(getDAO());
      }
      catch(ConnectException ce)
      {
        ce.printStackTrace();
      }
      catch(SQLException sqle)
      {
        sqle.printStackTrace();
      }
    }

    return (String)cvterm.get(new Long(id));
  }

  /**
   * Look up cvterm names and ids and cache them in a hashtable.
   */
  private Hashtable getCvterm(ChadoDAO dao)
  {
    cvterm = new Hashtable();

    try
    {
      List cvtem_list = dao.getCvterm();
      Iterator it = cvtem_list.iterator();
      while(it.hasNext())
      {
        Cvterm cv = (Cvterm)it.next();
        cvterm.put(new Long(cv.getId()), cv.getName());
      }
    }
    catch(SQLException sqle)
    {
      System.err.println(this.getClass() + ": SQLException retrieving CvTerms");
      System.err.println(sqle);
    }

    return cvterm;
  }
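  // The cvterm hashtable is lazily filled on the first name lookup, so both
  // directions of the mapping come from the same cache; for example (the
  // numeric id depends on the cvterm table contents):
  //
  //   String typeName = getCvtermName(792);    // id  -> name, e.g. "exon"
  //   Long type_id    = getCvtermID("exon");   // name -> id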
  /**
   * Append the sequence of the srcfeature to the buffer in FASTA format.
   */
  private ByteBuffer getSequence(ChadoDAO dao, ByteBuffer buff)
          throws java.sql.SQLException
  {
    ChadoFeature feature = dao.getSequence(Integer.parseInt(feature_id),
                                           schema);
    buff.append("##FASTA\n>" + feature.getName() + "\n");
    buff.append(feature.getResidues());
    return buff;
  }
  /**
   * Return the organism to schema mapping.
   */
  public Hashtable getSchemaEntries()
  {
    return org2schema;
  }

  /**
   * Create a hashtable of the available entries in each schema.
   */
  public Hashtable getDatabaseEntries()
  {
    Hashtable db = new Hashtable();
    organism = new Vector();
    org2schema = new Hashtable();

    try
    {
      ChadoDAO dao = getDAO();
      List schema_list = dao.getSchema();
      Iterator it_schema = schema_list.iterator();

      while(it_schema.hasNext())
      {
        String schema = (String)it_schema.next();

        List list = dao.getResidueType(schema);
        if(list.size() == 0) // no residues for this organism
          continue;

        List list_residue_features = dao.getResidueFeatures(list, schema);
        Iterator it_residue_features = list_residue_features.iterator();
        while(it_residue_features.hasNext())
        {
          ChadoFeature feature = (ChadoFeature)it_residue_features.next();
          String org      = feature.getSchema();
          String typeName = getCvtermName(feature.getType_id());

          db.put(org + " - " + typeName + " - " + feature.getName(),
                 Integer.toString(feature.getId()));
          if(!organism.contains(org))
            organism.add(org);
          if(!org2schema.containsKey(org))
            org2schema.put(org, schema);
        }
      }
    }
    catch(ConnectException ce)
    {
      ce.printStackTrace();
    }
    catch(java.sql.SQLException sqlExp)
    {
      JOptionPane.showMessageDialog(null, "SQL Problems...", "SQL Error",
                                    JOptionPane.ERROR_MESSAGE);
      sqlExp.printStackTrace();
    }

    return db;
  }
public Vector getOrganism()
{
return organism;
}
  /**
   * Make a connection with the jdbc
   * jdbc:postgresql://localhost:13001/chadoCVS?user=es2
   */
  public Connection getConnection() throws java.sql.SQLException,
      java.net.ConnectException
  {
//  if(!iBatis)
//    jdbcDAO = new JdbcDAO((String)getLocation(), pfield);
//  else
//    connIB = new IBatisDAO(pfield);

    final String location = (String)getLocation();

    // assume we have a password
    final int index = location.indexOf("?user=");
    return DriverManager.getConnection(location.substring(0, index),
                                       location.substring(index + 6),
                                       new String(pfield.getPassword()));
  }
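  // Worked example of the split performed above: for the location
  // "jdbc:postgresql://localhost:13001/chadoCVS?user=es2", index points at
  // "?user=", so the JDBC URL is "jdbc:postgresql://localhost:13001/chadoCVS"
  // and the user name is "es2"; the password comes from the password field.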
/**
*
* Get the data access object (DAO).
* @return data access object
*
*/
private ChadoDAO getDAO()
throws java.net.ConnectException, SQLException
{
if(!iBatis)
{
if(jdbcDAO == null)
jdbcDAO = new JdbcDAO((String)getLocation(), pfield);
return jdbcDAO;
}
else
{
if(connIB == null)
connIB = new IBatisDAO(pfield);
return connIB;
}
}
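  // Both DAOs implement the ChadoDAO interface, so code elsewhere in this
  // class is unaffected by whether JDBC or iBatis is doing the work:
  //
  //   ChadoDAO dao = getDAO();       // JdbcDAO or IBatisDAO
  //   List terms = dao.getCvterm();  // identical call either way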
  /**
   * Create a new OutputStream object from this Document. The contents of the
   * Document can be written from the stream.
   *
   * @exception IOException
   *              Thrown if the Document can't be written.
   */
  public OutputStream getOutputStream() throws IOException
  {
    System.out.println("DatabaseDocument - ReadOnlyException");
    throw new ReadOnlyException("this Database Document can not be written to");
  }
  /**
   * Commit the ChadoTransaction queries to the database.
   */
  public void commit(Vector sql)
  {
    try
    {
      ChadoDAO dao = getDAO();
      for(int i = 0; i < sql.size(); i++)
      {
        ChadoTransaction tsn = (ChadoTransaction)sql.get(i);

        if(tsn.getType() == ChadoTransaction.UPDATE)
          dao.updateAttributes(schema, tsn);
        else if(tsn.getType() == ChadoTransaction.INSERT)
          dao.insertAttributes(schema, tsn);
        else if(tsn.getType() == ChadoTransaction.DELETE)
          dao.deleteAttributes(schema, tsn);
        else if(tsn.getType() == ChadoTransaction.INSERT_FEATURE)
          dao.insertFeature(schema, tsn, feature_id);
        else if(tsn.getType() == ChadoTransaction.DELETE_FEATURE)
          dao.deleteFeature(schema, tsn);
      }
    }
    catch(java.sql.SQLException sqlExp)
    {
      JOptionPane.showMessageDialog(null, "Problems Writing...",
                                    "SQL Error", JOptionPane.ERROR_MESSAGE);
      sqlExp.printStackTrace();
    }
    catch(java.net.ConnectException conn_ex)
    {
      JOptionPane.showMessageDialog(null, "Problems connecting...",
                                    "Database Connection Error - Check Server",
                                    JOptionPane.ERROR_MESSAGE);
      conn_ex.printStackTrace();
    }
  }
  public static void main(String args[])
  {
    try
    {
      DbSqlConfig.init(new JPasswordField());
      SqlMapClient sqlMap = DbSqlConfig.getSqlMapInstance();

      ChadoFeature feature = new ChadoFeature();
      feature.setId(Integer.parseInt(args[0]));
      feature.setSchema(args[1]);

      List featureList = sqlMap.queryForList("getGffLine", feature);

      for(int i = 0; i < featureList.size(); i++)
      {
        feature = (ChadoFeature)featureList.get(i);
        int fmin = feature.getFmin() + 1;
        int fmax = feature.getFmax();

        System.out.print(fmin + " " + fmax);
        System.out.print(" " + feature.getType_id());
        System.out.print(" " + feature.getProp_type_id());
        System.out.print(" " + feature.getStrand());
        System.out.print(" " + feature.getUniquename());
        System.out.print(" " + feature.getTimelastmodified().toString());
        System.out.println(" " + Integer.toString(feature.getId()));
      }
    }
    catch(SQLException sqle)
    {
      sqle.printStackTrace();
    }
  }
}