From 9dd106dc5531c9c55de60d5edfa3320aa27d84cd Mon Sep 17 00:00:00 2001 From: Dipsy Kapoor Date: Mon, 23 Dec 2013 16:31:20 -0800 Subject: [PATCH 01/11] removed code to load ontologies --- .../edu/isi/karma/rdf/JSONRDFGenerator.java | 34 ++----------------- 1 file changed, 3 insertions(+), 31 deletions(-) diff --git a/src/main/java/edu/isi/karma/rdf/JSONRDFGenerator.java b/src/main/java/edu/isi/karma/rdf/JSONRDFGenerator.java index 1f8c94f0a..24e1859c0 100644 --- a/src/main/java/edu/isi/karma/rdf/JSONRDFGenerator.java +++ b/src/main/java/edu/isi/karma/rdf/JSONRDFGenerator.java @@ -1,6 +1,5 @@ package edu.isi.karma.rdf; -import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.util.HashMap; @@ -15,16 +14,16 @@ import edu.isi.karma.kr2rml.KR2RMLWorksheetRDFGenerator; import edu.isi.karma.kr2rml.R2RMLMappingIdentifier; import edu.isi.karma.kr2rml.WorksheetR2RMLJenaModelParser; -import edu.isi.karma.modeling.ontology.OntologyManager; import edu.isi.karma.modeling.semantictypes.SemanticTypeUtil; import edu.isi.karma.rep.Worksheet; import edu.isi.karma.rep.Workspace; -import edu.isi.karma.util.EncodingDetector; import edu.isi.karma.util.JSONUtil; import edu.isi.karma.webserver.KarmaException; import edu.isi.karma.webserver.ServletContextParameterMap; import edu.isi.karma.webserver.ServletContextParameterMap.ContextParameter; + +//If running in offline mode, need to set manual.alignment=true in modeling.peoperties public class JSONRDFGenerator extends RdfGenerator { private static Logger logger = LoggerFactory.getLogger(JSONRDFGenerator.class); @@ -102,33 +101,6 @@ private void initOfflineWorkspaceSettings(Workspace workspace) { ContextParameter.TRAINING_EXAMPLE_MAX_COUNT, "200"); SemanticTypeUtil.setSemanticTypeTrainingStatus(false); - - OntologyManager ontologyManager = workspace.getOntologyManager(); - /** Check if any ontology needs to be preloaded **/ - String preloadedOntDir = getRootFolder() + "preloaded-ontologies"; - logger.info("Loading ontologies from dir: " + preloadedOntDir); - File ontDir = new File(preloadedOntDir); - if (ontDir.exists()) { - File[] ontologies = ontDir.listFiles(); - for (File ontology: ontologies) { - if (ontology.getName().endsWith(".owl") || ontology.getName().endsWith(".rdf") || ontology.getName().endsWith(".xml")) { - logger.info("Loading ontology file: " + ontology.getAbsolutePath()); - try { - String encoding = EncodingDetector.detect(ontology); - ontologyManager.doImport(ontology, encoding); - } catch (Exception t) { - logger.error ("Error loading ontology: " + ontology.getAbsolutePath(), t); - } - } - } - // update the cache at the end when all files are added to the model - ontologyManager.updateCache(); - } else { - logger.info("No directory for preloading ontologies exists."); - } - } - - private String getRootFolder() { - return getClass().getClassLoader().getResource(".").getPath() + "/../../"; } + } From c6cbbc4d2496c5d88e44e8a0410eac40a417aa3d Mon Sep 17 00:00:00 2001 From: Dipsy Kapoor Date: Mon, 23 Dec 2013 16:57:01 -0800 Subject: [PATCH 02/11] Close printWriter after generating RDF --- src/main/java/edu/isi/karma/rdf/JSONRDFGenerator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/isi/karma/rdf/JSONRDFGenerator.java b/src/main/java/edu/isi/karma/rdf/JSONRDFGenerator.java index 24e1859c0..946cdd270 100644 --- a/src/main/java/edu/isi/karma/rdf/JSONRDFGenerator.java +++ b/src/main/java/edu/isi/karma/rdf/JSONRDFGenerator.java @@ -80,7 +80,7 @@ public void generateRDF(String 
sourceName, String jsonData, boolean addProvenanc KR2RMLWorksheetRDFGenerator rdfGen = new KR2RMLWorksheetRDFGenerator(worksheet, workspace.getFactory(), workspace.getOntologyManager(), pw, mapping, errorReport, addProvenance); - rdfGen.generateRDF(false); + rdfGen.generateRDF(true); removeWorkspace(workspace); logger.debug("Generated rdf for " + sourceName); } From 0513c4d071157b1c0945ee69322ec760b552bebc Mon Sep 17 00:00:00 2001 From: szeke Date: Mon, 23 Dec 2013 17:46:02 -0800 Subject: [PATCH 03/11] Remove all warnings. --- .../InvokeDataMiningServiceCommand.java | 94 ++++++++----------- .../GenerateCleaningRulesCommand.java | 7 -- .../update/CSVImportPreviewUpdate.java | 1 + .../karma/er/helper/SPARQLGeneratorUtil.java | 3 - .../isi/karma/er/helper/TripleStoreUtil.java | 2 +- .../isi/karma/rep/alignment/LiteralNode.java | 1 + .../isi/karma/rep/alignment/SimpleNode.java | 1 + 7 files changed, 44 insertions(+), 65 deletions(-) diff --git a/src/main/java/edu/isi/karma/controller/command/alignment/InvokeDataMiningServiceCommand.java b/src/main/java/edu/isi/karma/controller/command/alignment/InvokeDataMiningServiceCommand.java index 2152b4031..7c4ddfdfd 100644 --- a/src/main/java/edu/isi/karma/controller/command/alignment/InvokeDataMiningServiceCommand.java +++ b/src/main/java/edu/isi/karma/controller/command/alignment/InvokeDataMiningServiceCommand.java @@ -21,22 +21,9 @@ package edu.isi.karma.controller.command.alignment; -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.util.ArrayList; import java.util.HashMap; -import java.util.List; import java.util.Map; -import org.apache.http.Header; -import org.apache.http.HttpEntity; -import org.apache.http.HttpResponse; -import org.apache.http.NameValuePair; -import org.apache.http.client.HttpClient; -import org.apache.http.client.entity.UrlEncodedFormEntity; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.impl.client.DefaultHttpClient; -import org.apache.http.message.BasicNameValuePair; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -124,45 +111,45 @@ public CommandType getCommandType() { } // Pedro: this is not being used. Candidate for deletion. - // - private String fetch_data_temp() - { - HttpClient httpclient = new DefaultHttpClient(); - TripleStoreUtil utilObj = new TripleStoreUtil(); - StringBuffer jsonString = new StringBuffer(); - try { - - JSONObject result = utilObj.fetch_data(this.modelContext, null); - logger.debug(result.toString()); - - List formparams = new ArrayList(); - formparams = new ArrayList(); - formparams.add(new BasicNameValuePair("data", result.toString())); - - HttpPost httppost = new HttpPost("http://localhost:1234/consumejson"); - httppost.setEntity(new UrlEncodedFormEntity(formparams, "UTF-8")); - HttpResponse response = httpclient.execute(httppost); - - for(Header h : response.getAllHeaders()) { - logger.debug(h.getName() + " : " + h.getValue()); - } - HttpEntity entity = response.getEntity(); - if (entity != null) { - BufferedReader buf = new BufferedReader(new InputStreamReader(entity.getContent())); - String line = buf.readLine(); - while(line != null) { - logger.debug(line); - jsonString.append(line); - line = buf.readLine(); - } - - } - return jsonString.toString(); - } catch (Exception e) { - logger.error(e.getMessage()); - } - return ""; - } + // Is this from Shrikanth? 
+// private String fetch_data_temp() +// { +// HttpClient httpclient = new DefaultHttpClient(); +// TripleStoreUtil utilObj = new TripleStoreUtil(); +// StringBuffer jsonString = new StringBuffer(); +// try { +// +// JSONObject result = utilObj.fetch_data(this.modelContext, null); +// logger.debug(result.toString()); +// +// List formparams = new ArrayList(); +// formparams = new ArrayList(); +// formparams.add(new BasicNameValuePair("data", result.toString())); +// +// HttpPost httppost = new HttpPost("http://localhost:1234/consumejson"); +// httppost.setEntity(new UrlEncodedFormEntity(formparams, "UTF-8")); +// HttpResponse response = httpclient.execute(httppost); +// +// for(Header h : response.getAllHeaders()) { +// logger.debug(h.getName() + " : " + h.getValue()); +// } +// HttpEntity entity = response.getEntity(); +// if (entity != null) { +// BufferedReader buf = new BufferedReader(new InputStreamReader(entity.getContent())); +// String line = buf.readLine(); +// while(line != null) { +// logger.debug(line); +// jsonString.append(line); +// line = buf.readLine(); +// } +// +// } +// return jsonString.toString(); +// } catch (Exception e) { +// logger.error(e.getMessage()); +// } +// return ""; +// } @Override public UpdateContainer doIt(Workspace workspace) throws CommandException { @@ -206,8 +193,7 @@ public UpdateContainer doIt(Workspace workspace) throws CommandException { String query = genObj.get_query(mappingGen.getKR2RMLMapping(), this.modelContext); // execute the query on the triple store - TripleStoreUtil utilObj = new TripleStoreUtil(); - String data = utilObj.invokeSparqlQuery(query, tripleStoreUrl, "application/sparql-results+json", null); + String data = TripleStoreUtil.invokeSparqlQuery(query, tripleStoreUrl, "application/sparql-results+json", null); // prepare the input for the data mining service // int row_num = 0; diff --git a/src/main/java/edu/isi/karma/controller/command/cleaning/GenerateCleaningRulesCommand.java b/src/main/java/edu/isi/karma/controller/command/cleaning/GenerateCleaningRulesCommand.java index cb706f138..6ab111fb6 100644 --- a/src/main/java/edu/isi/karma/controller/command/cleaning/GenerateCleaningRulesCommand.java +++ b/src/main/java/edu/isi/karma/controller/command/cleaning/GenerateCleaningRulesCommand.java @@ -310,12 +310,10 @@ public UpdateContainer doIt(Workspace workspace) throws CommandException { HashMap> resdata = new HashMap>(); HashSet keys = new HashSet(); while (iter.hasNext()) { - long _time5 = System.currentTimeMillis(); String tpid = iter.next(); ValueCollection rvco = rtf.getTransformedValues_debug(tpid); if (rvco == null) continue; - long _time6 = System.currentTimeMillis(); // constructing displaying data HashMap xyzHashMap = new HashMap(); for (String key : rvco.getNodeIDs()) { @@ -368,11 +366,6 @@ public UpdateContainer doIt(Workspace workspace) throws CommandException { } if(!rtf.nullRule) keys.add(getBestExample(xyzHashMap, expFeData)); - long _time7 = System.currentTimeMillis(); - long time6 = 0; - long time7 = 0; - time6 += _time6 - _time5; - time7 = _time7 - _time6; } // find the best row String vars = ""; diff --git a/src/main/java/edu/isi/karma/controller/update/CSVImportPreviewUpdate.java b/src/main/java/edu/isi/karma/controller/update/CSVImportPreviewUpdate.java index 79a8a28bd..17cd707ab 100644 --- a/src/main/java/edu/isi/karma/controller/update/CSVImportPreviewUpdate.java +++ b/src/main/java/edu/isi/karma/controller/update/CSVImportPreviewUpdate.java @@ -138,6 +138,7 @@ public void generateJson(String prefix, 
PrintWriter pw, JSONArray arr = new JSONArray(headers); writer.key(JsonKeys.headers.name()).value(arr); rowCount++; + reader.close(); continue; } diff --git a/src/main/java/edu/isi/karma/er/helper/SPARQLGeneratorUtil.java b/src/main/java/edu/isi/karma/er/helper/SPARQLGeneratorUtil.java index 31b1f88b1..d205275ca 100644 --- a/src/main/java/edu/isi/karma/er/helper/SPARQLGeneratorUtil.java +++ b/src/main/java/edu/isi/karma/er/helper/SPARQLGeneratorUtil.java @@ -42,9 +42,6 @@ public class SPARQLGeneratorUtil { private int var_count; private HashMap ParentMapingInfoList; - private void SPARQLGeneratorUtil() { - this.var_count = 0; - } // this private class ParentMapingInfo { public TriplesMap parent; diff --git a/src/main/java/edu/isi/karma/er/helper/TripleStoreUtil.java b/src/main/java/edu/isi/karma/er/helper/TripleStoreUtil.java index 51cfb26f8..4bfbe4c3e 100644 --- a/src/main/java/edu/isi/karma/er/helper/TripleStoreUtil.java +++ b/src/main/java/edu/isi/karma/er/helper/TripleStoreUtil.java @@ -460,7 +460,7 @@ public org.json.JSONObject fetch_data(String graph, String tripleStoreUrl) throw if (tripleStoreUrl == null || tripleStoreUrl.isEmpty()) { tripleStoreUrl = defaultDataRepoUrl; } - JSONObject retVal = new JSONObject(); + //JSONObject retVal = new JSONObject(); StringBuffer queryString = new StringBuffer(); queryString.append("SELECT ?x ?z ") .append("WHERE { GRAPH <").append(graph.trim()).append("> { ") diff --git a/src/main/java/edu/isi/karma/rep/alignment/LiteralNode.java b/src/main/java/edu/isi/karma/rep/alignment/LiteralNode.java index 592dd3d91..eba105c37 100644 --- a/src/main/java/edu/isi/karma/rep/alignment/LiteralNode.java +++ b/src/main/java/edu/isi/karma/rep/alignment/LiteralNode.java @@ -27,6 +27,7 @@ public class LiteralNode extends Node { /** * */ + @SuppressWarnings("unused") private static final long serialVersionUID = 1L; private final String value; private final Label datatype; diff --git a/src/main/java/edu/isi/karma/rep/alignment/SimpleNode.java b/src/main/java/edu/isi/karma/rep/alignment/SimpleNode.java index 5ccbe0c94..aa859bb3a 100644 --- a/src/main/java/edu/isi/karma/rep/alignment/SimpleNode.java +++ b/src/main/java/edu/isi/karma/rep/alignment/SimpleNode.java @@ -34,6 +34,7 @@ public class SimpleNode extends Node { /** * */ + @SuppressWarnings("unused") private static final long serialVersionUID = 1L; public SimpleNode(String id, Label label) { From 78c79d5ad01368a484007eca996c9333c819e067 Mon Sep 17 00:00:00 2001 From: szeke Date: Mon, 23 Dec 2013 19:29:15 -0800 Subject: [PATCH 04/11] Better logging of modeling commands. 
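
A minimal sketch of how a command subclass is expected to use the new
logging hooks, assuming a hypothetical subclass with its own logger,
worksheetId and hNodeId fields (imports and the other abstract Command
methods are omitted):

    // Call logCommand() at the top of doIt(); it logs getArgsJSON() at INFO level.
    @Override
    public UpdateContainer doIt(Workspace workspace) throws CommandException {
        logCommand(logger, workspace);
        // ... command-specific logic ...
        return new UpdateContainer();
    }

    // Override getArgsJSON() so the log shows readable worksheet and column names.
    @Override
    protected JSONObject getArgsJSON(Workspace workspace) {
        JSONObject args = new JSONObject();
        try {
            args.put("command", getTitle())
                .put("worksheetId", formatWorsheetId(workspace, worksheetId))
                .put("hNodeId", formatHNodeId(workspace, hNodeId));
        } catch (JSONException e) {
            e.printStackTrace();
        }
        return args;
    }
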
--- .../isi/karma/controller/command/Command.java | 75 +++++- .../ChangeInternalNodeLinksCommand.java | 129 ++++++---- ...ChangeInternalNodeLinksCommandFactory.java | 29 +-- .../alignment/SetMetaPropertyCommand.java | 228 ++++++++++++------ .../SetMetaPropertyCommandFactory.java | 2 +- .../WorksheetCommandHistoryExecutor.java | 6 + 6 files changed, 317 insertions(+), 152 deletions(-) diff --git a/src/main/java/edu/isi/karma/controller/command/Command.java b/src/main/java/edu/isi/karma/controller/command/Command.java index 08f5cfd55..ce5f0e5f9 100644 --- a/src/main/java/edu/isi/karma/controller/command/Command.java +++ b/src/main/java/edu/isi/karma/controller/command/Command.java @@ -27,6 +27,10 @@ import java.util.ArrayList; import java.util.List; +import org.json.JSONException; +import org.json.JSONObject; +import org.slf4j.Logger; + import edu.isi.karma.controller.update.UpdateContainer; import edu.isi.karma.rep.Entity; import edu.isi.karma.rep.Workspace; @@ -84,24 +88,26 @@ public abstract UpdateContainer doIt(Workspace workspace) * Has this command been executed already? */ private boolean isExecuted = false; - + /** - * Flag that should be unset if you don't want this command instance to be written into the history + * Flag that should be unset if you don't want this command instance to be + * written into the history */ private boolean saveInHistory = true; - + /** - * Flag to tell if the command history should be written after this command has been executed + * Flag to tell if the command history should be written after this command + * has been executed */ private boolean writeWorksheetHistoryAfterCommandExecutes = true; - + private boolean appendToHistory = false; - + /** * List of tags for the command */ private List tags = new ArrayList(); - + private String inputParameterJson; public enum CommandTag { @@ -119,19 +125,19 @@ public boolean isExecuted() { public void setExecuted(boolean isExecuted) { this.isExecuted = isExecuted; } - + public boolean isSavedInHistory() { return saveInHistory; } - + public void saveInHistory(boolean flag) { this.saveInHistory = flag; } - + public void writeWorksheetHistoryAfterCommandExecutes(boolean flag) { this.writeWorksheetHistoryAfterCommandExecutes = flag; } - + public boolean writeWorksheetHistoryAfterCommandExecutes() { return this.writeWorksheetHistoryAfterCommandExecutes; } @@ -186,4 +192,51 @@ public boolean appendToHistory() { public void setAppendToHistory(boolean appendToHistory) { this.appendToHistory = appendToHistory; } + + // ///////////////////////////////////////////////////////////////////////////// + // + // Methods to help with logging and error reporting. + // + // ///////////////////////////////////////////////////////////////////////////// + + protected void logCommand(Logger logger, Workspace workspace) { + try { + logger.info("Executing command:\n" + + getArgsJSON(workspace).toString(4)); + } catch (JSONException e) { + e.printStackTrace(); + } + } + + /* + * Pedro + * + * We should have an abstraction for Commands that operate on worksheets, + * and this method should go there. + */ + protected String formatWorsheetId(Workspace workspace, String worksheetId) { + return worksheetId + " (" + + workspace.getWorksheet(worksheetId).getTitle() + ")"; + } + + /* + * Pedro + * + * Return an HNodeId in a nice format for printing on command logs. 
+ */ + protected String formatHNodeId(Workspace workspace, String hNodeId) { + return hNodeId + " (" + workspace.getFactory().getColumnName(hNodeId) + + ")"; + } + + /* + * Pedro + * + * Meant to be overriden, but would need to define for all commands. We are + * going to need a nicer way to record the inputs of commands to reason + * about them, so perhaps this will also be easier and nicer to do. + */ + protected JSONObject getArgsJSON(Workspace workspace) { + return new JSONObject(); + } } diff --git a/src/main/java/edu/isi/karma/controller/command/alignment/ChangeInternalNodeLinksCommand.java b/src/main/java/edu/isi/karma/controller/command/alignment/ChangeInternalNodeLinksCommand.java index 5881b5c35..004fc9511 100644 --- a/src/main/java/edu/isi/karma/controller/command/alignment/ChangeInternalNodeLinksCommand.java +++ b/src/main/java/edu/isi/karma/controller/command/alignment/ChangeInternalNodeLinksCommand.java @@ -30,6 +30,7 @@ import edu.isi.karma.controller.command.Command; import edu.isi.karma.controller.command.CommandException; +import edu.isi.karma.controller.command.alignment.ChangeInternalNodeLinksCommandFactory.Arguments; import edu.isi.karma.controller.update.AlignmentSVGVisualizationUpdate; import edu.isi.karma.controller.update.SemanticTypesUpdate; import edu.isi.karma.controller.update.UpdateContainer; @@ -49,18 +50,19 @@ public class ChangeInternalNodeLinksCommand extends Command { private final String alignmentId; private JSONArray initialEdges; private JSONArray newEdges; - + // Required for undo - private Alignment oldAlignment; + private Alignment oldAlignment; private DirectedWeightedMultigraph oldGraph; - + private StringBuilder descStr = new StringBuilder(); - private static Logger logger = LoggerFactory.getLogger(ChangeInternalNodeLinksCommand.class); - + private static Logger logger = LoggerFactory + .getLogger(ChangeInternalNodeLinksCommand.class); + private enum JsonKeys { edgeSourceId, edgeId, edgeTargetId } - + public ChangeInternalNodeLinksCommand(String id, String worksheetId, String alignmentId, JSONArray initialEdges, JSONArray newEdges) { super(id); @@ -68,7 +70,7 @@ public ChangeInternalNodeLinksCommand(String id, String worksheetId, this.alignmentId = alignmentId; this.initialEdges = initialEdges; this.newEdges = newEdges; - + addTag(CommandTag.Modeling); } @@ -95,56 +97,72 @@ public CommandType getCommandType() { @SuppressWarnings("unchecked") @Override public UpdateContainer doIt(Workspace workspace) throws CommandException { -// String alignmentId = AlignmentManager.Instance().constructAlignmentId(workspace.getId(), worksheetId); - Alignment alignment = AlignmentManager.Instance().getAlignment(alignmentId); + logCommand(logger, workspace); + // String alignmentId = + // AlignmentManager.Instance().constructAlignmentId(workspace.getId(), + // worksheetId); + Alignment alignment = AlignmentManager.Instance().getAlignment( + alignmentId); Worksheet worksheet = workspace.getWorksheet(worksheetId); OntologyManager ontMgr = workspace.getOntologyManager(); - + // Save the original alignment for undo oldAlignment = alignment.getAlignmentClone(); - oldGraph = (DirectedWeightedMultigraph)alignment.getGraph().clone(); - - // First delete the links that are not present in newEdges and present in intialEdges + oldGraph = (DirectedWeightedMultigraph) alignment + .getGraph().clone(); + + // First delete the links that are not present in newEdges and present + // in intialEdges try { deleteLinks(alignment); addNewLinks(alignment, ontMgr); alignment.align(); - + 
} catch (JSONException e) { e.printStackTrace(); } - + return getAlignmentUpdateContainer(alignment, worksheet, workspace); } - private void addNewLinks(Alignment alignment, OntologyManager ontMgr) throws JSONException { - for (int j=0; j oldGraph; private SemanticType oldType; private SemanticType newType; - - private final Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); - - protected SetMetaPropertyCommand(String id, String worksheetId, String hNodeId, - METAPROPERTY_NAME metaPropertyName, String metaPropertyValue, boolean trainAndShowUpdates, + + private final Logger logger = LoggerFactory.getLogger(this.getClass() + .getSimpleName()); + + protected SetMetaPropertyCommand(String id, String worksheetId, + String hNodeId, METAPROPERTY_NAME metaPropertyName, + String metaPropertyValue, boolean trainAndShowUpdates, String rdfLiteralType) { super(id); this.hNodeId = hNodeId; @@ -86,7 +90,7 @@ protected SetMetaPropertyCommand(String id, String worksheetId, String hNodeId, this.metaPropertyName = metaPropertyName; this.metaPropertyValue = metaPropertyValue; this.rdfLiteralType = rdfLiteralType; - + addTag(CommandTag.Modeling); } @@ -113,20 +117,25 @@ public CommandType getCommandType() { @SuppressWarnings("unchecked") @Override public UpdateContainer doIt(Workspace workspace) throws CommandException { + logCommand(logger, workspace); /*** Get the Alignment for this worksheet ***/ Worksheet worksheet = workspace.getWorksheet(worksheetId); OntologyManager ontMgr = workspace.getOntologyManager(); - String alignmentId = AlignmentManager.Instance().constructAlignmentId(workspace.getId(), worksheetId); - Alignment alignment = AlignmentManager.Instance().getAlignment(alignmentId); + String alignmentId = AlignmentManager.Instance().constructAlignmentId( + workspace.getId(), worksheetId); + Alignment alignment = AlignmentManager.Instance().getAlignment( + alignmentId); if (alignment == null) { alignment = new Alignment(ontMgr); - AlignmentManager.Instance().addAlignmentToMap(alignmentId, alignment); + AlignmentManager.Instance().addAlignmentToMap(alignmentId, + alignment); } - + // Save the original alignment for undo oldAlignment = alignment.getAlignmentClone(); - oldGraph = (DirectedWeightedMultigraph)alignment.getGraph().clone(); - + oldGraph = (DirectedWeightedMultigraph) alignment + .getGraph().clone(); + /*** Add the appropriate nodes and links in alignment graph ***/ newType = null; @@ -136,113 +145,150 @@ public UpdateContainer doIt(Workspace workspace) throws CommandException { boolean semanticTypeAlreadyExists = false; Link oldIncomingLinkToColumnNode = null; Node oldDomainNode = null; - List columnNodeIncomingLinks = alignment.getIncomingLinks(columnNode.getId()); - if (columnNodeIncomingLinks != null && !columnNodeIncomingLinks.isEmpty()) { // SemanticType already assigned + List columnNodeIncomingLinks = alignment + .getIncomingLinks(columnNode.getId()); + if (columnNodeIncomingLinks != null + && !columnNodeIncomingLinks.isEmpty()) { // SemanticType already + // assigned semanticTypeAlreadyExists = true; oldIncomingLinkToColumnNode = columnNodeIncomingLinks.get(0); oldDomainNode = oldIncomingLinkToColumnNode.getSource(); } - + if (metaPropertyName.equals(METAPROPERTY_NAME.isUriOfClass)) { Node classNode = alignment.getNodeById(metaPropertyValue); if (semanticTypeAlreadyExists) { - clearOldSemanticTypeLink(oldIncomingLinkToColumnNode, oldDomainNode, alignment, classNode); + clearOldSemanticTypeLink(oldIncomingLinkToColumnNode, + oldDomainNode, alignment, 
classNode); } - + if (classNode == null) { Label classNodeLabel = ontMgr.getUriLabel(metaPropertyValue); if (classNodeLabel == null) { - logger.error("URI/ID does not exist in the ontology or model: " + metaPropertyValue); - return new UpdateContainer(EmptyUpdate.getInstance()); + String errorMessage = "Error while setting a classLink. MetaPropertyValue '" + + metaPropertyValue + + "' should be in the Ontology Manager, but it is not."; + logger.error(errorMessage); + return new UpdateContainer(new ErrorUpdate(errorMessage)); } classNode = alignment.addInternalNode(classNodeLabel); } - - alignment.addClassInstanceLink(classNode, columnNode, LinkKeyInfo.UriOfInstance); + + alignment.addClassInstanceLink(classNode, columnNode, + LinkKeyInfo.UriOfInstance); alignment.align(); - + // Create the semantic type object - newType = new SemanticType(hNodeId, ClassInstanceLink.getFixedLabel(), classNode.getLabel(), SemanticType.Origin.User, 1.0, false); - } else if (metaPropertyName.equals(METAPROPERTY_NAME.isSpecializationForEdge)) { + newType = new SemanticType(hNodeId, + ClassInstanceLink.getFixedLabel(), classNode.getLabel(), + SemanticType.Origin.User, 1.0, false); + } else if (metaPropertyName + .equals(METAPROPERTY_NAME.isSpecializationForEdge)) { Link propertyLink = alignment.getLinkById(metaPropertyValue); if (propertyLink == null) { - logger.error("Link should exist in the alignment: " + metaPropertyValue); - return new UpdateContainer(new ErrorUpdate( - "Error occured while setting the semantic type!")); + String errorMessage = "Error while specializing a link. The Link '" + + metaPropertyValue + + "' should already be in the alignment, but it is not."; + logger.error(errorMessage); + return new UpdateContainer(new ErrorUpdate(errorMessage)); } - - Node classInstanceNode = alignment.getNodeById(LinkIdFactory.getLinkSourceId(metaPropertyValue)); + + Node classInstanceNode = alignment.getNodeById(LinkIdFactory + .getLinkSourceId(metaPropertyValue)); if (semanticTypeAlreadyExists) { - clearOldSemanticTypeLink(oldIncomingLinkToColumnNode, oldDomainNode, alignment, classInstanceNode); + clearOldSemanticTypeLink(oldIncomingLinkToColumnNode, + oldDomainNode, alignment, classInstanceNode); } if (propertyLink instanceof DataPropertyLink) { - String targetHNodeId = ((ColumnNode) propertyLink.getTarget()).getHNodeId(); - alignment.addDataPropertyOfColumnLink(classInstanceNode, columnNode, targetHNodeId); + String targetHNodeId = ((ColumnNode) propertyLink.getTarget()) + .getHNodeId(); + alignment.addDataPropertyOfColumnLink(classInstanceNode, + columnNode, targetHNodeId); // Create the semantic type object - newType = new SemanticType(hNodeId, DataPropertyOfColumnLink.getFixedLabel(), classInstanceNode.getLabel(), SemanticType.Origin.User, 1.0, false); + newType = new SemanticType(hNodeId, + DataPropertyOfColumnLink.getFixedLabel(), + classInstanceNode.getLabel(), SemanticType.Origin.User, + 1.0, false); } else if (propertyLink instanceof ObjectPropertyLink) { - alignment.addObjectPropertySpecializationLink(classInstanceNode, columnNode, propertyLink.getId()); + alignment.addObjectPropertySpecializationLink( + classInstanceNode, columnNode, propertyLink.getId()); // Create the semantic type object - newType = new SemanticType(hNodeId, ObjectPropertySpecializationLink.getFixedLabel(), classInstanceNode.getLabel(), SemanticType.Origin.User, 1.0, false); + newType = new SemanticType(hNodeId, + ObjectPropertySpecializationLink.getFixedLabel(), + classInstanceNode.getLabel(), SemanticType.Origin.User, + 
1.0, false); } - + alignment.align(); } else if (metaPropertyName.equals(METAPROPERTY_NAME.isSubclassOfClass)) { Node classNode = alignment.getNodeById(metaPropertyValue); if (semanticTypeAlreadyExists) { - clearOldSemanticTypeLink(oldIncomingLinkToColumnNode, oldDomainNode, alignment, classNode); + clearOldSemanticTypeLink(oldIncomingLinkToColumnNode, + oldDomainNode, alignment, classNode); } - + if (classNode == null) { Label classNodeLabel = ontMgr.getUriLabel(metaPropertyValue); if (classNodeLabel == null) { - logger.error("URI/ID does not exist in the ontology or model: " + metaPropertyValue); - return new UpdateContainer(EmptyUpdate.getInstance()); + String errorMessage = "Error while setting an advances subclass. MetaPropertyValue '" + + metaPropertyValue + + "' should be in the Ontology Manager, but it is not."; + logger.error(errorMessage); + return new UpdateContainer(new ErrorUpdate(errorMessage)); } classNode = alignment.addInternalNode(classNodeLabel); } alignment.addColumnSubClassOfLink(classNode, columnNode); alignment.align(); - + // Create the semantic type object - newType = new SemanticType(hNodeId, ColumnSubClassLink.getFixedLabel(), classNode.getLabel(), SemanticType.Origin.User, 1.0, false); + newType = new SemanticType(hNodeId, + ColumnSubClassLink.getFixedLabel(), classNode.getLabel(), + SemanticType.Origin.User, 1.0, false); } - + columnNode.setUserSelectedSemanticType(newType); - + UpdateContainer c = new UpdateContainer(); CRFModelHandler crfModelHandler = workspace.getCrfModelHandler(); -// CRFModelHandler crfModelHandler = vWorkspace.getWorkspace().getCrfModelHandler(); + // CRFModelHandler crfModelHandler = + // vWorkspace.getWorkspace().getCrfModelHandler(); // Save the old SemanticType object and CRF Model for undo - oldType = worksheet.getSemanticTypes().getSemanticTypeForHNodeId(hNodeId); + oldType = worksheet.getSemanticTypes().getSemanticTypeForHNodeId( + hNodeId); oldColumnModel = worksheet.getCrfModel().getModelByHNodeId(hNodeId); - oldSynonymTypes = worksheet.getSemanticTypes().getSynonymTypesForHNodeId(newType.getHNodeId()); + oldSynonymTypes = worksheet.getSemanticTypes() + .getSynonymTypesForHNodeId(newType.getHNodeId()); // Update the SemanticTypes data structure for the worksheet worksheet.getSemanticTypes().addType(newType); // Update the synonym semanticTypes -// worksheet.getSemanticTypes().addSynonymTypesForHNodeId(newType.getHNodeId(), newSynonymTypes); + // worksheet.getSemanticTypes().addSynonymTypesForHNodeId(newType.getHNodeId(), + // newSynonymTypes); - if(trainAndShowUpdates) { + if (trainAndShowUpdates) { c.add(new SemanticTypesUpdate(worksheet, worksheetId, alignment)); try { // Add the visualization update - c.add(new AlignmentSVGVisualizationUpdate(worksheetId, alignment)); + c.add(new AlignmentSVGVisualizationUpdate(worksheetId, + alignment)); } catch (Exception e) { - logger.error("Error occured while setting the semantic type!", e); + logger.error("Error occured while setting the semantic type!", + e); return new UpdateContainer(new ErrorUpdate( "Error occured while setting the semantic type!")); } - + // Train the semantic type in a separate thread - Thread t = new Thread(new SemanticTypeTrainingThread(crfModelHandler, worksheet, newType)); + Thread t = new Thread(new SemanticTypeTrainingThread( + crfModelHandler, worksheet, newType)); t.start(); - + return c; - + } return c; } @@ -250,8 +296,8 @@ public UpdateContainer doIt(Workspace workspace) throws CommandException { private void clearOldSemanticTypeLink(Link 
oldIncomingLinkToColumnNode, Node oldDomainNode, Alignment alignment, Node newDomainNode) { alignment.removeLink(oldIncomingLinkToColumnNode.getId()); -// if (oldDomainNode != newDomainNode) -// alignment.removeNode(oldDomainNode.getId()); + // if (oldDomainNode != newDomainNode) + // alignment.removeNode(oldDomainNode.getId()); } @Override @@ -259,19 +305,24 @@ public UpdateContainer undoIt(Workspace workspace) { UpdateContainer c = new UpdateContainer(); Worksheet worksheet = workspace.getWorksheet(worksheetId); if (oldType == null) { - worksheet.getSemanticTypes().unassignColumnSemanticType(newType.getHNodeId()); + worksheet.getSemanticTypes().unassignColumnSemanticType( + newType.getHNodeId()); } else { worksheet.getSemanticTypes().addType(oldType); - worksheet.getSemanticTypes().addSynonymTypesForHNodeId(newType.getHNodeId(), oldSynonymTypes); + worksheet.getSemanticTypes().addSynonymTypesForHNodeId( + newType.getHNodeId(), oldSynonymTypes); } - worksheet.getCrfModel().addColumnModel(newType.getHNodeId(), oldColumnModel); + worksheet.getCrfModel().addColumnModel(newType.getHNodeId(), + oldColumnModel); // Replace the current alignment with the old alignment - String alignmentId = AlignmentManager.Instance().constructAlignmentId(workspace.getId(), worksheetId); - AlignmentManager.Instance().addAlignmentToMap(alignmentId, oldAlignment); + String alignmentId = AlignmentManager.Instance().constructAlignmentId( + workspace.getId(), worksheetId); + AlignmentManager.Instance() + .addAlignmentToMap(alignmentId, oldAlignment); oldAlignment.setGraph(oldGraph); - + // Get the alignment update if any try { c.add(new SemanticTypesUpdate(worksheet, worksheetId, oldAlignment)); @@ -283,19 +334,38 @@ public UpdateContainer undoIt(Workspace workspace) { } return c; } + + @Override + protected JSONObject getArgsJSON(Workspace workspace) { + JSONObject args = new JSONObject(); + try { + args.put("command", getTitle()) + .put(Arguments.metaPropertyName.name(), metaPropertyName) + .put(Arguments.metaPropertyValue.name(), metaPropertyValue) + .put(Arguments.worksheetId.name(), + formatWorsheetId(workspace, worksheetId)) + .put(Arguments.hNodeId.name(), + formatHNodeId(workspace, hNodeId)); + } catch (JSONException e) { + e.printStackTrace(); + } + return args; + } -// private ColumnNode getColumnNode(Alignment alignment, HNode hNode) { -// String columnName = hNode.getColumnName(); -// ColumnNode columnNode = alignment.getColumnNodeByHNodeId(hNodeId); -// -// if (columnNode == null) { -// columnNode = alignment.addColumnNode(hNodeId, columnName, rdfLiteralType, null); -// } else { -// // Remove old column node if it exists -// alignment.removeNode(columnNode.getId()); -// columnNode = alignment.addColumnNode(hNodeId, columnName, rdfLiteralType, null); -// } -// return columnNode; -// } + // private ColumnNode getColumnNode(Alignment alignment, HNode hNode) { + // String columnName = hNode.getColumnName(); + // ColumnNode columnNode = alignment.getColumnNodeByHNodeId(hNodeId); + // + // if (columnNode == null) { + // columnNode = alignment.addColumnNode(hNodeId, columnName, rdfLiteralType, + // null); + // } else { + // // Remove old column node if it exists + // alignment.removeNode(columnNode.getId()); + // columnNode = alignment.addColumnNode(hNodeId, columnName, rdfLiteralType, + // null); + // } + // return columnNode; + // } } diff --git a/src/main/java/edu/isi/karma/controller/command/alignment/SetMetaPropertyCommandFactory.java 
b/src/main/java/edu/isi/karma/controller/command/alignment/SetMetaPropertyCommandFactory.java index 0c193923d..c34a7c5c5 100644 --- a/src/main/java/edu/isi/karma/controller/command/alignment/SetMetaPropertyCommandFactory.java +++ b/src/main/java/edu/isi/karma/controller/command/alignment/SetMetaPropertyCommandFactory.java @@ -39,7 +39,7 @@ public enum METAPROPERTY_NAME { isUriOfClass, isSubclassOfClass, isSpecializationForEdge } - private enum Arguments { + enum Arguments { worksheetId, hNodeId, metaPropertyName, metaPropertyValue, trainAndShowUpdates, rdfLiteralType } diff --git a/src/main/java/edu/isi/karma/controller/history/WorksheetCommandHistoryExecutor.java b/src/main/java/edu/isi/karma/controller/history/WorksheetCommandHistoryExecutor.java index 0b76761cb..acde225f9 100644 --- a/src/main/java/edu/isi/karma/controller/history/WorksheetCommandHistoryExecutor.java +++ b/src/main/java/edu/isi/karma/controller/history/WorksheetCommandHistoryExecutor.java @@ -21,6 +21,8 @@ package edu.isi.karma.controller.history; +import java.io.PrintWriter; +import java.io.StringWriter; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -107,6 +109,10 @@ private UpdateContainer executeCommand(JSONObject commObject) workspace.getCommandHistory().doCommand(comm, workspace); } catch(Exception e) { logger.error("Error executing command: "+ commandName + ". Please notify this error"); + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + e.printStackTrace(pw); + logger.error(sw.toString()); //make these InfoUpdates so that the UI can still process the rest of the model return new UpdateContainer(new TrivialErrorUpdate("Error executing command " + commandName + " from history")); } From 3bfcca202cde04708504b621b4e38a4035d3c22e Mon Sep 17 00:00:00 2001 From: szeke Date: Mon, 23 Dec 2013 19:35:47 -0800 Subject: [PATCH 05/11] Fix typo in error message. 
--- .../command/alignment/ChangeInternalNodeLinksCommand.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/isi/karma/controller/command/alignment/ChangeInternalNodeLinksCommand.java b/src/main/java/edu/isi/karma/controller/command/alignment/ChangeInternalNodeLinksCommand.java index 004fc9511..00e4b413e 100644 --- a/src/main/java/edu/isi/karma/controller/command/alignment/ChangeInternalNodeLinksCommand.java +++ b/src/main/java/edu/isi/karma/controller/command/alignment/ChangeInternalNodeLinksCommand.java @@ -142,14 +142,14 @@ private void addNewLinks(Alignment alignment, OntologyManager ontMgr) if (sourceNode == null) { String errorMessage = "Error while adding new links: the new link goes FROM node '" + sourceId - + "', but this node is in the alignment."; + + "', but this node is NOT in the alignment."; logger.error(errorMessage); } Node targetNode = alignment.getNodeById(targetId); if (targetNode == null) { String errorMessage = "Error while adding new links: the new link goes TO node '" + targetId - + "', but this node is in the alignment."; + + "', but this node is NOT in the alignment."; logger.error(errorMessage); } Label linkLabel = ontMgr.getUriLabel(edgeUri); From 43b8c3a5843267e634300575fb9d05b80565dd5b Mon Sep 17 00:00:00 2001 From: szeke Date: Mon, 23 Dec 2013 20:14:25 -0800 Subject: [PATCH 06/11] Better logging --- .../command/worksheet/AddColumnCommand.java | 2 ++ .../WorksheetCommandHistoryExecutor.java | 8 ++------ .../edu/isi/karma/rdf/OfflineRdfGenerator.java | 2 +- src/main/java/edu/isi/karma/util/Util.java | 18 +++++++++++++++--- 4 files changed, 20 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/isi/karma/controller/command/worksheet/AddColumnCommand.java b/src/main/java/edu/isi/karma/controller/command/worksheet/AddColumnCommand.java index 36afbcaa8..a7b393f04 100644 --- a/src/main/java/edu/isi/karma/controller/command/worksheet/AddColumnCommand.java +++ b/src/main/java/edu/isi/karma/controller/command/worksheet/AddColumnCommand.java @@ -41,6 +41,7 @@ import edu.isi.karma.rep.RepFactory; import edu.isi.karma.rep.Worksheet; import edu.isi.karma.rep.Workspace; +import edu.isi.karma.util.Util; import edu.isi.karma.webserver.KarmaException; /** @@ -155,6 +156,7 @@ public UpdateContainer doIt(Workspace workspace) throws CommandException { return c; } catch (Exception e) { logger.error("Error in AddColumnCommand" + e.toString()); + Util.logException(logger, e); return new UpdateContainer(new ErrorUpdate(e.getMessage())); } } diff --git a/src/main/java/edu/isi/karma/controller/history/WorksheetCommandHistoryExecutor.java b/src/main/java/edu/isi/karma/controller/history/WorksheetCommandHistoryExecutor.java index acde225f9..d741bea23 100644 --- a/src/main/java/edu/isi/karma/controller/history/WorksheetCommandHistoryExecutor.java +++ b/src/main/java/edu/isi/karma/controller/history/WorksheetCommandHistoryExecutor.java @@ -21,8 +21,6 @@ package edu.isi.karma.controller.history; -import java.io.PrintWriter; -import java.io.StringWriter; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -46,6 +44,7 @@ import edu.isi.karma.rep.HNode; import edu.isi.karma.rep.HTable; import edu.isi.karma.rep.Workspace; +import edu.isi.karma.util.Util; import edu.isi.karma.webserver.ExecutionController; import edu.isi.karma.webserver.KarmaException; import edu.isi.karma.webserver.WorkspaceRegistry; @@ -109,10 +108,7 @@ private UpdateContainer executeCommand(JSONObject commObject) 
workspace.getCommandHistory().doCommand(comm, workspace); } catch(Exception e) { logger.error("Error executing command: "+ commandName + ". Please notify this error"); - StringWriter sw = new StringWriter(); - PrintWriter pw = new PrintWriter(sw); - e.printStackTrace(pw); - logger.error(sw.toString()); + Util.logException(logger, e); //make these InfoUpdates so that the UI can still process the rest of the model return new UpdateContainer(new TrivialErrorUpdate("Error executing command " + commandName + " from history")); } diff --git a/src/main/java/edu/isi/karma/rdf/OfflineRdfGenerator.java b/src/main/java/edu/isi/karma/rdf/OfflineRdfGenerator.java index bb78cfa41..e43e268e2 100644 --- a/src/main/java/edu/isi/karma/rdf/OfflineRdfGenerator.java +++ b/src/main/java/edu/isi/karma/rdf/OfflineRdfGenerator.java @@ -229,7 +229,7 @@ private static void generateRdfFromFile(CommandLine cl, String inputType, String sourceName = (String) cl.getValue("--sourcename"); if(sourceName == null) { - logger.error("No source name provided"); + logger.error("You need to supply a value for '--sourcename'"); return; } R2RMLMappingIdentifier id = new R2RMLMappingIdentifier(sourceName, modelURL); diff --git a/src/main/java/edu/isi/karma/util/Util.java b/src/main/java/edu/isi/karma/util/Util.java index a70cd4e11..386e268da 100644 --- a/src/main/java/edu/isi/karma/util/Util.java +++ b/src/main/java/edu/isi/karma/util/Util.java @@ -20,6 +20,8 @@ ******************************************************************************/ package edu.isi.karma.util; +import java.io.PrintWriter; +import java.io.StringWriter; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; @@ -27,6 +29,8 @@ import java.util.Map; import java.util.TreeSet; +import org.slf4j.Logger; + public class Util { /** * Sorts a HashMap based on the values with Double data type @@ -34,7 +38,8 @@ public class Util { * @param input * @return */ - public static HashMap sortHashMap(HashMap input) { + public static HashMap sortHashMap( + HashMap input) { Map tempMap = new HashMap(); for (String wsState : input.keySet()) { tempMap.put(wsState, input.get(wsState)); @@ -45,12 +50,19 @@ public static HashMap sortHashMap(HashMap input) HashMap sortedMap = new LinkedHashMap(); TreeSet sortedSet = new TreeSet(mapValues); Object[] sortedArray = sortedSet.toArray(); - + int size = sortedArray.length; - for (int i = size-1; i >= 0; i--) { + for (int i = size - 1; i >= 0; i--) { sortedMap.put(mapKeys.get(mapValues.indexOf(sortedArray[i])), (Double) sortedArray[i]); } return sortedMap; } + + public static void logException(Logger logger, Exception e) { + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + e.printStackTrace(pw); + logger.error(sw.toString()); + } } From cc741b434a219e6320c57f72aeb4ff412d3a1d97 Mon Sep 17 00:00:00 2001 From: Jason Slepicka Date: Tue, 24 Dec 2013 13:18:30 -0800 Subject: [PATCH 07/11] Make sure we initialize data structures in OntologyCache --- .../modeling/ontology/OntologyCache.java | 113 +++++++++--------- 1 file changed, 59 insertions(+), 54 deletions(-) diff --git a/src/main/java/edu/isi/karma/modeling/ontology/OntologyCache.java b/src/main/java/edu/isi/karma/modeling/ontology/OntologyCache.java index fba93613a..4d850f67c 100644 --- a/src/main/java/edu/isi/karma/modeling/ontology/OntologyCache.java +++ b/src/main/java/edu/isi/karma/modeling/ontology/OntologyCache.java @@ -128,12 +128,71 @@ class OntologyCache { public OntologyCache(OntologyHandler ontHandler) { this.ontHandler = 
ontHandler; + + allocateDataStructures(); } public void init() { logger.debug("start building the ontology cache ..."); + + long start = System.currentTimeMillis(); + + // create a list of classes and properties of the model + this.loadClasses(); + this.loadProperties(); + + logger.debug("number of classes:" + classes.size()); + logger.debug("number of all properties:" + properties.size()); + logger.debug("number of data properties:" + dataProperties.size() ); + logger.debug("number of object properties:" + objectProperties.size() ); + // A = number of all properties including rdf:Property + // B = number of properties defined as Data Property + // C = number of properties defined as Object Property + // properties = A + // dataproperties = A - C + // objectproperties = A - B + logger.debug("number of properties explicitly defined as owl:DatatypeProperty:" + (properties.size() - objectProperties.size()) ); + logger.debug("number of properties explicitly defined as owl:ObjectProperty:" + (properties.size() - dataProperties.size()) ); + + // create a hierarchy of classes and properties of the model + this.buildClassHierarchy(classHierarchy); + this.buildDataPropertyHierarchy(dataPropertyHierarchy); + this.buildObjectPropertyHierarchy(objectPropertyHierarchy); + + // build hashmaps for indirect subclass and subproperty relationships + this.buildSubClassesMaps(); + this.buildSuperClassesMaps(); + this.buildSubPropertiesMaps(); + this.buildSuperPropertiesMaps(); + + // build hashmaps to include inverse(Of) properties + this.buildInverseProperties(); + + // build some hashmaps that will be used in alignment + this.buildDataPropertiesMaps(); + this.buildObjectPropertiesMaps(); + // update hashmaps to include the subproperty relations + this.updateMapsWithSubpropertyDefinitions(); + + // classify different types of properties + this.classifyProperties(); + + // build connectivity hashmaps + this.buildConnectivityMaps(); + + // build hashmaps to speed up adding links to the graph +// this.buildObjectPropertyDomainRangeMap(); + + // add some common properties like rdfs:label, rdfs:comment, ... 
+ this.addPropertiesOfRDFVocabulary(); + + float elapsedTimeSec = (System.currentTimeMillis() - start)/1000F; + logger.debug("time to build the ontology cache: " + elapsedTimeSec); + } + + private void allocateDataStructures() { this.classes = new HashMap(); this.properties = new HashMap(); this.dataProperties = new HashMap(); @@ -193,60 +252,6 @@ public void init() { this.connectedByIndirectProperties = new HashSet(); this.connectedByDomainlessProperties = new HashSet(); this.connectedByRangelessProperties = new HashSet(); - - long start = System.currentTimeMillis(); - - // create a list of classes and properties of the model - this.loadClasses(); - this.loadProperties(); - - logger.debug("number of classes:" + classes.size()); - logger.debug("number of all properties:" + properties.size()); - logger.debug("number of data properties:" + dataProperties.size() ); - logger.debug("number of object properties:" + objectProperties.size() ); - // A = number of all properties including rdf:Property - // B = number of properties defined as Data Property - // C = number of properties defined as Object Property - // properties = A - // dataproperties = A - C - // objectproperties = A - B - logger.debug("number of properties explicitly defined as owl:DatatypeProperty:" + (properties.size() - objectProperties.size()) ); - logger.debug("number of properties explicitly defined as owl:ObjectProperty:" + (properties.size() - dataProperties.size()) ); - - // create a hierarchy of classes and properties of the model - this.buildClassHierarchy(classHierarchy); - this.buildDataPropertyHierarchy(dataPropertyHierarchy); - this.buildObjectPropertyHierarchy(objectPropertyHierarchy); - - // build hashmaps for indirect subclass and subproperty relationships - this.buildSubClassesMaps(); - this.buildSuperClassesMaps(); - this.buildSubPropertiesMaps(); - this.buildSuperPropertiesMaps(); - - // build hashmaps to include inverse(Of) properties - this.buildInverseProperties(); - - // build some hashmaps that will be used in alignment - this.buildDataPropertiesMaps(); - this.buildObjectPropertiesMaps(); - // update hashmaps to include the subproperty relations - this.updateMapsWithSubpropertyDefinitions(); - - // classify different types of properties - this.classifyProperties(); - - // build connectivity hashmaps - this.buildConnectivityMaps(); - - // build hashmaps to speed up adding links to the graph -// this.buildObjectPropertyDomainRangeMap(); - - // add some common properties like rdfs:label, rdfs:comment, ... - this.addPropertiesOfRDFVocabulary(); - - float elapsedTimeSec = (System.currentTimeMillis() - start)/1000F; - logger.debug("time to build the ontology cache: " + elapsedTimeSec); } public HashMap getClasses() { From 117f7cd25d88586ebdb1370f4fa25789e86dcedb Mon Sep 17 00:00:00 2001 From: Jason Slepicka Date: Tue, 24 Dec 2013 15:17:35 -0800 Subject: [PATCH 08/11] Added ability to set manual alignment to true in ModelingConfiguration RdfGenerator by default sets manual alignment to true. 
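
A minimal offline-usage sketch under this default (the model name, file
path and jsonData value are illustrative placeholders; exception handling
is omitted). Because RdfGenerator.initializeWorkspace() now calls
ModelingConfiguration.setManualAlignment(true), callers no longer need to
set manual.alignment=true in modeling.properties by hand:

    // Offline RDF generation without a modeling.properties edit.
    JSONRDFGenerator rdfGen = JSONRDFGenerator.getInstance();
    rdfGen.addModel(new R2RMLMappingIdentifier(
            "people-model", new File("people-model.ttl").toURI().toURL()));
    StringWriter sw = new StringWriter();
    rdfGen.generateRDF("people-model", jsonData, true, new PrintWriter(sw));
    String rdf = sw.toString();
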
--- .../java/edu/isi/karma/modeling/ModelingConfiguration.java | 4 ++++ src/main/java/edu/isi/karma/rdf/RdfGenerator.java | 7 +++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/isi/karma/modeling/ModelingConfiguration.java b/src/main/java/edu/isi/karma/modeling/ModelingConfiguration.java index 3de7e8ea6..5fc44248f 100644 --- a/src/main/java/edu/isi/karma/modeling/ModelingConfiguration.java +++ b/src/main/java/edu/isi/karma/modeling/ModelingConfiguration.java @@ -217,5 +217,9 @@ public static boolean isLearnerEnabled() { return learnerEnabled; } + public static void setManualAlignment(Boolean newManualAlignment) + { + manualAlignment = newManualAlignment; + } } diff --git a/src/main/java/edu/isi/karma/rdf/RdfGenerator.java b/src/main/java/edu/isi/karma/rdf/RdfGenerator.java index 12be94b2c..fce3b86ab 100644 --- a/src/main/java/edu/isi/karma/rdf/RdfGenerator.java +++ b/src/main/java/edu/isi/karma/rdf/RdfGenerator.java @@ -32,6 +32,7 @@ import edu.isi.karma.controller.command.Command.CommandTag; import edu.isi.karma.controller.history.WorksheetCommandHistoryExecutor; import edu.isi.karma.kr2rml.KR2RMLMapping; +import edu.isi.karma.modeling.ModelingConfiguration; import edu.isi.karma.rep.Worksheet; import edu.isi.karma.rep.Workspace; import edu.isi.karma.rep.WorkspaceManager; @@ -44,14 +45,16 @@ public abstract class RdfGenerator { private static Logger logger = LoggerFactory.getLogger(RdfGenerator.class); protected Workspace initializeWorkspace() { + Workspace workspace = WorkspaceManager.getInstance().createWorkspace(); - WorkspaceRegistry.getInstance().register(new ExecutionController(workspace)); + WorkspaceRegistry.getInstance().register(new ExecutionController(workspace)); + ModelingConfiguration.setManualAlignment(true); return workspace; } protected void removeWorkspace(Workspace workspace) { WorkspaceManager.getInstance().removeWorkspace(workspace.getId()); - WorkspaceRegistry.getInstance().deregister(workspace.getId()); + WorkspaceRegistry.getInstance().deregister(workspace.getId()); } protected void applyHistoryToWorksheet(Workspace workspace, Worksheet worksheet, From 512f5a0cda91bf369003ce7d5dbad4d7ec4d8240 Mon Sep 17 00:00:00 2001 From: Jason Slepicka Date: Tue, 24 Dec 2013 16:02:34 -0800 Subject: [PATCH 09/11] Make sure ManualAlignment isn't overwritten in ModelingConfiguration --- src/main/java/edu/isi/karma/rdf/RdfGenerator.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/isi/karma/rdf/RdfGenerator.java b/src/main/java/edu/isi/karma/rdf/RdfGenerator.java index fce3b86ab..8c7f393b4 100644 --- a/src/main/java/edu/isi/karma/rdf/RdfGenerator.java +++ b/src/main/java/edu/isi/karma/rdf/RdfGenerator.java @@ -48,6 +48,7 @@ protected Workspace initializeWorkspace() { Workspace workspace = WorkspaceManager.getInstance().createWorkspace(); WorkspaceRegistry.getInstance().register(new ExecutionController(workspace)); + ModelingConfiguration.load(); ModelingConfiguration.setManualAlignment(true); return workspace; } From ebc24ab3fae00afc80b94c3e0d6271d871b47b51 Mon Sep 17 00:00:00 2001 From: szeke Date: Wed, 25 Dec 2013 08:35:28 -0800 Subject: [PATCH 10/11] Added GRAPHVIZ to .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index cb43c51da..1f641d74f 100644 --- a/.gitignore +++ b/.gitignore @@ -31,6 +31,7 @@ log/* src/main/webapp/publish/KML/*.* src/main/webapp/publish/Source Description/*.* src/main/webapp/publish/History/*.* +src/main/webapp/publish/GRAPHVIZ/*.* 
src/main/webapp/publish/CSV/*.* src/main/webapp/publish/RDF/*.* src/main/webapp/publish/MDB/*.* From bed97eb39a8eb3eaccf7ec6946e5e7594345e2df Mon Sep 17 00:00:00 2001 From: szeke Date: Wed, 25 Dec 2013 22:05:51 -0800 Subject: [PATCH 11/11] Simplify tests so they don't write files. --- .../isi/karma/rdf/TestJSONRDFGenerator.java | 84 ++++++++++--------- 1 file changed, 46 insertions(+), 38 deletions(-) diff --git a/src/test/java/edu/isi/karma/rdf/TestJSONRDFGenerator.java b/src/test/java/edu/isi/karma/rdf/TestJSONRDFGenerator.java index 8f15029b6..1db4142e0 100644 --- a/src/test/java/edu/isi/karma/rdf/TestJSONRDFGenerator.java +++ b/src/test/java/edu/isi/karma/rdf/TestJSONRDFGenerator.java @@ -21,14 +21,13 @@ package edu.isi.karma.rdf; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.fail; -import java.io.BufferedWriter; import java.io.File; -import java.io.FileOutputStream; -import java.io.OutputStreamWriter; import java.io.PrintWriter; +import java.io.StringWriter; import org.junit.After; import org.junit.AfterClass; @@ -41,12 +40,12 @@ /** * @author dipsy - * + * */ public class TestJSONRDFGenerator { JSONRDFGenerator rdfGen; - + /** * @throws java.lang.Exception */ @@ -68,12 +67,14 @@ public static void tearDownAfterClass() throws Exception { public void setUp() throws Exception { rdfGen = JSONRDFGenerator.getInstance(); - //Add the models in + // Add the models in R2RMLMappingIdentifier modelIdentifier = new R2RMLMappingIdentifier( - "people-model", new File(getTestDataFolder() + "/people-model.ttl").toURI().toURL()); + "people-model", new File(getTestDataFolder() + + "/people-model.ttl").toURI().toURL()); rdfGen.addModel(modelIdentifier); - modelIdentifier = new R2RMLMappingIdentifier( - "cs548-events-model", new File(getTestDataFolder() + "/cs548-events-model.ttl").toURI().toURL()); + modelIdentifier = new R2RMLMappingIdentifier("cs548-events-model", + new File(getTestDataFolder() + "/cs548-events-model.ttl") + .toURI().toURL()); rdfGen.addModel(modelIdentifier); } @@ -85,59 +86,66 @@ public void tearDown() throws Exception { } /** - * Test method for {@link edu.isi.karma.rdf.JSONRDFGenerator#generateRDF(java.lang.String, java.lang.String, boolean, java.io.PrintWriter)}. + * Test method for + * {@link edu.isi.karma.rdf.JSONRDFGenerator#generateRDF(java.lang.String, java.lang.String, boolean, java.io.PrintWriter)} + * . 
*/ @Test public void testGenerateRDF1() { try { - + String filename = getTestDataFolder() + "/people.json"; System.out.println("Load json file: " + filename); - String jsonData = EncodingDetector.getString(new File(filename), "utf-8"); - - String outputFilePath = getTestDataFolder() + "/people.rdf"; - System.out.println("Generate RDF: " + outputFilePath); - OutputStreamWriter fw = new OutputStreamWriter(new FileOutputStream(outputFilePath), "UTF-8"); - BufferedWriter bw = new BufferedWriter(fw); - PrintWriter pw = new PrintWriter(bw); - + String jsonData = EncodingDetector.getString(new File(filename), + "utf-8"); + + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + rdfGen.generateRDF("people-model", jsonData, true, pw); - String readRDF = EncodingDetector.getString(new File(outputFilePath), "utf-8"); - assertNotEquals(readRDF.length(), 0); - } catch(Exception e) { + String rdf = sw.toString(); + + assertNotEquals(rdf.length(), 0); + String[] lines = rdf.split("\n"); + assertEquals(102, lines.length); + } catch (Exception e) { fail("Execption: " + e.getMessage()); } } - + /** - * Test method for {@link edu.isi.karma.rdf.JSONRDFGenerator#generateRDF(java.lang.String, java.lang.String, boolean, java.io.PrintWriter)}. + * Test method for + * {@link edu.isi.karma.rdf.JSONRDFGenerator#generateRDF(java.lang.String, java.lang.String, boolean, java.io.PrintWriter)} + * . */ @Test public void testGenerateRDF2() { try { - + String filename = getTestDataFolder() + "/cs548-events.json"; System.out.println("Load json file: " + filename); - String jsonData = EncodingDetector.getString(new File(filename), "utf-8"); - - String outputFilePath = getTestDataFolder() + "/cs548-events.rdf"; - System.out.println("Generate RDF: " + outputFilePath); - OutputStreamWriter fw = new OutputStreamWriter(new FileOutputStream(outputFilePath), "UTF-8"); - BufferedWriter bw = new BufferedWriter(fw); - PrintWriter pw = new PrintWriter(bw); - + String jsonData = EncodingDetector.getString(new File(filename), + "utf-8"); + + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + rdfGen.generateRDF("cs548-events-model", jsonData, true, pw); - String readRDF = EncodingDetector.getString(new File(outputFilePath), "utf-8"); - assertNotEquals(readRDF.length(), 0); - } catch(Exception e) { + String rdf = sw.toString(); + + assertNotEquals(rdf.length(), 0); + String[] lines = rdf.split("\n"); + assertEquals(234, lines.length); + } catch (Exception e) { fail("Execption: " + e.getMessage()); } } private String getRootFolder() { - return getClass().getClassLoader().getResource(".").getPath() + "/../../"; + return getClass().getClassLoader().getResource(".").getPath() + + "/../../"; } - + private String getTestDataFolder() { return getRootFolder() + "src/test/karma-data"; }