diff --git a/silk-core/src/main/scala/org/silkframework/config/Port.scala b/silk-core/src/main/scala/org/silkframework/config/Port.scala index b3eeff9390..c76d56dee2 100644 --- a/silk-core/src/main/scala/org/silkframework/config/Port.scala +++ b/silk-core/src/main/scala/org/silkframework/config/Port.scala @@ -66,3 +66,7 @@ case class FixedNumberOfInputs(ports: Seq[Port]) extends InputPorts case class FlexibleNumberOfInputs(portDefinition: Port = FlexibleSchemaPort(), min: Int = 0, max: Option[Int] = None) extends InputPorts + +object InputPorts { + final val NoInputPorts = FixedNumberOfInputs(Seq.empty) +} \ No newline at end of file diff --git a/silk-core/src/main/scala/org/silkframework/config/SilkVocab.scala b/silk-core/src/main/scala/org/silkframework/config/SilkVocab.scala index d4b5521cc7..13c7063786 100644 --- a/silk-core/src/main/scala/org/silkframework/config/SilkVocab.scala +++ b/silk-core/src/main/scala/org/silkframework/config/SilkVocab.scala @@ -22,6 +22,9 @@ object SilkVocab { val RestTaskResultResponseBody: String = RestTaskResult + "/responseBody" + // Clear dataset + val ClearDatasetType: String = namespace + "ClearDatasetType" + // Empty table val EmptySchemaType: String = namespace + "EmptySchemaType" diff --git a/silk-core/src/main/scala/org/silkframework/dataset/DataSink.scala b/silk-core/src/main/scala/org/silkframework/dataset/DataSink.scala index c425dde598..df14aba8bb 100644 --- a/silk-core/src/main/scala/org/silkframework/dataset/DataSink.scala +++ b/silk-core/src/main/scala/org/silkframework/dataset/DataSink.scala @@ -11,5 +11,5 @@ trait DataSink extends CloseableDataset { * * @param force Forces the clearing of the dataset. E.g. even when "clear before execution" flag is not set. 
*/ - def clear(force: Boolean = false)(implicit userContext: UserContext): Unit + def clear(force: Boolean = false)(implicit userContext: UserContext): Unit = { } } diff --git a/silk-core/src/main/scala/org/silkframework/dataset/DatasetSpec.scala b/silk-core/src/main/scala/org/silkframework/dataset/DatasetSpec.scala index 6717c89cea..c77f0fd6a6 100644 --- a/silk-core/src/main/scala/org/silkframework/dataset/DatasetSpec.scala +++ b/silk-core/src/main/scala/org/silkframework/dataset/DatasetSpec.scala @@ -78,7 +78,7 @@ case class DatasetSpec[+DatasetType <: Dataset](plugin: DatasetType, /** Datasets don't define input schemata, because any data can be written to them. */ override def inputPorts: InputPorts = { if(readOnly || characteristics.readOnly) { - FixedNumberOfInputs(Seq.empty) + InputPorts.NoInputPorts } else if(characteristics.supportsMultipleWrites) { FlexibleNumberOfInputs() } else { diff --git a/silk-core/src/main/scala/org/silkframework/dataset/DirtyTrackingFileDataSink.scala b/silk-core/src/main/scala/org/silkframework/dataset/DirtyTrackingFileDataSink.scala index f6ffb7d896..eca558249c 100644 --- a/silk-core/src/main/scala/org/silkframework/dataset/DirtyTrackingFileDataSink.scala +++ b/silk-core/src/main/scala/org/silkframework/dataset/DirtyTrackingFileDataSink.scala @@ -15,6 +15,11 @@ trait DirtyTrackingFileDataSink extends DataSink { DirtyTrackingFileDataSink.addUpdatedFile(resource.name) super.close() } + + abstract override def clear(force: Boolean)(implicit userContext: UserContext): Unit = { + DirtyTrackingFileDataSink.addUpdatedFile(resource.name) + super.clear(force) + } } object DirtyTrackingFileDataSink { diff --git a/silk-core/src/main/scala/org/silkframework/dataset/operations/ClearDatasetOperator.scala b/silk-core/src/main/scala/org/silkframework/dataset/operations/ClearDatasetOperator.scala new file mode 100644 index 0000000000..91b052d1ac --- /dev/null +++ 
b/silk-core/src/main/scala/org/silkframework/dataset/operations/ClearDatasetOperator.scala @@ -0,0 +1,35 @@ +package org.silkframework.dataset.operations + +import org.silkframework.config._ +import org.silkframework.entity.EntitySchema +import org.silkframework.execution.EmptyEntityHolder +import org.silkframework.execution.local.LocalEntities +import org.silkframework.runtime.plugin.annotations.Plugin + +@Plugin( + id = "clearDataset", + label = "Clear dataset", + description = + """Clears the dataset that is connected to the output of this operator.""" +) +case class ClearDatasetOperator() extends CustomTask { + + /** + * The input ports and their schemata. + */ + override def inputPorts: InputPorts = InputPorts.NoInputPorts + + /** + * The output port and its schema. + * None, if this operator does not generate any output. + */ + override def outputPort: Option[Port] = Some(FixedSchemaPort(ClearDatasetOperator.clearDatasetSchema)) +} + +object ClearDatasetOperator { + private val clearDatasetSchema = EntitySchema(SilkVocab.ClearDatasetType, IndexedSeq.empty) + + case class ClearDatasetTable(task: Task[TaskSpec]) extends LocalEntities with EmptyEntityHolder { + override def entitySchema: EntitySchema = clearDatasetSchema + } +} \ No newline at end of file diff --git a/silk-core/src/main/scala/org/silkframework/dataset/operations/ClearDatasetOperatorLocalExecutor.scala b/silk-core/src/main/scala/org/silkframework/dataset/operations/ClearDatasetOperatorLocalExecutor.scala new file mode 100644 index 0000000000..7c94e27ed7 --- /dev/null +++ b/silk-core/src/main/scala/org/silkframework/dataset/operations/ClearDatasetOperatorLocalExecutor.scala @@ -0,0 +1,47 @@ +package org.silkframework.dataset.operations + +import org.silkframework.config.{Task, TaskSpec} +import org.silkframework.dataset.operations.ClearDatasetOperator.ClearDatasetTable +import org.silkframework.execution.local.{LocalEntities, LocalExecution, LocalExecutor} +import
org.silkframework.execution.{ExecutionReport, ExecutionReportUpdater, ExecutorOutput, SimpleExecutionReport} +import org.silkframework.runtime.activity.ActivityContext +import org.silkframework.runtime.plugin.PluginContext + +/** Executes a clear dataset operator. */ +case class ClearDatasetOperatorLocalExecutor() extends LocalExecutor[ClearDatasetOperator] { + + override def execute(task: Task[ClearDatasetOperator], + inputs: Seq[LocalEntities], + output: ExecutorOutput, + execution: LocalExecution, + context: ActivityContext[ExecutionReport]) + (implicit pluginContext: PluginContext): Option[LocalEntities] = { + context.value.update(SimpleExecutionReport( + task = task, + summary = Seq.empty, + warnings = Seq.empty, + error = None, + isDone = true, + entityCount = 1, + operation = Some("generate clear instruction"), + operationDesc = "clear instruction generated" + )) + Some(ClearDatasetTable(task)) + } +} + +case class ClearDatasetOperatorExecutionReportUpdater(task: Task[TaskSpec], + context: ActivityContext[ExecutionReport]) extends ExecutionReportUpdater { + + override def operationLabel: Option[String] = Some("clear dataset") + + override def entityLabelSingle: String = "dataset" + override def entityLabelPlural: String = "datasets" + override def entityProcessVerb: String = "cleared" + + override def minEntitiesBetweenUpdates: Int = 1 + + override def additionalFields(): Seq[(String, String)] = Seq( + "Cleared dataset" -> task.fullLabel + ) +} \ No newline at end of file diff --git a/silk-core/src/main/scala/org/silkframework/execution/local/LocalDatasetExecutor.scala b/silk-core/src/main/scala/org/silkframework/execution/local/LocalDatasetExecutor.scala index b95ea673d9..d0604fff58 100644 --- a/silk-core/src/main/scala/org/silkframework/execution/local/LocalDatasetExecutor.scala +++ b/silk-core/src/main/scala/org/silkframework/execution/local/LocalDatasetExecutor.scala @@ -4,6 +4,8 @@ import org.silkframework.config.{Prefixes, Task, TaskSpec} import 
org.silkframework.dataset.CloseableDataset.using import org.silkframework.dataset.DatasetSpec.{EntitySinkWrapper, GenericDatasetSpec} import org.silkframework.dataset._ +import org.silkframework.dataset.operations.ClearDatasetOperator.ClearDatasetTable +import org.silkframework.dataset.operations.ClearDatasetOperatorExecutionReportUpdater import org.silkframework.dataset.bulk.{BulkResourceBasedDataset, ZipWritableResource} import org.silkframework.dataset.rdf._ import org.silkframework.dataset.sql.SqlDataset @@ -161,6 +163,8 @@ abstract class LocalDatasetExecutor[DatasetType <: Dataset] extends DatasetExecu uploadFilesViaGraphStore(dataset, files.typedEntities, UploadFilesViaGspReportUpdater(dataset, context)) case SparqlUpdateEntitySchema(queries) => executeSparqlUpdateQueries(dataset, queries, execution) + case _: ClearDatasetTable => + executeClearDataset(dataset) case SqlUpdateEntitySchema(queries) => executeSqlStatement(dataset, queries, execution) case et: LocalEntities => @@ -263,6 +267,17 @@ abstract class LocalDatasetExecutor[DatasetType <: Dataset] extends DatasetExecu } } + private def executeClearDataset(dataset: Task[DatasetSpec[DatasetType]]) + (implicit userContext: UserContext, context: ActivityContext[ExecutionReport]): Unit = { + if(dataset.readOnly) { + throw new RuntimeException(s"Cannot clear dataset '${dataset.fullLabel}', because it is configured as read-only.") + } + val executionReport = ClearDatasetOperatorExecutionReportUpdater(dataset, context) + dataset.entitySink.clear(force = true) + executionReport.increaseEntityCounter() + executionReport.executionDone() + } + /** Buffers queries to make prediction about how many queries will be executed. 
* * @param bufferSize max size of queries that should be buffered diff --git a/silk-core/src/main/scala/org/silkframework/plugins/CorePlugins.scala b/silk-core/src/main/scala/org/silkframework/plugins/CorePlugins.scala index e14c21b5ff..f70421f06d 100644 --- a/silk-core/src/main/scala/org/silkframework/plugins/CorePlugins.scala +++ b/silk-core/src/main/scala/org/silkframework/plugins/CorePlugins.scala @@ -19,6 +19,7 @@ import org.silkframework.config.Task.GenericTaskFormat import org.silkframework.config.TaskSpec.TaskSpecXmlFormat import org.silkframework.dataset.DatasetSpec.{DatasetSpecFormat, DatasetTaskXmlFormat} import org.silkframework.dataset.VariableDataset +import org.silkframework.dataset.operations.{ClearDatasetOperator, ClearDatasetOperatorLocalExecutor} import org.silkframework.dataset.operations.{AddProjectFilesOperator, DeleteFilesOperator, GetProjectFilesOperator, LocalAddProjectFilesOperatorExecutor, LocalDeleteFilesOperatorExecutor, LocalGetProjectFilesOperatorExecutor} import org.silkframework.entity.EntitySchema.EntitySchemaFormat import org.silkframework.entity.ValueType @@ -49,6 +50,8 @@ class CorePlugins extends PluginModule { classOf[LocalDeleteFilesOperatorExecutor] :: classOf[GetProjectFilesOperator] :: classOf[LocalGetProjectFilesOperatorExecutor] :: + classOf[ClearDatasetOperator] :: + classOf[ClearDatasetOperatorLocalExecutor] :: Nil } diff --git a/silk-plugins/silk-plugins-csv/src/main/scala/org/silkframework/plugins/dataset/csv/CsvDataset.scala b/silk-plugins/silk-plugins-csv/src/main/scala/org/silkframework/plugins/dataset/csv/CsvDataset.scala index eff5202c83..70301b02a4 100644 --- a/silk-plugins/silk-plugins-csv/src/main/scala/org/silkframework/plugins/dataset/csv/CsvDataset.scala +++ b/silk-plugins/silk-plugins-csv/src/main/scala/org/silkframework/plugins/dataset/csv/CsvDataset.scala @@ -45,8 +45,8 @@ case class CsvDataset ( quoteEscapeCharacter: String = "\"", @Param(label = "ZIP file regex", value = "If the input resource is a 
ZIP file, files inside the file are filtered via this regex.", advanced = true) override val zipFileRegex: String = CsvDataset.defaultZipFileRegex, - @Param(label = "Delete file before workflow execution", - value = "If set to true this will clear the specified file before executing a workflow that writes to it.", + @Param(label = "Delete file before workflow execution (deprecated)", + value = "This is deprecated, use the 'Clear dataset' operator instead to clear a dataset in a workflow. If set to true this will clear the specified file before executing a workflow that writes to it.", advanced = true) clearBeforeExecution: Boolean = false, @Param(label = "Trim whitespace and non-printable characters.", diff --git a/silk-plugins/silk-plugins-csv/src/main/scala/org/silkframework/plugins/dataset/csv/CsvSink.scala b/silk-plugins/silk-plugins-csv/src/main/scala/org/silkframework/plugins/dataset/csv/CsvSink.scala index 7353bd8327..b10d4cd17b 100644 --- a/silk-plugins/silk-plugins-csv/src/main/scala/org/silkframework/plugins/dataset/csv/CsvSink.scala +++ b/silk-plugins/silk-plugins-csv/src/main/scala/org/silkframework/plugins/dataset/csv/CsvSink.scala @@ -9,7 +9,7 @@ import org.silkframework.util.Uri import java.io.{File, IOException} import java.util.logging.Logger -class CsvSink(val resource: WritableResource, settings: CsvSettings) extends DataSink with DirtyTrackingFileDataSink { +class CsvSink(val resource: WritableResource, settings: CsvSettings) extends DirtyTrackingFileDataSink { private val log: Logger = Logger.getLogger(getClass.getName) @volatile @@ -56,6 +56,7 @@ class CsvSink(val resource: WritableResource, settings: CsvSettings) extends Dat case e: IOException => log.warning("IO exception occurred when deleting CRC file: " + e.getMessage) } + super.clear(force) } } } diff --git a/silk-plugins/silk-plugins-json/src/main/scala/org/silkframework/plugins/dataset/json/JsonSink.scala 
b/silk-plugins/silk-plugins-json/src/main/scala/org/silkframework/plugins/dataset/json/JsonSink.scala index 3c5a4fece5..ffc8f1df4c 100644 --- a/silk-plugins/silk-plugins-json/src/main/scala/org/silkframework/plugins/dataset/json/JsonSink.scala +++ b/silk-plugins/silk-plugins-json/src/main/scala/org/silkframework/plugins/dataset/json/JsonSink.scala @@ -27,6 +27,7 @@ class JsonSink (val resource: WritableResource, */ override def clear(force: Boolean = false)(implicit userContext: UserContext): Unit = { resource.delete() + super.clear(force) } } diff --git a/silk-plugins/silk-plugins-json/src/main/scala/org/silkframework/plugins/dataset/json/JsonSourceStreaming.scala b/silk-plugins/silk-plugins-json/src/main/scala/org/silkframework/plugins/dataset/json/JsonSourceStreaming.scala index 3f23d3b649..f1e267a4b5 100644 --- a/silk-plugins/silk-plugins-json/src/main/scala/org/silkframework/plugins/dataset/json/JsonSourceStreaming.scala +++ b/silk-plugins/silk-plugins-json/src/main/scala/org/silkframework/plugins/dataset/json/JsonSourceStreaming.scala @@ -20,7 +20,11 @@ class JsonSourceStreaming(taskId: Identifier, resource: Resource, basePath: Stri protected def createParser(): JsonParser = { val factory = new JsonFactoryBuilder().configure(StreamReadFeature.AUTO_CLOSE_SOURCE, true).build() - factory.createParser(resource.inputStream) + if(resource.exists) { + factory.createParser(resource.inputStream) + } else { + factory.createParser("{}") + } } override def retrieve(entitySchema: EntitySchema, limit: Option[Int])(implicit context: PluginContext): EntityHolder = { diff --git a/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/InMemoryDataset.scala b/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/InMemoryDataset.scala index 7bc7529a0c..96597fce64 100644 --- a/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/InMemoryDataset.scala +++ 
b/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/InMemoryDataset.scala @@ -14,9 +14,10 @@ import org.silkframework.runtime.plugin.annotations.{Param, Plugin} categories = Array(DatasetCategories.embedded), description = "A Dataset that holds all data in-memory." ) -case class InMemoryDataset(@Param(label = "Clear graph before workflow execution", - value = "If set to true this will clear this dataset before it is used in a workflow execution.") - clearGraphBeforeExecution: Boolean = true) extends RdfDataset with TripleSinkDataset { +case class InMemoryDataset(@Param(label = "Clear graph before workflow execution (deprecated)", + value = "This is deprecated, use the 'Clear dataset' operator instead to clear a dataset in a workflow. If set to true this will clear this dataset before it is used in a workflow execution.", + advanced = true) + clearGraphBeforeExecution: Boolean = false) extends RdfDataset with TripleSinkDataset { private val model = ModelFactory.createDefaultModel() diff --git a/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/RdfInMemoryDataset.scala b/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/RdfInMemoryDataset.scala index 675f55f81b..522551dd5d 100644 --- a/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/RdfInMemoryDataset.scala +++ b/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/RdfInMemoryDataset.scala @@ -20,9 +20,10 @@ import org.silkframework.runtime.plugin.annotations.{Param, Plugin} @Plugin(id = "rdf", label = "RDF in-memory", description = "A Dataset where all entities are given directly in the configuration.") case class RdfInMemoryDataset(data: String, format: String, - @Param(label = "Clear graph before workflow execution", - value = "If set to true this will clear the specified graph before executing a workflow 
that writes to it.") - clearBeforeExecution: Boolean = true) extends RdfDataset with TripleSinkDataset { + @Param(label = "Clear graph before workflow execution (deprecated)", + value = "This is deprecated, use the 'Clear dataset' operator instead to clear a dataset in a workflow. If set to true this will clear the specified graph before executing a workflow that writes to it.", + advanced = true) + clearBeforeExecution: Boolean = false) extends RdfDataset with TripleSinkDataset { private lazy val model = ModelFactory.createDefaultModel model.read(new StringReader(data), null, format) diff --git a/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/SparqlDataset.scala b/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/SparqlDataset.scala index c59fb42990..6606f8011d 100644 --- a/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/SparqlDataset.scala +++ b/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/SparqlDataset.scala @@ -44,8 +44,9 @@ case class SparqlDataset( strategy: EntityRetrieverStrategy = EntityRetrieverStrategy.parallel, @Param("Enforces the correct ordering of values, if set to `true` (default).") useOrderBy: Boolean = true, - @Param(label = "Clear graph before workflow execution", - value = "If set to `true`, this will clear the specified graph before executing a workflow that writes into it.") + @Param(label = "Clear graph before workflow execution (deprecated)", + value = "This is deprecated, use the 'Clear dataset' operator instead to clear a dataset in a workflow. 
If set to `true`, this will clear the specified graph before executing a workflow that writes into it.", + advanced = true) clearGraphBeforeExecution: Boolean = false, @Param( label = "SPARQL query timeout (ms)", diff --git a/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/tasks/SparqlUpdateCustomTask.scala b/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/tasks/SparqlUpdateCustomTask.scala index 8fca133bcb..9207817944 100644 --- a/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/tasks/SparqlUpdateCustomTask.scala +++ b/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/tasks/SparqlUpdateCustomTask.scala @@ -57,7 +57,7 @@ case class SparqlUpdateCustomTask( override def inputPorts: InputPorts = { if(isStaticTemplate) { - FixedNumberOfInputs(Seq.empty) + InputPorts.NoInputPorts } else { FixedNumberOfInputs(Seq(FixedSchemaPort(expectedInputSchema))) } diff --git a/silk-plugins/silk-plugins-rdf/src/test/scala/org/silkframework/plugins/dataset/rdf/LocalSparqlUpdateExecutorTest.scala b/silk-plugins/silk-plugins-rdf/src/test/scala/org/silkframework/plugins/dataset/rdf/LocalSparqlUpdateExecutorTest.scala index 0529a6589f..aa9477b361 100644 --- a/silk-plugins/silk-plugins-rdf/src/test/scala/org/silkframework/plugins/dataset/rdf/LocalSparqlUpdateExecutorTest.scala +++ b/silk-plugins/silk-plugins-rdf/src/test/scala/org/silkframework/plugins/dataset/rdf/LocalSparqlUpdateExecutorTest.scala @@ -125,7 +125,7 @@ class LocalSparqlUpdateExecutorTest extends AnyFlatSpec with Matchers with TestW class DummyTaskSpec(params: Map[String, String]) extends CustomTask { - override def inputPorts: InputPorts = FixedNumberOfInputs(Seq.empty) + override def inputPorts: InputPorts = InputPorts.NoInputPorts override def outputPort: Option[Port] = None diff --git a/silk-plugins/silk-plugins-xml/src/main/scala/org/silkframework/plugins/dataset/xml/XmlSink.scala 
b/silk-plugins/silk-plugins-xml/src/main/scala/org/silkframework/plugins/dataset/xml/XmlSink.scala index 0840462265..589f4c97bc 100644 --- a/silk-plugins/silk-plugins-xml/src/main/scala/org/silkframework/plugins/dataset/xml/XmlSink.scala +++ b/silk-plugins/silk-plugins-xml/src/main/scala/org/silkframework/plugins/dataset/xml/XmlSink.scala @@ -29,5 +29,6 @@ class XmlSink(val resource: WritableResource, */ override def clear(force: Boolean = false)(implicit userContext: UserContext): Unit = { resource.delete() + super.clear(force) } } \ No newline at end of file diff --git a/silk-plugins/silk-plugins-xml/src/main/scala/org/silkframework/plugins/dataset/xml/XmlSourceStreaming.scala b/silk-plugins/silk-plugins-xml/src/main/scala/org/silkframework/plugins/dataset/xml/XmlSourceStreaming.scala index e49fefd78b..02c65c12da 100644 --- a/silk-plugins/silk-plugins-xml/src/main/scala/org/silkframework/plugins/dataset/xml/XmlSourceStreaming.scala +++ b/silk-plugins/silk-plugins-xml/src/main/scala/org/silkframework/plugins/dataset/xml/XmlSourceStreaming.scala @@ -529,6 +529,9 @@ class XmlSourceStreaming(file: Resource, basePath: String, uriPattern: String) e paths.put(Nil, 0) val idx = new AtomicInteger(1) var currentPath = List[String]() + if(!file.exists) { + return Seq.empty + } val inputStream = file.inputStream try { val reader: XMLStreamReader = initStreamReader(inputStream) diff --git a/silk-workbench/silk-workbench-core/test/controllers/core/PluginApiTest.scala b/silk-workbench/silk-workbench-core/test/controllers/core/PluginApiTest.scala index ce9354c82f..1e2c2efc0b 100644 --- a/silk-workbench/silk-workbench-core/test/controllers/core/PluginApiTest.scala +++ b/silk-workbench/silk-workbench-core/test/controllers/core/PluginApiTest.scala @@ -96,7 +96,7 @@ case class AutoCompletableTestPlugin(@Param(value = "Some param", autoCompletion autoCompleteValueWithLabels = true, allowOnlyAutoCompletedValues = true, autoCompletionDependsOnParameters = Array("otherParam")) 
completableParam: String, otherParam: String) extends CustomTask { - override def inputPorts: InputPorts = FixedNumberOfInputs(Seq.empty) + override def inputPorts: InputPorts = InputPorts.NoInputPorts override def outputPort: Option[Port] = None } diff --git a/silk-workbench/silk-workbench-workflow/test/controllers/workflowApi/WorkflowApiTest.scala b/silk-workbench/silk-workbench-workflow/test/controllers/workflowApi/WorkflowApiTest.scala index 66933cf9ed..8c4c7072d8 100644 --- a/silk-workbench/silk-workbench-workflow/test/controllers/workflowApi/WorkflowApiTest.scala +++ b/silk-workbench/silk-workbench-workflow/test/controllers/workflowApi/WorkflowApiTest.scala @@ -146,7 +146,7 @@ object BlockingTask { /** Task that blocks until externally released. */ case class BlockingTask() extends CustomTask { - override def inputPorts: InputPorts = FixedNumberOfInputs(Seq.empty) + override def inputPorts: InputPorts = InputPorts.NoInputPorts override def outputPort: Option[Port] = None } diff --git a/silk-workbench/silk-workbench-workspace/app/controllers/datasetApi/DatasetApi.scala b/silk-workbench/silk-workbench-workspace/app/controllers/datasetApi/DatasetApi.scala index f04d077841..ec60b5badf 100644 --- a/silk-workbench/silk-workbench-workspace/app/controllers/datasetApi/DatasetApi.scala +++ b/silk-workbench/silk-workbench-workspace/app/controllers/datasetApi/DatasetApi.scala @@ -75,6 +75,10 @@ class DatasetApi @Inject() () extends InjectedController with UserContextActions new ApiResponse( responseCode = "404", description = "If the specified project or dataset has not been found." + ), + new ApiResponse( + responseCode = "409", + description = "If the dataset is currently configured as read-only. The user needs to change the config before trying again." 
) ) ) @@ -101,7 +105,7 @@ class DatasetApi @Inject() () extends InjectedController with UserContextActions val sink = dataset.data.entitySink sink.clear(force = true) val typeCache = dataset.activity[TypesCache].control - // This will throw an exception if the previous cache execution as failed. + // This will throw an exception if the previous cache execution has failed. Try(typeCache.waitUntilFinished()) Try(typeCache.start()) NoContent diff --git a/silk-workbench/silk-workbench-workspace/app/controllers/workspace/DatasetApi.scala b/silk-workbench/silk-workbench-workspace/app/controllers/workspace/DatasetApi.scala index 85f48d48ad..66edb9e4d3 100644 --- a/silk-workbench/silk-workbench-workspace/app/controllers/workspace/DatasetApi.scala +++ b/silk-workbench/silk-workbench-workspace/app/controllers/workspace/DatasetApi.scala @@ -26,7 +26,7 @@ import org.silkframework.rule.TransformSpec import org.silkframework.runtime.activity.UserContext import org.silkframework.runtime.plugin.{ParameterValues, PluginContext} import org.silkframework.runtime.serialization.ReadContext -import org.silkframework.runtime.validation.{BadUserInputException, RequestException} +import org.silkframework.runtime.validation.{BadUserInputException, ConflictRequestException, RequestException} import org.silkframework.util.Uri import org.silkframework.workbench.Context import org.silkframework.workbench.utils.ErrorResult diff --git a/silk-workbench/silk-workbench-workspace/test/controllers/workspace/ActivityApiTest.scala b/silk-workbench/silk-workbench-workspace/test/controllers/workspace/ActivityApiTest.scala index 826d8e3b5a..c32e84daf1 100644 --- a/silk-workbench/silk-workbench-workspace/test/controllers/workspace/ActivityApiTest.scala +++ b/silk-workbench/silk-workbench-workspace/test/controllers/workspace/ActivityApiTest.scala @@ -178,7 +178,7 @@ class ActivityApiTest extends PlaySpec with ConfigTestTrait with IntegrationTest } case class MessageTask(message: String) extends CustomTask { 
- override def inputPorts: InputPorts = FixedNumberOfInputs(Seq.empty) + override def inputPorts: InputPorts = InputPorts.NoInputPorts override def outputPort: Option[Port] = None } diff --git a/silk-workbench/silk-workbench-workspace/test/controllers/workspace/WorkflowExecutionIntegrationTest.scala b/silk-workbench/silk-workbench-workspace/test/controllers/workspace/WorkflowExecutionIntegrationTest.scala index 9f0c693808..1d695510e2 100644 --- a/silk-workbench/silk-workbench-workspace/test/controllers/workspace/WorkflowExecutionIntegrationTest.scala +++ b/silk-workbench/silk-workbench-workspace/test/controllers/workspace/WorkflowExecutionIntegrationTest.scala @@ -55,7 +55,7 @@ object CountingTask { } /** Task that counts its executions. */ case class CountingTask() extends CustomTask { - override def inputPorts: InputPorts = FixedNumberOfInputs(Seq.empty) + override def inputPorts: InputPorts = InputPorts.NoInputPorts override def outputPort: Option[Port] = None } diff --git a/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceProviderTestTrait.scala b/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceProviderTestTrait.scala index 2eff36d67b..56fc7118c8 100644 --- a/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceProviderTestTrait.scala +++ b/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceProviderTestTrait.scala @@ -759,7 +759,7 @@ trait WorkspaceProviderTestTrait extends AnyFlatSpec with Matchers with MockitoS @Plugin(id = "WorkspaceProviderTestTask", label = "test task") case class TestCustomTask(stringParam: String, numberParam: Int) extends CustomTask { - override def inputPorts: InputPorts = FixedNumberOfInputs(Seq.empty) + override def inputPorts: InputPorts = InputPorts.NoInputPorts override def outputPort: Option[Port] = None } @@ -776,7 +776,7 @@ object WorkspaceProviderTestPlugins { throw new FailingTaskException("Failed!") } - override def inputPorts: InputPorts = 
FixedNumberOfInputs(Seq.empty) + override def inputPorts: InputPorts = InputPorts.NoInputPorts override def outputPort: Option[Port] = None } diff --git a/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceTest.scala b/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceTest.scala index ff0551343e..bd76853466 100644 --- a/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceTest.scala +++ b/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceTest.scala @@ -182,7 +182,7 @@ object WorkspaceTest { } case class TestTask(testParam: String = "test value") extends CustomTask { - override def inputPorts: InputPorts = FixedNumberOfInputs(Seq.empty) + override def inputPorts: InputPorts = InputPorts.NoInputPorts override def outputPort: Option[Port] = None } diff --git a/workspace/src/app/views/pages/MappingEditor/HierarchicalMapping/store.ts b/workspace/src/app/views/pages/MappingEditor/HierarchicalMapping/store.ts index 11cec3ecf5..dc18493ac5 100644 --- a/workspace/src/app/views/pages/MappingEditor/HierarchicalMapping/store.ts +++ b/workspace/src/app/views/pages/MappingEditor/HierarchicalMapping/store.ts @@ -142,7 +142,7 @@ const handleCreatedSelectBoxValue = (data, path): any => { return _.get(data, [path]); }; -export interface IMetaData { +interface IMetaData { // A human-readable label label: string; // An optional description diff --git a/workspace/src/app/views/shared/modals/DeleteModal.tsx b/workspace/src/app/views/shared/modals/DeleteModal.tsx index d073b69439..c1a3800ddc 100644 --- a/workspace/src/app/views/shared/modals/DeleteModal.tsx +++ b/workspace/src/app/views/shared/modals/DeleteModal.tsx @@ -32,6 +32,8 @@ export interface IDeleteModalOptions extends TestableComponent { deleteDisabled?: boolean; alternativeCancelButtonLabel?: string; notifications?: React.ReactNode | React.ReactNode[]; + /** An alternative button text than the 'Delete' text. 
*/ + alternativeDeleteButtonText?: string; } export default function DeleteModal({ @@ -47,6 +49,7 @@ export default function DeleteModal({ submitOnEnter = true, deleteDisabled, alternativeCancelButtonLabel, + alternativeDeleteButtonText, ...otherProps }: IDeleteModalOptions) { const [isConfirmed, setIsConfirmed] = useState(false); @@ -85,7 +88,7 @@ export default function DeleteModal({ disabled={(confirmationRequired && !isConfirmed) || deleteDisabled} data-test-id={"remove-item-button"} > - {t("common.action.delete", "Delete")} + {alternativeDeleteButtonText ?? t("common.action.delete", "Delete")} ,