diff --git a/core/src/main/java/org/polypheny/db/adapter/DataSource.java b/core/src/main/java/org/polypheny/db/adapter/DataSource.java index 7903413587..bb8982433c 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataSource.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataSource.java @@ -18,68 +18,37 @@ import com.google.gson.JsonObject; import com.google.gson.JsonSerializer; +import java.util.HashSet; import java.util.List; import java.util.Map; -import lombok.AllArgsConstructor; +import java.util.Set; import lombok.Getter; import org.pf4j.ExtensionPoint; import org.polypheny.db.catalog.catalogs.AdapterCatalog; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; -import org.polypheny.db.type.PolyType; +import org.polypheny.db.catalog.logistic.DataModel; @Getter public abstract class DataSource extends Adapter implements ExtensionPoint { private final boolean dataReadOnly; + private Set supportedDataModels; - protected DataSource( final long adapterId, final String uniqueName, final Map settings, boolean dataReadOnly, S catalog ) { + protected DataSource( final long adapterId, final String uniqueName, final Map settings, boolean dataReadOnly, S catalog, Set supportedDataModels ) { super( adapterId, uniqueName, settings, catalog ); this.dataReadOnly = dataReadOnly; - + this.supportedDataModels = supportedDataModels; informationPage.setLabel( "Sources" ); + } - public abstract Map> getExportedColumns(); - - - @AllArgsConstructor - public static class ExportedColumn { - - public final String name; - public final PolyType type; - public final PolyType collectionsType; - public final Integer length; - public final Integer scale; - public final Integer dimension; - public final Integer cardinality; - public final boolean nullable; - public final String physicalSchemaName; - public final String physicalTableName; - public final String physicalColumnName; - public final int physicalPosition; - public final boolean primary; - - - public 
String getDisplayType() { - String typeStr = type.getName(); - if ( scale != null ) { - typeStr += "(" + length + "," + scale + ")"; - } else if ( length != null ) { - typeStr += "(" + length + ")"; - } - - if ( collectionsType != null ) { - typeStr += " " + collectionsType.getName(); - if ( cardinality != null ) { - typeStr += "(" + dimension + "," + cardinality + ")"; - } else if ( dimension != null ) { - typeStr += "(" + dimension + ")"; - } - } - return typeStr; - } + protected DataSource( final long adapterId, final String uniqueName, final Map settings, boolean dataReadOnly, S catalog ) { + super( adapterId, uniqueName, settings, catalog ); + this.dataReadOnly = dataReadOnly; + this.supportedDataModels = new HashSet<>( List.of( DataModel.getDefault() ) ); + informationPage.setLabel( "Sources" ); } @@ -104,4 +73,5 @@ private AdapterType getAdapterType() { return AdapterType.SOURCE; } + } diff --git a/core/src/main/java/org/polypheny/db/adapter/DocumentDataSource.java b/core/src/main/java/org/polypheny/db/adapter/DocumentDataSource.java new file mode 100644 index 0000000000..350b5cf376 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/DocumentDataSource.java @@ -0,0 +1,38 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter; + +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Getter; +import org.polypheny.db.catalog.logistic.EntityType; + +public interface DocumentDataSource { + + List getExportedCollection(); + + @AllArgsConstructor + @Getter + class ExportedDocument { + + private final String name; + private final boolean isModifyable; + private final EntityType type; + + } + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/RelationalDataSource.java b/core/src/main/java/org/polypheny/db/adapter/RelationalDataSource.java new file mode 100644 index 0000000000..dd9ab3ee8f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/RelationalDataSource.java @@ -0,0 +1,67 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.polypheny.db.adapter;
+
+import java.util.List;
+import java.util.Map;
+import lombok.AllArgsConstructor;
+import org.polypheny.db.type.PolyType;
+
+public interface RelationalDataSource {
+
+    Map> getExportedColumns();
+
+    @AllArgsConstructor
+    class ExportedColumn {
+
+        public final String name;
+        public final PolyType type;
+        public final PolyType collectionsType;
+        public final Integer length;
+        public final Integer scale;
+        public final Integer dimension;
+        public final Integer cardinality;
+        public final boolean nullable;
+        public final String physicalSchemaName;
+        public final String physicalTableName;
+        public final String physicalColumnName;
+        public final int physicalPosition;
+        public final boolean primary;
+
+
+        public String getDisplayType() {
+            String typeStr = type.getName();
+            if ( scale != null ) {
+                typeStr += "(" + length + "," + scale + ")";
+            } else if ( length != null ) {
+                typeStr += "(" + length + ")";
+            }
+
+            if ( collectionsType != null ) {
+                typeStr += " " + collectionsType.getName();
+                if ( cardinality != null ) {
+                    typeStr += "(" + dimension + "," + cardinality + ")";
+                } else if ( dimension != null ) {
+                    typeStr += "(" + dimension + ")";
+                }
+            }
+            return typeStr;
+        }
+
+    }
+
+}
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index 2dfbacbf67..69785f3aac 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -29,6 +29,7 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import java.util.function.BiFunction;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import javax.annotation.Nullable;
@@ -38,6 +39,9 @@
 import org.polypheny.db.adapter.AdapterManager;
 import org.polypheny.db.adapter.DataSource;
-import org.polypheny.db.adapter.DataSource.ExportedColumn;
+import org.polypheny.db.adapter.DocumentDataSource;
+import org.polypheny.db.adapter.DocumentDataSource.ExportedDocument;
+import org.polypheny.db.adapter.RelationalDataSource;
+import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.adapter.DataStore.IndexMethodModel;
 import org.polypheny.db.adapter.DeployMode;
@@ -70,6 +72,7 @@
 import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn;
+import org.polypheny.db.catalog.entity.logical.LogicalEntity;
 import org.polypheny.db.catalog.entity.logical.LogicalForeignKey;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.logical.LogicalIndex;
@@ -216,7 +219,44 @@ public void createStore( String uniqueName, String adapterName, AdapterType adap
     public void createSource( String uniqueName, String adapterName, long namespace, AdapterType adapterType, Map config, DeployMode mode ) {
         uniqueName = uniqueName.toLowerCase();
         DataSource adapter = (DataSource) AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, mode, config );
+        if ( adapter.getSupportedDataModels().contains( DataModel.RELATIONAL ) ) {
+            createRelationalSource( adapter, namespace );
+        }
+        if ( adapter.getSupportedDataModels().contains( DataModel.DOCUMENT ) ) {
+            createDocumentSource( adapter, namespace );
+        }
+        if ( adapter.getSupportedDataModels().contains( DataModel.GRAPH ) ) {
+            // TODO: implement graph source creation
+            throw new IllegalArgumentException( "Adapters with native data model graph are not yet supported!" );
+        }
+        catalog.updateSnapshot();
+    }
+
+
+    private void createDocumentSource( DataSource adapter, long namespace ) {
+        List exportedCollections;
+        try {
+            exportedCollections = ((DocumentDataSource) adapter).getExportedCollection();
+        } catch ( Exception e ) {
+            AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() );
+            throw new GenericRuntimeException( "Could not deploy adapter", e );
+        }
+
+        for ( ExportedDocument exportedDocument : exportedCollections ) {
+            // Make sure the collection name is unique within the namespace; register it exactly once
+            String documentName = getUniqueEntityName( namespace, exportedDocument.getName(), ( ns, en ) -> catalog.getSnapshot().doc().getCollection( ns, en ) );
+
+            LogicalCollection logicalCollection = catalog.getLogicalDoc( namespace ).addCollection( documentName, exportedDocument.getType(), exportedDocument.isModifyable() );
+            AllocationCollection allocationCollection = catalog.getAllocDoc( namespace ).addAllocation( logicalCollection, logicalCollection.getId(), 0, adapter.getAdapterId() );
+            buildNamespace( Catalog.defaultNamespaceId, logicalCollection, adapter );
+            adapter.createCollection( null, logicalCollection, allocationCollection );
+            catalog.updateSnapshot();
+        }
+    }
+
+
+    private void createRelationalSource( DataSource adapter, long namespace ) {
         Map> exportedColumns;
         try {
-            exportedColumns = adapter.getExportedColumns();
+            exportedColumns = ((RelationalDataSource) adapter).getExportedColumns();
@@ -227,14 +267,7 @@ public void createSource( String uniqueName, String adapterName, long namespace,
         // Create table, columns etc.
         for ( Map.Entry> entry : exportedColumns.entrySet() ) {
             // Make sure the table name is unique
-            String tableName = entry.getKey();
-            if ( catalog.getSnapshot().rel().getTable( namespace, tableName ).isPresent() ) {
-                int i = 0;
-                while ( catalog.getSnapshot().rel().getTable( namespace, tableName + i ).isPresent() ) {
-                    i++;
-                }
-                tableName += i;
-            }
+            String tableName = getUniqueEntityName( namespace, entry.getKey(), ( ns, en ) -> catalog.getSnapshot().rel().getTable( ns, en ) );
             LogicalTable logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() );
             List columns = new ArrayList<>();
@@ -275,10 +308,16 @@ public void createSource( String uniqueName, String adapterName, long namespace,
             buildNamespace( Catalog.defaultNamespaceId, logical, adapter );
             adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of() ), AllocationTableWrapper.of( allocation.unwrap( AllocationTable.class ).orElseThrow(), aColumns ) );
             catalog.updateSnapshot();
-        }
-        catalog.updateSnapshot();
+        }
+
+
+    private String getUniqueEntityName( Long namespace, String name, BiFunction> retriever ) {
+        String uniqueName = name; // keep the plain name when it is not taken; only append 0, 1, ... on collision
+        for ( int enumerator = 0; retriever.apply( namespace, uniqueName ).isPresent(); enumerator++ ) {
+            uniqueName = name + enumerator;
+        }
+        return uniqueName;
+    }
@@ -2118,7 +2157,7 @@ private static List sortByPosition( List columns )
     }
-    private void buildNamespace( long namespaceId, LogicalTable logical, Adapter store ) {
+    private void buildNamespace( long namespaceId, LogicalEntity logical, Adapter store ) {
         store.updateNamespace( logical.getNamespaceName(), namespaceId );
     }
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
index 5328bad85d..a96dfdc1eb 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
@@
-35,6 +35,8 @@ import org.polypheny.db.adapter.ConnectionMethod; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingDirectory; @@ -72,7 +74,7 @@ @AdapterSettingString(subOf = "method_link", defaultValue = "classpath://hr", name = "directoryName", description = "You can select a path to a folder or specific .csv or .csv.gz files.", position = 2) @AdapterSettingInteger(name = "maxStringLength", defaultValue = 255, position = 3, description = "Which length (number of characters including whitespace) should be used for the varchar columns. Make sure this is equal or larger than the longest string in any of the columns.") -public class CsvSource extends DataSource { +public class CsvSource extends DataSource implements RelationalDataSource { private static final Logger log = LoggerFactory.getLogger( CsvSource.class ); @Delegate(excludes = Excludes.class) diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index 39e1011dd2..0b0ba6d016 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -31,6 +31,8 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.RelationalScanDelegate; import 
org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingBoolean; @@ -91,7 +93,7 @@ public void stop() { @AdapterSettingString(name = "ClientUrl", description = "The URL of the ethereum JSON RPC client", defaultValue = "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04", position = 1) @AdapterSettingInteger(name = "Blocks", description = "The number of Blocks to fetch when processing a query", defaultValue = 10, position = 2, modifiable = true) @AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter Past Block", defaultValue = false, position = 3, modifiable = true) - public static class EthereumDataSource extends DataSource { + public static class EthereumDataSource extends DataSource implements RelationalDataSource { @Delegate(excludes = Excludes.class) private final RelationalScanDelegate delegate; @@ -305,4 +307,4 @@ private interface Excludes { } -} \ No newline at end of file +} diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index 583d47a49d..e3bc73a60a 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -41,6 +41,8 @@ import org.polypheny.db.adapter.ConnectionMethod; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingDirectory; @@ -76,7 +78,7 @@ @AdapterSettingString(name = "sheetName", description = "default to read the first sheet", defaultValue = "", 
required = false) @AdapterSettingInteger(name = "maxStringLength", defaultValue = 255, position = 2, description = "Which length (number of characters including whitespace) should be used for the varchar columns. Make sure this is equal or larger than the longest string in any of the columns.") -public class ExcelSource extends DataSource { +public class ExcelSource extends DataSource implements RelationalDataSource { @Delegate(excludes = Excludes.class) private final RelationalScanDelegate delegate; diff --git a/plugins/google-sheet-adapter/src/main/java/org/polypheny/db/adapter/googlesheet/GoogleSheetSource.java b/plugins/google-sheet-adapter/src/main/java/org/polypheny/db/adapter/googlesheet/GoogleSheetSource.java index ad96fd7a7f..51bad40d94 100644 --- a/plugins/google-sheet-adapter/src/main/java/org/polypheny/db/adapter/googlesheet/GoogleSheetSource.java +++ b/plugins/google-sheet-adapter/src/main/java/org/polypheny/db/adapter/googlesheet/GoogleSheetSource.java @@ -47,6 +47,8 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingBoolean; @@ -84,7 +86,7 @@ @AdapterSettingString(name = "oAuth-Client-ID", description = "Authentication credentials used for GoogleSheets API. Not the account credentials.", defaultValue = "", position = 5) @AdapterSettingString(name = "oAuth-Client-Key", description = "Authentication credentials used for GoogleSheets API. 
Not the account credentials.", defaultValue = "") @AdapterSettingString(name = "sheetName", description = "Name of sheet to use.", defaultValue = "") -public class GoogleSheetSource extends DataSource { +public class GoogleSheetSource extends DataSource implements RelationalDataSource { @Delegate(excludes = Excludes.class) private final RelationalScanDelegate delegate; diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java index 7b1fb9126b..06f31a071a 100644 --- a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java @@ -20,93 +20,28 @@ import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; import java.net.URL; -import java.util.HashMap; +import java.util.LinkedList; import java.util.List; -import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.Collectors; -import org.polypheny.db.adapter.DataSource.ExportedColumn; -import org.polypheny.db.type.PolyType; +import org.polypheny.db.adapter.DocumentDataSource.ExportedDocument; +import org.polypheny.db.catalog.logistic.EntityType; public class JsonMetaRetriever { - public static Map> getFields( URL jsonFile, String physicalCollectionName ) throws IOException { + public static List getDocuments( URL jsonFile ) throws IOException { ObjectMapper objectMapper = new ObjectMapper(); JsonNode rootNode = objectMapper.readTree( jsonFile ); - AtomicInteger position = new AtomicInteger( 1 ); + List exportedDocuments = new LinkedList<>(); String entityName = deriveEntityName( jsonFile.getFile() ); - - Map fields = gatherFields( rootNode ); - List uniqueFields = fields.entrySet().stream() - .map( entry -> buildColumn( entry.getKey(), getDataType( entry.getValue() ), entityName, physicalCollectionName, 
position.getAndIncrement() ) ) - .collect( Collectors.toList() ); - - Map> exportedColumns = new HashMap<>(); - exportedColumns.put( entityName, uniqueFields ); - return exportedColumns; - } - - - private static Map gatherFields( JsonNode node ) { - Map fields = new HashMap<>(); - if ( node.isArray() ) { - node.forEach( subNode -> subNode.fields().forEachRemaining( entry -> fields.put( entry.getKey(), entry.getValue() ) ) ); - } else if ( node.isObject() ) { - node.fields().forEachRemaining( entry -> fields.put( entry.getKey(), entry.getValue() ) ); + if ( rootNode.isArray() ) { + AtomicInteger enumerator = new AtomicInteger(); + rootNode.forEach( elementNode -> exportedDocuments.add( new ExportedDocument( entityName + enumerator.getAndIncrement(), false, EntityType.SOURCE ) ) ); + } else if ( rootNode.isObject() ) { + exportedDocuments.add( new ExportedDocument( entityName, false, EntityType.SOURCE ) ); } else { throw new RuntimeException( "JSON file does not contain a valid top-level structure (neither an object nor an array)" ); } - return fields; - } - - - private static ExportedColumn buildColumn( String name, PolyType type, String fileName, String physicalCollectionName, int position ) { - int length = type == PolyType.VARCHAR ? 8388096 : 0; // max length of json string in chars: 8388096 - return new ExportedColumn( - name, - type, - null, - length, - null, - null, - null, - false, - fileName, - physicalCollectionName, - name, - position, - position == 1 ); - } - - - private static PolyType getDataType( JsonNode value ) { - switch ( value.getNodeType() ) { - case NULL -> { - return PolyType.NULL; - } - case ARRAY -> { - return PolyType.ARRAY; - } - case OBJECT -> { - return PolyType.MAP; - } - case NUMBER -> { - if ( value.isIntegralNumber() ) { - return PolyType.BIGINT; - } - if ( value.isFloatingPointNumber() ) { - return PolyType.DOUBLE; - } - throw new RuntimeException( "ILLEGAL DATA TYPE: json file contains unknown number type." 
); - } - case STRING -> { - return PolyType.VARCHAR; - } - case BOOLEAN -> { - return PolyType.BOOLEAN; - } - } - return PolyType.NULL; + return exportedDocuments; } diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java index c26ad48098..aeeb15c009 100644 --- a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java @@ -20,12 +20,14 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; +import java.util.HashSet; import java.util.List; import java.util.Map; import lombok.experimental.Delegate; import org.pf4j.Extension; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DocumentDataSource; import org.polypheny.db.adapter.DocumentScanDelegate; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingDirectory; @@ -41,6 +43,7 @@ import org.polypheny.db.catalog.entity.physical.PhysicalCollection; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Namespace; import org.polypheny.db.transaction.PolyXid; @@ -55,7 +58,7 @@ usedModes = DeployMode.EMBEDDED, defaultMode = DeployMode.EMBEDDED) @AdapterSettingDirectory(name = "jsonFile", defaultValue = "classpath://articles.json", description = "Path to the JSON file which is to be integrated as this source.", position = 1) -public class JsonSource extends DataSource { +public class JsonSource extends DataSource implements DocumentDataSource { private static final Logger log = LoggerFactory.getLogger( JsonSource.class ); 
@Delegate(excludes = Excludes.class)
@@ -66,7 +69,6 @@ public class JsonSource extends DataSource {
     public JsonSource( final long storeId, final String uniqueName, final Map settings ) {
-        super( storeId, uniqueName, settings, true, new DocAdapterCatalog( storeId ) );
+        super( storeId, uniqueName, settings, true, new DocAdapterCatalog( storeId ), new HashSet<>( List.of( DataModel.DOCUMENT ) ) );
-        //this.jsonFile = getJsonFileUrl( settings );
-        URL url = getJsonFileUrl( "classpath://articles.json" );
-        this.jsonFile = url;
+        // Use the configured "jsonFile" adapter setting instead of a hard-coded test file
+        this.jsonFile = getJsonFileUrl( settings.get( "jsonFile" ) );
@@ -97,7 +100,6 @@ private URL getJsonFileUrl( String file ) {
     @Override
     public void updateNamespace( String name, long id ) {
-        // TODO: Ask David. What is name used for?
         namespace = new JsonNamespace( name, id, adapterId );
     }
 
@@ -115,15 +117,14 @@ public void shutdown() {
     @Override
-    public Map> getExportedColumns() {
+    public List getExportedCollection() {
         if ( !Sources.of( jsonFile ).file().isFile() ) {
             throw new RuntimeException( "File must be a single JSON file, not a directory." );
         }
         try {
-            String namespaceName = "foo"; //TODO: Where do i get this from or where is it set?
-            return JsonMetaRetriever.getFields( jsonFile, namespaceName );
+            return JsonMetaRetriever.getDocuments( jsonFile );
        } catch ( IOException e ) {
-            throw new RuntimeException( "Failed to retrieve columns from json file." );
+            throw new GenericRuntimeException( "Failed to retrieve documents from json file.", e );
         }
     }
 
@@ -151,11 +152,6 @@ public void restoreCollection( AllocationCollection allocation, List
-    public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) {
-        log.debug( "NOT SUPPORTED: JSON source does not support method createTable()." );
-        return null;
-    }
 
     @Override
     public List createCollection( Context context, LogicalCollection logical, AllocationCollection allocation ) {
@@ -217,12 +213,20 @@ public void rollback( PolyXid xid ) {
         log.debug( "NOT SUPPORTED: JSON source does not support method rollback()."
); } + @Override public void dropTable( Context context, long allocId ) { log.debug( "NOT SUPPORTED: JSON source does not support method dropTable()" ); } + @Override + public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + log.debug( "NOT SUPPORTED: JSON source does not support method createTable()." ); + return null; + } + + @Override public void restoreTable( AllocationTable alloc, List entities, Context context ) { log.debug( "NOT SUPPORTED: JSON source does not support method restoreTable()." ); @@ -255,7 +259,9 @@ private interface Excludes { void createCollection( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ); void restoreCollection( AllocationTable alloc, List entities ); + } + }