diff --git a/core/src/main/java/org/polypheny/db/adapter/DataSource.java b/core/src/main/java/org/polypheny/db/adapter/DataSource.java index bb8982433c..1e42066ae8 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataSource.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataSource.java @@ -31,23 +31,13 @@ @Getter public abstract class DataSource extends Adapter implements ExtensionPoint { + private final Set supportedDataModels; private final boolean dataReadOnly; - private Set supportedDataModels; - - protected DataSource( final long adapterId, final String uniqueName, final Map settings, boolean dataReadOnly, S catalog, Set supportedDataModels ) { - super( adapterId, uniqueName, settings, catalog ); - this.dataReadOnly = dataReadOnly; - this.supportedDataModels = supportedDataModels; - informationPage.setLabel( "Sources" ); - - } - - - protected DataSource( final long adapterId, final String uniqueName, final Map settings, boolean dataReadOnly, S catalog ) { + protected DataSource( final long adapterId, final String uniqueName, final Map settings, boolean dataReadOnly, S catalog, List supportedModels ) { super( adapterId, uniqueName, settings, catalog ); this.dataReadOnly = dataReadOnly; - this.supportedDataModels = new HashSet<>( List.of( DataModel.getDefault() ) ); + this.supportedDataModels = new HashSet<>(supportedModels); informationPage.setLabel( "Sources" ); } @@ -73,5 +63,26 @@ private AdapterType getAdapterType() { return AdapterType.SOURCE; } + public boolean supportsRelational() { + return supportedDataModels.contains( DataModel.RELATIONAL ); + } + public boolean supportsDocument() { + return supportedDataModels.contains( DataModel.DOCUMENT ); + } + public boolean supportsGraph() { + return supportedDataModels.contains( DataModel.GRAPH ); + } + public RelationalDataSource asRelationalDataSource() { + // should be overridden by subclasses accordingly + throw new IllegalStateException("This source does not support the relational data 
model."); + } + public DocumentDataSource asDocumentDataSource() { + // should be overridden by subclasses accordingly + throw new IllegalStateException("This source does not support the document data model."); + } + public DocumentDataSource asGraphDataSource() { + // should be overridden by subclasses accordingly + throw new IllegalStateException("This source does not support the graph data model."); + } } diff --git a/core/src/main/java/org/polypheny/db/adapter/DocumentDataSource.java b/core/src/main/java/org/polypheny/db/adapter/DocumentDataSource.java index 350b5cf376..e3a5f1f078 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DocumentDataSource.java +++ b/core/src/main/java/org/polypheny/db/adapter/DocumentDataSource.java @@ -34,5 +34,4 @@ class ExportedDocument { private final EntityType type; } - } diff --git a/core/src/main/java/org/polypheny/db/adapter/RelationalDataSource.java b/core/src/main/java/org/polypheny/db/adapter/RelationalDataSource.java index dd9ab3ee8f..b5cc069ff1 100644 --- a/core/src/main/java/org/polypheny/db/adapter/RelationalDataSource.java +++ b/core/src/main/java/org/polypheny/db/adapter/RelationalDataSource.java @@ -63,5 +63,4 @@ public String getDisplayType() { } } - } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index 7c1b2f2cba..09faa483c5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -57,4 +57,9 @@ public Expression asExpression() { return Expressions.call( Catalog.CATALOG_EXPRESSION, "getCollection", Expressions.constant( id ) ); } + @Override + public String getNamespaceName() { + return Catalog.getInstance().getSnapshot().getNamespace( namespaceId ).orElseThrow().name; + } + } diff --git 
a/core/src/main/java/org/polypheny/db/catalog/impl/PolyCatalog.java b/core/src/main/java/org/polypheny/db/catalog/impl/PolyCatalog.java index 4b36738aa3..8dbc900452 100644 --- a/core/src/main/java/org/polypheny/db/catalog/impl/PolyCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/impl/PolyCatalog.java @@ -259,6 +259,7 @@ private void restoreLastState() { private void validateNamespaceType( long id, DataModel type ) { + LogicalNamespace namespace = logicalCatalogs.get( id ).getLogicalNamespace(); if ( logicalCatalogs.get( id ).getLogicalNamespace().dataModel != type ) { throw new GenericRuntimeException( "Error while retrieving namespace type" ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 69785f3aac..c207de705f 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -38,11 +38,11 @@ import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataSource; -import org.polypheny.db.adapter.DataSource.ExportedColumn; -import org.polypheny.db.adapter.DataSource.ExportedDocument; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DataStore.IndexMethodModel; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DocumentDataSource.ExportedDocument; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; @@ -211,7 +211,7 @@ public long createNamespace( String initialName, DataModel type, boolean ifNotEx @Override public void createStore( String uniqueName, String adapterName, AdapterType adapterType, Map config, DeployMode mode ) { uniqueName = uniqueName.toLowerCase(); - Adapter adapter = 
AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, mode, config ); + AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, mode, config ); } @@ -219,13 +219,14 @@ public void createStore( String uniqueName, String adapterName, AdapterType adap public void createSource( String uniqueName, String adapterName, long namespace, AdapterType adapterType, Map config, DeployMode mode ) { uniqueName = uniqueName.toLowerCase(); DataSource adapter = (DataSource) AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, mode, config ); - if ( adapter.getSupportedDataModels().contains( DataModel.RELATIONAL ) ) { + namespace = adapter.getCurrentNamespace() == null ? namespace : adapter.getCurrentNamespace().getId(); // TODO: clean implementation. Sources should either create their own namespace or there should be default namespaces for different models. + if ( adapter.supportsRelational() ) { createRelationalSource( adapter, namespace ); } - if ( adapter.getSupportedDataModels().contains( DataModel.DOCUMENT ) ) { + if ( adapter.supportsDocument() ) { createDocumentSource( adapter, namespace ); } - if ( adapter.getSupportedDataModels().contains( DataModel.GRAPH ) ) { + if ( adapter.supportsGraph() ) { // TODO: implement graph source creation throw new IllegalArgumentException( "Adapters with native data model graph are not yet supported!" 
); } @@ -236,7 +237,7 @@ public void createSource( String uniqueName, String adapterName, long namespace, private void createDocumentSource( DataSource adapter, long namespace ) { List exportedCollections; try { - exportedCollections = adapter.getExportedCollection(); + exportedCollections = adapter.asDocumentDataSource().getExportedCollection(); } catch ( Exception e ) { AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() ); throw new GenericRuntimeException( "Could not deploy adapter", e ); @@ -244,13 +245,13 @@ private void createDocumentSource( DataSource adapter, long namespace ) { for ( ExportedDocument exportedDocument : exportedCollections ) { String documentName = getUniqueEntityName( namespace, exportedDocument.getName(), ( ns, en ) -> catalog.getSnapshot().doc().getCollection( ns, en ) ); - LogicalCollection logical = catalog.getLogicalDoc( namespace ).addCollection( documentName, EntityType.SOURCE, !(adapter).isDataReadOnly() ); + LogicalCollection logicalCollection = catalog.getLogicalDoc( namespace ).addCollection( documentName, exportedDocument.getType(), exportedDocument.isModifyable() ); + AllocationPartition partition = catalog.getAllocDoc( namespace ).addPartition( logicalCollection, PartitionType.NONE, null ); + AllocationPlacement placement = catalog.getAllocDoc( namespace ).addPlacement( logicalCollection, adapter.getAdapterId() ); + AllocationCollection allocationCollection = catalog.getAllocDoc( namespace ).addAllocation( logicalCollection, placement.getId(), partition.getId(), adapter.getAdapterId() ); - LogicalCollection logicalCollection = catalog.getLogicalDoc( namespace ).addCollection( exportedDocument.getName(), exportedDocument.getType(), exportedDocument.isModifyable() ); - AllocationCollection allocationCollection = catalog.getAllocDoc( namespace ).addAllocation( logicalCollection, logical.getId(), 0, adapter.getAdapterId() ); - - buildNamespace( Catalog.defaultNamespaceId, logical, adapter ); - 
adapter.createCollection( null, logical, allocationCollection ); + buildDocumentNamespace( namespace, logicalCollection, adapter ); + adapter.createCollection( null, logicalCollection, allocationCollection ); catalog.updateSnapshot(); } } @@ -259,7 +260,7 @@ private void createDocumentSource( DataSource adapter, long namespace ) { private void createRelationalSource( DataSource adapter, long namespace ) { Map> exportedColumns; try { - exportedColumns = adapter.getExportedColumns(); + exportedColumns = adapter.asRelationalDataSource().getExportedColumns(); } catch ( Exception e ) { AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() ); throw new GenericRuntimeException( "Could not deploy adapter", e ); @@ -305,7 +306,7 @@ private void createRelationalSource( DataSource adapter, long namespace ) { aColumns.add( allocationColumn ); } - buildNamespace( Catalog.defaultNamespaceId, logical, adapter ); + buildRelationalNamespace( namespace, logical, adapter ); adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of() ), AllocationTableWrapper.of( allocation.unwrap( AllocationTable.class ).orElseThrow(), aColumns ) ); catalog.updateSnapshot(); } @@ -313,6 +314,9 @@ private void createRelationalSource( DataSource adapter, long namespace ) { private String getUniqueEntityName( Long namespace, String name, BiFunction> retriever ) { + if ( retriever.apply( namespace, name ).isEmpty() ) { + return name; + } int enumerator = 0; while ( retriever.apply( namespace, name + enumerator ).isPresent() ) { enumerator++; @@ -420,9 +424,8 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam long adapterId = allocation.adapterId; DataSource dataSource = AdapterManager.getInstance().getSource( adapterId ).orElseThrow(); - //String physicalTableName = catalog.getSnapshot().alloc().getPhysicalTable( catalogTable.id, adapterId ).name; - List exportedColumns = dataSource.getExportedColumns().get( table.name ); + List 
exportedColumns = dataSource.asRelationalDataSource().getExportedColumns().get( table.name ); // Check if physicalColumnName is valid ExportedColumn exportedColumn = null; @@ -2119,7 +2122,7 @@ private List addAllocationsForPlacement( long namespaceId, Stat columns.add( catalog.getAllocRel( namespaceId ).addColumn( placementId, logical.id, column.id, adapter.adapterId, PlacementType.AUTOMATIC, i++ ) ); } - buildNamespace( namespaceId, logical, adapter ); + buildRelationalNamespace( namespaceId, logical, adapter ); List tables = new ArrayList<>(); for ( Long partitionId : partitionIds ) { tables.add( addAllocationTable( namespaceId, statement, logical, lColumns, pkIds, placementId, partitionId, columns, adapter ) ); @@ -2157,7 +2160,11 @@ private static List sortByPosition( List columns ) } - private void buildNamespace( long namespaceId, LogicalEntity logical, Adapter store ) { + private void buildRelationalNamespace( long namespaceId, LogicalTable logical, Adapter store ) { + store.updateNamespace( logical.getNamespaceName(), namespaceId ); + } + + private void buildDocumentNamespace( long namespaceId, LogicalCollection logical, Adapter store ) { store.updateNamespace( logical.getNamespaceName(), namespaceId ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index 52863c217d..32b56b48de 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -42,7 +42,7 @@ import java.util.Map; import lombok.Getter; import org.jetbrains.annotations.Nullable; -import org.polypheny.db.adapter.DataSource.ExportedColumn; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import 
org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; @@ -112,7 +112,14 @@ public CsvTable createCsvTable( long id, PhysicalTable table, CsvSource csvSourc /** * Creates different subtype of table based on the "flavor" attribute. NOTE(review): a stray code fragment was spliced into this sentence by a bad merge/paste — "String tableName = entry.getKey(); if ( catalog.getSnapshot().rel().getTable( namespace, tableName ).isPresent() ) { int i = 0; while ( catalog.getSnapshot().rel().getTable( namespace, tableName + i ).isPresent() ) { i++; } tableName += i; }" — it references catalog/namespace/entry, none of which are in scope in CsvSchema; it resembles the unique-entity-name logic in DdlManagerImpl. Confirm its intended location and relocate it before applying this patch. */ private CsvTable createTable( long id, Source source, PhysicalTable table, List fieldTypes, int[] fields, CsvSource csvSource ) { return switch ( flavor ) { diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index a96dfdc1eb..9e49989728 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -35,6 +35,7 @@ import org.polypheny.db.adapter.ConnectionMethod; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DocumentDataSource; import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.RelationalScanDelegate; @@ -53,6 +54,7 @@ import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; @@ -89,7 +91,7 @@ public class CsvSource extends DataSource implements Relation
final long storeId, final String uniqueName, final Map settings ) { - super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ) ); + super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ), List.of( DataModel.RELATIONAL ) ); this.connectionMethod = settings.containsKey( "method" ) ? ConnectionMethod.from( settings.get( "method" ).toUpperCase() ) : ConnectionMethod.UPLOAD; @@ -373,6 +375,10 @@ public void renameLogicalColumn( long id, String newColumnName ) { adapterCatalog.fields.values().stream().filter( c -> c.id == id ).forEach( c -> updateNativePhysical( c.allocId ) ); } + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } @SuppressWarnings("unused") private interface Excludes { diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index 0b0ba6d016..235f9d1870 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -32,7 +32,6 @@ import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.RelationalDataSource; -import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingBoolean; @@ -44,6 +43,7 @@ import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; import 
org.polypheny.db.plugins.PluginContext; @@ -107,7 +107,7 @@ public static class EthereumDataSource extends DataSource imp public EthereumDataSource( final long storeId, final String uniqueName, final Map settings ) { - super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ) ); + super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ), List.of( DataModel.RELATIONAL ) ); setClientURL( settings.get( "ClientUrl" ) ); this.blocks = Integer.parseInt( settings.get( "Blocks" ) ); this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) ); @@ -293,6 +293,12 @@ public void renameLogicalColumn( long id, String newColumnName ) { adapterCatalog.fields.values().stream().filter( c -> c.id == id ).forEach( c -> updateNativePhysical( c.allocId ) ); } + + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + } diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index e3bc73a60a..b1c742dc6e 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -42,7 +42,6 @@ import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.RelationalDataSource; -import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingDirectory; @@ -58,6 +57,7 @@ import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; 
import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; @@ -92,7 +92,7 @@ public class ExcelSource extends DataSource implements Relati public ExcelSource( final long storeId, final String uniqueName, final Map settings ) { - super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ) ); + super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ), List.of( DataModel.RELATIONAL ) ); this.connectionMethod = settings.containsKey( "method" ) ? ConnectionMethod.from( settings.get( "method" ) ) : ConnectionMethod.UPLOAD; // Validate maxStringLength setting @@ -370,6 +370,12 @@ private void addInformationExportedColumns() { } + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + + @SuppressWarnings("unused") private interface Excludes { diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java index b7fce19e46..4d72456d76 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java @@ -39,6 +39,9 @@ import org.jetbrains.annotations.NotNull; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DocumentDataSource; +import org.polypheny.db.adapter.RelationalDataSource; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingString; @@ -49,6 +52,7 @@ import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import 
org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationTable; @@ -69,7 +73,7 @@ usedModes = DeployMode.EMBEDDED, defaultMode = DeployMode.EMBEDDED) @AdapterSettingString(name = "rootDir", defaultValue = "") -public class Qfs extends DataSource { +public class Qfs extends DataSource implements RelationalDataSource { @Delegate(excludes = Exclude.class) private final RelationalScanDelegate delegate; @@ -82,7 +86,7 @@ public class Qfs extends DataSource { public Qfs( long adapterId, String uniqueName, Map settings ) { - super( adapterId, uniqueName, settings, true, new RelAdapterCatalog( adapterId ) ); + super( adapterId, uniqueName, settings, true, new RelAdapterCatalog( adapterId ), List.of( DataModel.RELATIONAL) ); init( settings ); registerInformationPage( uniqueName ); this.delegate = new RelationalScanDelegate( this, adapterCatalog ); @@ -224,7 +228,6 @@ private static String getString( File whitelist ) { } - @Override public Map> getExportedColumns() { //name, extension, path, mime, canExecute, canRead, canWrite, size, lastModified String physSchemaName = getUniqueName(); @@ -340,6 +343,11 @@ private static InformationTable getInformationTable( Entry implements public GoogleSheetSource( final long storeId, final String uniqueName, final Map settings ) { - super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ) ); + super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ), List.of( DataModel.RELATIONAL ) ); this.clientId = getSettingOrFail( "oAuth-Client-ID", settings ); this.clientKey = getSettingOrFail( "oAuth-Client-Key", settings ); @@ -383,6 +383,12 @@ public void rollback( PolyXid xid ) { } + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + + 
@SuppressWarnings("unused") private interface Excludes { diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 0da4dee864..09f6682c44 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -31,6 +31,7 @@ import org.apache.commons.dbcp2.BasicDataSource; import org.pf4j.ExtensionPoint; import org.polypheny.db.adapter.DataSource; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.jdbc.JdbcSchema; import org.polypheny.db.adapter.jdbc.JdbcUtils; @@ -43,6 +44,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Namespace; @@ -71,7 +73,7 @@ public AbstractJdbcSource( final String diverClass, final SqlDialect dialect, final boolean readOnly ) { - super( storeId, uniqueName, settings, readOnly, new RelAdapterCatalog( storeId ) ); + super( storeId, uniqueName, settings, readOnly, new RelAdapterCatalog( storeId ), List.of( DataModel.RELATIONAL) ); this.connectionFactory = createConnectionFactory( settings, dialect, diverClass ); this.dialect = dialect; // Register the JDBC Pool Size as information in the information manager and enable it @@ -194,7 +196,6 @@ public void rollback( PolyXid xid ) { protected abstract boolean requiresSchema(); - @Override public Map> getExportedColumns() { Map> map = 
new HashMap<>(); PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java index 06f31a071a..2e2215f77f 100644 --- a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java @@ -22,7 +22,6 @@ import java.net.URL; import java.util.LinkedList; import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; import org.polypheny.db.adapter.DocumentDataSource.ExportedDocument; import org.polypheny.db.catalog.logistic.EntityType; @@ -33,14 +32,10 @@ public static List getDocuments( URL jsonFile ) throws IOExcep JsonNode rootNode = objectMapper.readTree( jsonFile ); List exportedDocuments = new LinkedList<>(); String entityName = deriveEntityName( jsonFile.getFile() ); - if ( rootNode.isArray() ) { - AtomicInteger enumerator = new AtomicInteger(); - rootNode.forEach( elementNode -> exportedDocuments.add( new ExportedDocument( entityName + enumerator.getAndIncrement(), false, EntityType.SOURCE ) ) ); - } else if ( rootNode.isObject() ) { - exportedDocuments.add( new ExportedDocument( entityName, false, EntityType.SOURCE ) ); - } else { + if ( !(rootNode.isArray() || rootNode.isObject()) ) { throw new RuntimeException( "JSON file does not contain a valid top-level structure (neither an object nor an array)" ); } + exportedDocuments.add( new ExportedDocument( entityName, false, EntityType.SOURCE ) ); return exportedDocuments; } diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java index aeeb15c009..675927127f 100644 --- 
a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; -import java.util.HashSet; import java.util.List; import java.util.Map; import lombok.experimental.Delegate; @@ -29,8 +28,10 @@ import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.DocumentDataSource; import org.polypheny.db.adapter.DocumentScanDelegate; +import org.polypheny.db.adapter.Scannable; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingDirectory; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.catalogs.AdapterCatalog; import org.polypheny.db.catalog.catalogs.DocAdapterCatalog; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; @@ -58,7 +59,7 @@ usedModes = DeployMode.EMBEDDED, defaultMode = DeployMode.EMBEDDED) @AdapterSettingDirectory(name = "jsonFile", defaultValue = "classpath://articles.json", description = "Path to the JSON file which is to be integrated as this source.", position = 1) -public class JsonSource extends DataSource implements DocumentDataSource { +public class JsonSource extends DataSource implements DocumentDataSource, Scannable { private static final Logger log = LoggerFactory.getLogger( JsonSource.class ); @Delegate(excludes = Excludes.class) @@ -69,11 +70,13 @@ public class JsonSource extends DataSource implements Documen public JsonSource( final long storeId, final String uniqueName, final Map settings ) { - super( storeId, uniqueName, settings, true, new DocAdapterCatalog( storeId ), new HashSet<>( List.of( DataModel.DOCUMENT ) ) ); + super( storeId, uniqueName, settings, true, new DocAdapterCatalog( storeId ), List.of( DataModel.DOCUMENT ) ); //this.jsonFile = getJsonFileUrl( settings ); URL url = getJsonFileUrl( 
"classpath://articles.json" ); this.jsonFile = url; this.delegate = new DocumentScanDelegate( this, getAdapterCatalog() ); + long namespaceId = Catalog.getInstance().createNamespace( uniqueName, DataModel.DOCUMENT, true ); + this.namespace = new JsonNamespace( uniqueName, namespaceId, getAdapterId() ); } @@ -252,6 +255,12 @@ public void restoreGraph( AllocationGraph alloc, List entities, } + @Override + public DocumentDataSource asDocumentDataSource() { + return this; + } + + private interface Excludes { void refreshCollection( long allocId ); diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index b156dc16b7..078f301932 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -24,6 +24,8 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.dbcp2.BasicDataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DocumentDataSource; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingString; @@ -53,7 +55,7 @@ @AdapterSettingString(name = "password", defaultValue = "polypheny", description = "Username to be used for authenticating at the remote instance.", position = 5) @AdapterSettingInteger(name = "maxConnections", defaultValue = 25, description = "Password to be used for authenticating at the remote instance.") @AdapterSettingString(name = "table", defaultValue = "public.foo,public.bar", description = "Maximum number of concurrent JDBC connections.") -public class MonetdbSource extends AbstractJdbcSource { +public class 
MonetdbSource extends AbstractJdbcSource implements RelationalDataSource { public MonetdbSource( final long storeId, final String uniqueName, final Map settings ) { super( storeId, uniqueName, settings, "nl.cwi.monetdb.jdbc.MonetDriver", MonetdbSqlDialect.DEFAULT, false ); @@ -128,4 +130,33 @@ public List createTable( Context context, LogicalTableWrapper lo return List.of( physical ); } + @Override + public boolean supportsRelational() { + return true; + } + + + @Override + public boolean supportsDocument() { + return false; + } + + + @Override + public boolean supportsGraph() { + return false; + } + + + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + + + @Override + public DocumentDataSource asDocumentDataSource() { + throw new IllegalStateException("This source does not support the document data model."); + } + } diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index 637967784b..808fe6d4bc 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -24,6 +24,8 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DocumentDataSource; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; @@ -89,7 +91,7 @@ public void stop() { description = "Which level of transaction isolation should be used.") @AdapterSettingString(name = "tables", defaultValue = "foo,bar", description = "List of tables which should be imported. 
The names must to be separated by a comma.") - public static class MysqlSource extends AbstractJdbcSource { + public static class MysqlSource extends AbstractJdbcSource implements RelationalDataSource { public MysqlSource( final long storeId, final String uniqueName, final Map settings ) { super( storeId, uniqueName, settings, "org.mariadb.jdbc.Driver", MysqlSqlDialect.DEFAULT, false ); @@ -142,6 +144,35 @@ protected boolean requiresSchema() { return false; } + @Override + public boolean supportsRelational() { + return true; + } + + + @Override + public boolean supportsDocument() { + return false; + } + + + @Override + public boolean supportsGraph() { + return false; + } + + + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + + + @Override + public DocumentDataSource asDocumentDataSource() { + throw new IllegalStateException("This source does not support the document data model."); + } + } -} \ No newline at end of file +} diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 6dbe369ad9..d06066f209 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -23,6 +23,8 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DocumentDataSource; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; @@ -58,7 +60,7 @@ description = "Which level of transaction isolation should be used.") @AdapterSettingString(name = 
"tables", defaultValue = "foo,bar", description = "List of tables which should be imported. The names must to be separated by a comma.") -public class PostgresqlSource extends AbstractJdbcSource { +public class PostgresqlSource extends AbstractJdbcSource implements RelationalDataSource { public PostgresqlSource( final long storeId, final String uniqueName, final Map settings ) { super( @@ -115,4 +117,33 @@ public List createTable( Context context, LogicalTableWrapper lo } + @Override + public boolean supportsRelational() { + return true; + } + + + @Override + public boolean supportsDocument() { + return false; + } + + + @Override + public boolean supportsGraph() { + return false; + } + + + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + + + @Override + public DocumentDataSource asDocumentDataSource() { + throw new IllegalStateException( "This source does not support the document data model." ); + } + } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index b6a9d2d86e..5bd6132b07 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -83,9 +83,10 @@ import org.polypheny.db.adapter.AdapterManager.AdapterInformation; import org.polypheny.db.adapter.ConnectionMethod; import org.polypheny.db.adapter.DataSource; -import org.polypheny.db.adapter.DataSource.ExportedColumn; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; +import org.polypheny.db.adapter.RelationalDataSource; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.adapter.java.AdapterTemplate; import org.polypheny.db.algebra.AlgCollation; @@ -131,13 +132,13 @@ import org.polypheny.db.docker.DockerSetupHelper; import org.polypheny.db.docker.HandshakeManager; import 
org.polypheny.db.docker.exceptions.DockerUserException; -import org.polypheny.db.docker.models.CreateDockerResponse; import org.polypheny.db.docker.models.AutoDockerResult; import org.polypheny.db.docker.models.CreateDockerRequest; +import org.polypheny.db.docker.models.CreateDockerResponse; import org.polypheny.db.docker.models.DockerSettings; -import org.polypheny.db.docker.models.UpdateDockerRequest; import org.polypheny.db.docker.models.HandshakeInfo; import org.polypheny.db.docker.models.InstancesAndAutoDocker; +import org.polypheny.db.docker.models.UpdateDockerRequest; import org.polypheny.db.iface.QueryInterface; import org.polypheny.db.iface.QueryInterfaceManager; import org.polypheny.db.iface.QueryInterfaceManager.QueryInterfaceInformation; @@ -1101,7 +1102,7 @@ void getAvailableSourceColumns( final Context ctx ) { for ( Long adapterId : adapterIds ) { Adapter adapter = AdapterManager.getInstance().getAdapter( adapterId ).orElseThrow(); if ( adapter instanceof DataSource dataSource ) { - for ( Entry> entry : dataSource.getExportedColumns().entrySet() ) { + for ( Entry> entry : dataSource.asRelationalDataSource().getExportedColumns().entrySet() ) { List columnList = new ArrayList<>(); for ( ExportedColumn col : entry.getValue() ) { UiColumnDefinition dbCol = UiColumnDefinition.builder() @@ -1122,8 +1123,8 @@ void getAvailableSourceColumns( final Context ctx ) { } ctx.json( exportedColumns.toArray( new RelationalResult[0] ) ); return; - } + } } ctx.json( RelationalResult.builder().error( "Could not retrieve exported source fields." ).build() );