Skip to content

Commit

Permalink
Introduce data model handling
Browse files Browse the repository at this point in the history
  • Loading branch information
Tobias Hafner committed May 3, 2024
1 parent d1292e8 commit 8196df9
Show file tree
Hide file tree
Showing 19 changed files with 227 additions and 67 deletions.
37 changes: 24 additions & 13 deletions core/src/main/java/org/polypheny/db/adapter/DataSource.java
Original file line number Diff line number Diff line change
Expand Up @@ -31,23 +31,13 @@
@Getter
public abstract class DataSource<S extends AdapterCatalog> extends Adapter<S> implements ExtensionPoint {

private final Set<DataModel> supportedDataModels;
private final boolean dataReadOnly;
private Set<DataModel> supportedDataModels;


protected DataSource( final long adapterId, final String uniqueName, final Map<String, String> settings, boolean dataReadOnly, S catalog, Set<DataModel> supportedDataModels ) {
super( adapterId, uniqueName, settings, catalog );
this.dataReadOnly = dataReadOnly;
this.supportedDataModels = supportedDataModels;
informationPage.setLabel( "Sources" );

}


protected DataSource( final long adapterId, final String uniqueName, final Map<String, String> settings, boolean dataReadOnly, S catalog ) {
protected DataSource( final long adapterId, final String uniqueName, final Map<String, String> settings, boolean dataReadOnly, S catalog, List<DataModel> supportedModels ) {
super( adapterId, uniqueName, settings, catalog );
this.dataReadOnly = dataReadOnly;
this.supportedDataModels = new HashSet<>( List.of( DataModel.getDefault() ) );
this.supportedDataModels = new HashSet<>(supportedModels);
informationPage.setLabel( "Sources" );

}
Expand All @@ -73,5 +63,26 @@ private AdapterType getAdapterType() {
return AdapterType.SOURCE;
}

/**
 * @return whether this source exposes its data through the relational data model
 */
public boolean supportsRelational() {
return supportedDataModels.contains( DataModel.RELATIONAL );
}
/**
 * @return whether this source exposes its data through the document data model
 */
public boolean supportsDocument() {
return supportedDataModels.contains( DataModel.DOCUMENT );
}
/**
 * @return whether this source exposes its data through the graph data model
 */
public boolean supportsGraph() {
return supportedDataModels.contains( DataModel.GRAPH );
}

/**
 * Returns this source viewed as a relational source.
 * Subclasses that support the relational data model (see {@link #supportsRelational()})
 * must override this and return their relational facade; the base implementation always throws.
 *
 * @throws IllegalStateException if this source does not support the relational data model
 */
public RelationalDataSource asRelationalDataSource() {
// should be overridden by subclasses accordingly
throw new IllegalStateException("This source does not support the relational data model.");
}
/**
 * Returns this source viewed as a document source.
 * Subclasses that support the document data model (see {@link #supportsDocument()})
 * must override this and return their document facade; the base implementation always throws.
 *
 * @throws IllegalStateException if this source does not support the document data model
 */
public DocumentDataSource asDocumentDataSource() {
// should be overridden by subclasses accordingly
throw new IllegalStateException("This source does not support the document data model.");
}
/**
 * Returns this source viewed as a graph source.
 * Subclasses that support the graph data model (see {@link #supportsGraph()})
 * must override this and return their graph facade; the base implementation always throws.
 *
 * NOTE(review): the original declared {@code DocumentDataSource} as the return type — a
 * copy-paste error from {@code asDocumentDataSource()}; the graph facade type is used here.
 *
 * @throws IllegalStateException if this source does not support the graph data model
 */
public GraphDataSource asGraphDataSource() {
    // should be overridden by subclasses accordingly
    throw new IllegalStateException( "This source does not support the graph data model." );
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -34,5 +34,4 @@ class ExportedDocument {
private final EntityType type;

}

}
Original file line number Diff line number Diff line change
Expand Up @@ -63,5 +63,4 @@ public String getDisplayType() {
}

}

}
Original file line number Diff line number Diff line change
Expand Up @@ -57,4 +57,9 @@ public Expression asExpression() {
return Expressions.call( Catalog.CATALOG_EXPRESSION, "getCollection", Expressions.constant( id ) );
}

/**
 * Resolves the name of the namespace this collection belongs to from the current catalog snapshot.
 *
 * @return the name of the owning namespace
 */
@Override
public String getNamespaceName() {
    // The namespace id is taken from this entity; the namespace is expected to exist,
    // otherwise orElseThrow surfaces the inconsistency immediately.
    return Catalog.getInstance()
            .getSnapshot()
            .getNamespace( namespaceId )
            .orElseThrow()
            .name;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -259,6 +259,7 @@ private void restoreLastState() {


/**
 * Verifies that the namespace with the given id uses the expected data model.
 *
 * @param id   id of the logical namespace to check
 * @param type the data model the namespace is required to have
 * @throws GenericRuntimeException if the namespace uses a different data model
 */
private void validateNamespaceType( long id, DataModel type ) {
    LogicalNamespace namespace = logicalCatalogs.get( id ).getLogicalNamespace();
    // Reuse the namespace fetched above instead of performing a second identical lookup
    // (the original left this local unused and queried logicalCatalogs twice).
    if ( namespace.dataModel != type ) {
        throw new GenericRuntimeException( "Error while retrieving namespace type" );
    }
Expand Down
45 changes: 26 additions & 19 deletions dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -38,11 +38,11 @@
import org.polypheny.db.adapter.Adapter;
import org.polypheny.db.adapter.AdapterManager;
import org.polypheny.db.adapter.DataSource;
import org.polypheny.db.adapter.DataSource.ExportedColumn;
import org.polypheny.db.adapter.DataSource.ExportedDocument;
import org.polypheny.db.adapter.DataStore;
import org.polypheny.db.adapter.DataStore.IndexMethodModel;
import org.polypheny.db.adapter.DeployMode;
import org.polypheny.db.adapter.DocumentDataSource.ExportedDocument;
import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
import org.polypheny.db.adapter.index.IndexManager;
import org.polypheny.db.algebra.AlgCollation;
import org.polypheny.db.algebra.AlgNode;
Expand Down Expand Up @@ -211,21 +211,22 @@ public long createNamespace( String initialName, DataModel type, boolean ifNotEx
/**
 * Creates and registers a new data store adapter.
 *
 * @param uniqueName  unique name of the adapter (normalized to lower case)
 * @param adapterName name of the adapter implementation to deploy
 * @param adapterType type of the adapter (store)
 * @param config      adapter-specific settings
 * @param mode        deployment mode of the adapter
 */
@Override
public void createStore( String uniqueName, String adapterName, AdapterType adapterType, Map<String, String> config, DeployMode mode ) {
    // Adapter names are treated case-insensitively; normalize before registration.
    uniqueName = uniqueName.toLowerCase();
    // Registration happens inside the AdapterManager; the returned adapter is not needed here,
    // so addAdapter must be invoked exactly once (the span previously carried both the old
    // assignment form and the new call).
    AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, mode, config );
}


@Override
public void createSource( String uniqueName, String adapterName, long namespace, AdapterType adapterType, Map<String, String> config, DeployMode mode ) {
uniqueName = uniqueName.toLowerCase();
DataSource<?> adapter = (DataSource<?>) AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, mode, config );
if ( adapter.getSupportedDataModels().contains( DataModel.RELATIONAL ) ) {
namespace = adapter.getCurrentNamespace() == null ? namespace : adapter.getCurrentNamespace().getId(); // TODO: clean implementation. Sources should either create their own namespace or there should be default namespaces for different models.
if ( adapter.supportsRelational() ) {
createRelationalSource( adapter, namespace );
}
if ( adapter.getSupportedDataModels().contains( DataModel.DOCUMENT ) ) {
if ( adapter.supportsDocument() ) {
createDocumentSource( adapter, namespace );
}
if ( adapter.getSupportedDataModels().contains( DataModel.GRAPH ) ) {
if ( adapter.supportsGraph() ) {
// TODO: implement graph source creation
throw new IllegalArgumentException( "Adapters with native data model graph are not yet supported!" );
}
Expand All @@ -236,21 +237,21 @@ public void createSource( String uniqueName, String adapterName, long namespace,
private void createDocumentSource( DataSource<?> adapter, long namespace ) {
List<ExportedDocument> exportedCollections;
try {
exportedCollections = adapter.getExportedCollection();
exportedCollections = adapter.asDocumentDataSource().getExportedCollection();
} catch ( Exception e ) {
AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() );
throw new GenericRuntimeException( "Could not deploy adapter", e );
}

for ( ExportedDocument exportedDocument : exportedCollections ) {
String documentName = getUniqueEntityName( namespace, exportedDocument.getName(), ( ns, en ) -> catalog.getSnapshot().doc().getCollection( ns, en ) );
LogicalCollection logical = catalog.getLogicalDoc( namespace ).addCollection( documentName, EntityType.SOURCE, !(adapter).isDataReadOnly() );
LogicalCollection logicalCollection = catalog.getLogicalDoc( namespace ).addCollection( documentName, exportedDocument.getType(), exportedDocument.isModifyable() );
AllocationPartition partition = catalog.getAllocDoc( namespace ).addPartition( logicalCollection, PartitionType.NONE, null );
AllocationPlacement placement = catalog.getAllocDoc( namespace ).addPlacement( logicalCollection, adapter.getAdapterId() );
AllocationCollection allocationCollection = catalog.getAllocDoc( namespace ).addAllocation( logicalCollection, placement.getId(), partition.getId(), adapter.getAdapterId() );

LogicalCollection logicalCollection = catalog.getLogicalDoc( namespace ).addCollection( exportedDocument.getName(), exportedDocument.getType(), exportedDocument.isModifyable() );
AllocationCollection allocationCollection = catalog.getAllocDoc( namespace ).addAllocation( logicalCollection, logical.getId(), 0, adapter.getAdapterId() );

buildNamespace( Catalog.defaultNamespaceId, logical, adapter );
adapter.createCollection( null, logical, allocationCollection );
buildDocumentNamespace( namespace, logicalCollection, adapter );
adapter.createCollection( null, logicalCollection, allocationCollection );
catalog.updateSnapshot();
}
}
Expand All @@ -259,7 +260,7 @@ private void createDocumentSource( DataSource<?> adapter, long namespace ) {
private void createRelationalSource( DataSource<?> adapter, long namespace ) {
Map<String, List<ExportedColumn>> exportedColumns;
try {
exportedColumns = adapter.getExportedColumns();
exportedColumns = adapter.asRelationalDataSource().getExportedColumns();
} catch ( Exception e ) {
AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() );
throw new GenericRuntimeException( "Could not deploy adapter", e );
Expand Down Expand Up @@ -305,14 +306,17 @@ private void createRelationalSource( DataSource<?> adapter, long namespace ) {
aColumns.add( allocationColumn );
}

buildNamespace( Catalog.defaultNamespaceId, logical, adapter );
buildRelationalNamespace( namespace, logical, adapter );
adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of() ), AllocationTableWrapper.of( allocation.unwrap( AllocationTable.class ).orElseThrow(), aColumns ) );
catalog.updateSnapshot();
}
}


private String getUniqueEntityName( Long namespace, String name, BiFunction<Long, String, Optional<?>> retriever ) {
if ( retriever.apply( namespace, name ).isEmpty() ) {
return name;
}
int enumerator = 0;
while ( retriever.apply( namespace, name + enumerator ).isPresent() ) {
enumerator++;
Expand Down Expand Up @@ -420,9 +424,8 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam

long adapterId = allocation.adapterId;
DataSource<?> dataSource = AdapterManager.getInstance().getSource( adapterId ).orElseThrow();

//String physicalTableName = catalog.getSnapshot().alloc().getPhysicalTable( catalogTable.id, adapterId ).name;
List<ExportedColumn> exportedColumns = dataSource.getExportedColumns().get( table.name );
List<ExportedColumn> exportedColumns = dataSource.asRelationalDataSource().getExportedColumns().get( table.name );

// Check if physicalColumnName is valid
ExportedColumn exportedColumn = null;
Expand Down Expand Up @@ -2119,7 +2122,7 @@ private List<AllocationTable> addAllocationsForPlacement( long namespaceId, Stat
columns.add( catalog.getAllocRel( namespaceId ).addColumn( placementId, logical.id, column.id, adapter.adapterId, PlacementType.AUTOMATIC, i++ ) );
}

buildNamespace( namespaceId, logical, adapter );
buildRelationalNamespace( namespaceId, logical, adapter );
List<AllocationTable> tables = new ArrayList<>();
for ( Long partitionId : partitionIds ) {
tables.add( addAllocationTable( namespaceId, statement, logical, lColumns, pkIds, placementId, partitionId, columns, adapter ) );
Expand Down Expand Up @@ -2157,7 +2160,11 @@ private static List<LogicalColumn> sortByPosition( List<LogicalColumn> columns )
}


private void buildNamespace( long namespaceId, LogicalEntity logical, Adapter<?> store ) {
/**
 * Propagates the namespace name of the given logical table to the adapter for the given namespace id.
 * NOTE(review): body is identical to buildDocumentNamespace; kept separate per entity kind — consider unifying.
 */
private void buildRelationalNamespace( long namespaceId, LogicalTable logical, Adapter<?> store ) {
store.updateNamespace( logical.getNamespaceName(), namespaceId );
}

/**
 * Propagates the namespace name of the given logical collection to the adapter for the given namespace id.
 * NOTE(review): body is identical to buildRelationalNamespace; kept separate per entity kind — consider unifying.
 */
private void buildDocumentNamespace( long namespaceId, LogicalCollection logical, Adapter<?> store ) {
store.updateNamespace( logical.getNamespaceName(), namespaceId );
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@
import java.util.Map;
import lombok.Getter;
import org.jetbrains.annotations.Nullable;
import org.polypheny.db.adapter.DataSource.ExportedColumn;
import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
import org.polypheny.db.algebra.type.AlgDataType;
import org.polypheny.db.algebra.type.AlgDataTypeFactory;
import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder;
Expand Down Expand Up @@ -112,7 +112,14 @@ public CsvTable createCsvTable( long id, PhysicalTable table, CsvSource csvSourc


/**
* Creates different subtype of table based on the "flavor" attribute.
Creates different subtype of table based on the "flavor" attribute.
*/
private CsvTable createTable( long id, Source source, PhysicalTable table, List<CsvFieldType> fieldTypes, int[] fields, CsvSource csvSource ) {
return switch ( flavor ) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import org.polypheny.db.adapter.ConnectionMethod;
import org.polypheny.db.adapter.DataSource;
import org.polypheny.db.adapter.DeployMode;
import org.polypheny.db.adapter.DocumentDataSource;
import org.polypheny.db.adapter.RelationalDataSource;
import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
import org.polypheny.db.adapter.RelationalScanDelegate;
Expand All @@ -53,6 +54,7 @@
import org.polypheny.db.catalog.entity.physical.PhysicalEntity;
import org.polypheny.db.catalog.entity.physical.PhysicalTable;
import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
import org.polypheny.db.catalog.logistic.DataModel;
import org.polypheny.db.information.InformationGroup;
import org.polypheny.db.information.InformationTable;
import org.polypheny.db.prepare.Context;
Expand Down Expand Up @@ -89,7 +91,7 @@ public class CsvSource extends DataSource<RelAdapterCatalog> implements Relation


public CsvSource( final long storeId, final String uniqueName, final Map<String, String> settings ) {
super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ) );
super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ), List.of( DataModel.RELATIONAL ) );

this.connectionMethod = settings.containsKey( "method" ) ? ConnectionMethod.from( settings.get( "method" ).toUpperCase() ) : ConnectionMethod.UPLOAD;

Expand Down Expand Up @@ -373,6 +375,10 @@ public void renameLogicalColumn( long id, String newColumnName ) {
adapterCatalog.fields.values().stream().filter( c -> c.id == id ).forEach( c -> updateNativePhysical( c.allocId ) );
}

/**
 * This CSV source natively supports the relational data model, so it acts as its own relational facade.
 */
@Override
public RelationalDataSource asRelationalDataSource() {
return this;
}

@SuppressWarnings("unused")
private interface Excludes {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@
import org.polypheny.db.adapter.DataSource;
import org.polypheny.db.adapter.DeployMode;
import org.polypheny.db.adapter.RelationalDataSource;
import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
import org.polypheny.db.adapter.RelationalScanDelegate;
import org.polypheny.db.adapter.annotations.AdapterProperties;
import org.polypheny.db.adapter.annotations.AdapterSettingBoolean;
Expand All @@ -44,6 +43,7 @@
import org.polypheny.db.catalog.entity.physical.PhysicalEntity;
import org.polypheny.db.catalog.entity.physical.PhysicalTable;
import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
import org.polypheny.db.catalog.logistic.DataModel;
import org.polypheny.db.information.InformationGroup;
import org.polypheny.db.information.InformationTable;
import org.polypheny.db.plugins.PluginContext;
Expand Down Expand Up @@ -107,7 +107,7 @@ public static class EthereumDataSource extends DataSource<RelAdapterCatalog> imp


public EthereumDataSource( final long storeId, final String uniqueName, final Map<String, String> settings ) {
super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ) );
super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ), List.of( DataModel.RELATIONAL ) );
setClientURL( settings.get( "ClientUrl" ) );
this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
Expand Down Expand Up @@ -293,6 +293,12 @@ public void renameLogicalColumn( long id, String newColumnName ) {
adapterCatalog.fields.values().stream().filter( c -> c.id == id ).forEach( c -> updateNativePhysical( c.allocId ) );
}


/**
 * This Ethereum source natively supports the relational data model, so it acts as its own relational facade.
 */
@Override
public RelationalDataSource asRelationalDataSource() {
return this;
}

}


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@
import org.polypheny.db.adapter.DataSource;
import org.polypheny.db.adapter.DeployMode;
import org.polypheny.db.adapter.RelationalDataSource;
import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
import org.polypheny.db.adapter.RelationalScanDelegate;
import org.polypheny.db.adapter.annotations.AdapterProperties;
import org.polypheny.db.adapter.annotations.AdapterSettingDirectory;
Expand All @@ -58,6 +57,7 @@
import org.polypheny.db.catalog.entity.physical.PhysicalEntity;
import org.polypheny.db.catalog.entity.physical.PhysicalTable;
import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
import org.polypheny.db.catalog.logistic.DataModel;
import org.polypheny.db.information.InformationGroup;
import org.polypheny.db.information.InformationTable;
import org.polypheny.db.prepare.Context;
Expand Down Expand Up @@ -92,7 +92,7 @@ public class ExcelSource extends DataSource<RelAdapterCatalog> implements Relati


public ExcelSource( final long storeId, final String uniqueName, final Map<String, String> settings ) {
super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ) );
super( storeId, uniqueName, settings, true, new RelAdapterCatalog( storeId ), List.of( DataModel.RELATIONAL ) );

this.connectionMethod = settings.containsKey( "method" ) ? ConnectionMethod.from( settings.get( "method" ) ) : ConnectionMethod.UPLOAD;
// Validate maxStringLength setting
Expand Down Expand Up @@ -370,6 +370,12 @@ private void addInformationExportedColumns() {
}


/**
 * This Excel source natively supports the relational data model, so it acts as its own relational facade.
 */
@Override
public RelationalDataSource asRelationalDataSource() {
return this;
}


@SuppressWarnings("unused")
private interface Excludes {

Expand Down
Loading

0 comments on commit 8196df9

Please sign in to comment.