Skip to content

Commit

Permalink
Start document source refactoring
Browse files Browse the repository at this point in the history
  • Loading branch information
Tobias Hafner committed May 2, 2024
1 parent f986ca6 commit d1292e8
Show file tree
Hide file tree
Showing 10 changed files with 210 additions and 147 deletions.
56 changes: 13 additions & 43 deletions core/src/main/java/org/polypheny/db/adapter/DataSource.java
Original file line number Diff line number Diff line change
Expand Up @@ -18,68 +18,37 @@

import com.google.gson.JsonObject;
import com.google.gson.JsonSerializer;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import lombok.AllArgsConstructor;
import java.util.Set;
import lombok.Getter;
import org.pf4j.ExtensionPoint;
import org.polypheny.db.catalog.catalogs.AdapterCatalog;
import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType;
import org.polypheny.db.type.PolyType;
import org.polypheny.db.catalog.logistic.DataModel;

@Getter
public abstract class DataSource<S extends AdapterCatalog> extends Adapter<S> implements ExtensionPoint {

private final boolean dataReadOnly;
private Set<DataModel> supportedDataModels;


protected DataSource( final long adapterId, final String uniqueName, final Map<String, String> settings, boolean dataReadOnly, S catalog ) {
protected DataSource( final long adapterId, final String uniqueName, final Map<String, String> settings, boolean dataReadOnly, S catalog, Set<DataModel> supportedDataModels ) {
super( adapterId, uniqueName, settings, catalog );
this.dataReadOnly = dataReadOnly;

this.supportedDataModels = supportedDataModels;
informationPage.setLabel( "Sources" );

}


public abstract Map<String, List<ExportedColumn>> getExportedColumns();


@AllArgsConstructor
public static class ExportedColumn {

public final String name;
public final PolyType type;
public final PolyType collectionsType;
public final Integer length;
public final Integer scale;
public final Integer dimension;
public final Integer cardinality;
public final boolean nullable;
public final String physicalSchemaName;
public final String physicalTableName;
public final String physicalColumnName;
public final int physicalPosition;
public final boolean primary;


public String getDisplayType() {
String typeStr = type.getName();
if ( scale != null ) {
typeStr += "(" + length + "," + scale + ")";
} else if ( length != null ) {
typeStr += "(" + length + ")";
}

if ( collectionsType != null ) {
typeStr += " " + collectionsType.getName();
if ( cardinality != null ) {
typeStr += "(" + dimension + "," + cardinality + ")";
} else if ( dimension != null ) {
typeStr += "(" + dimension + ")";
}
}
return typeStr;
}
protected DataSource( final long adapterId, final String uniqueName, final Map<String, String> settings, boolean dataReadOnly, S catalog ) {
super( adapterId, uniqueName, settings, catalog );
this.dataReadOnly = dataReadOnly;
this.supportedDataModels = new HashSet<>( List.of( DataModel.getDefault() ) );
informationPage.setLabel( "Sources" );

}

Expand All @@ -104,4 +73,5 @@ private AdapterType getAdapterType() {
return AdapterType.SOURCE;
}


}
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
/*
* Copyright 2019-2024 The Polypheny Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.polypheny.db.adapter;

import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Getter;
import org.polypheny.db.catalog.logistic.EntityType;

/**
 * Contract for source adapters that natively expose document data (collections) to Polypheny.
 */
public interface DocumentDataSource {

    /**
     * Lists the collections this data source exposes.
     *
     * @return metadata describing every exported collection
     */
    List<ExportedDocument> getExportedCollection();

    /**
     * Immutable descriptor of a single exported collection.
     * NOTE(review): the field name "isModifyable" looks like a typo for "isModifiable",
     * but it is preserved here so the generated accessor name stays stable for callers.
     */
    class ExportedDocument {

        private final String name;
        private final boolean isModifyable;
        private final EntityType type;


        public ExportedDocument( String name, boolean isModifyable, EntityType type ) {
            this.name = name;
            this.isModifyable = isModifyable;
            this.type = type;
        }


        public String getName() {
            return name;
        }


        public boolean isModifyable() {
            return isModifyable;
        }


        public EntityType getType() {
            return type;
        }

    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
/*
* Copyright 2019-2024 The Polypheny Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.polypheny.db.adapter;

import java.util.List;
import java.util.Map;
import lombok.AllArgsConstructor;
import org.polypheny.db.type.PolyType;

/**
 * Contract for source adapters that natively expose relational data (tables) to Polypheny.
 */
public interface RelationalDataSource {

    /**
     * Lists the tables this data source exposes.
     *
     * @return map from physical table name to the metadata of its exported columns
     */
    Map<String, List<ExportedColumn>> getExportedColumns();

    /**
     * Immutable descriptor of a single exported column.
     */
    class ExportedColumn {

        public final String name;
        public final PolyType type;
        public final PolyType collectionsType;
        public final Integer length;
        public final Integer scale;
        public final Integer dimension;
        public final Integer cardinality;
        public final boolean nullable;
        public final String physicalSchemaName;
        public final String physicalTableName;
        public final String physicalColumnName;
        public final int physicalPosition;
        public final boolean primary;


        public ExportedColumn( String name, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, String physicalSchemaName, String physicalTableName, String physicalColumnName, int physicalPosition, boolean primary ) {
            this.name = name;
            this.type = type;
            this.collectionsType = collectionsType;
            this.length = length;
            this.scale = scale;
            this.dimension = dimension;
            this.cardinality = cardinality;
            this.nullable = nullable;
            this.physicalSchemaName = physicalSchemaName;
            this.physicalTableName = physicalTableName;
            this.physicalColumnName = physicalColumnName;
            this.physicalPosition = physicalPosition;
            this.primary = primary;
        }


        /**
         * Renders a human-readable type string, e.g. "VARCHAR(255)".
         * Precision/scale and dimension/cardinality are appended only when present;
         * a collections type (e.g. array) is appended after the base type.
         */
        public String getDisplayType() {
            StringBuilder display = new StringBuilder( type.getName() );
            if ( scale != null ) {
                display.append( '(' ).append( length ).append( ',' ).append( scale ).append( ')' );
            } else if ( length != null ) {
                display.append( '(' ).append( length ).append( ')' );
            }

            if ( collectionsType != null ) {
                display.append( ' ' ).append( collectionsType.getName() );
                if ( cardinality != null ) {
                    display.append( '(' ).append( dimension ).append( ',' ).append( cardinality ).append( ')' );
                } else if ( dimension != null ) {
                    display.append( '(' ).append( dimension ).append( ')' );
                }
            }
            return display.toString();
        }

    }

}
61 changes: 50 additions & 11 deletions dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
Expand All @@ -38,6 +39,7 @@
import org.polypheny.db.adapter.AdapterManager;
import org.polypheny.db.adapter.DataSource;
import org.polypheny.db.adapter.DataSource.ExportedColumn;
import org.polypheny.db.adapter.DataSource.ExportedDocument;
import org.polypheny.db.adapter.DataStore;
import org.polypheny.db.adapter.DataStore.IndexMethodModel;
import org.polypheny.db.adapter.DeployMode;
Expand Down Expand Up @@ -70,6 +72,7 @@
import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper;
import org.polypheny.db.catalog.entity.logical.LogicalCollection;
import org.polypheny.db.catalog.entity.logical.LogicalColumn;
import org.polypheny.db.catalog.entity.logical.LogicalEntity;
import org.polypheny.db.catalog.entity.logical.LogicalForeignKey;
import org.polypheny.db.catalog.entity.logical.LogicalGraph;
import org.polypheny.db.catalog.entity.logical.LogicalIndex;
Expand Down Expand Up @@ -216,7 +219,44 @@ public void createStore( String uniqueName, String adapterName, AdapterType adap
/**
 * Deploys a new data source adapter and creates catalog entities for every
 * data model the adapter natively supports.
 *
 * @param uniqueName  unique name of the adapter (lower-cased internally)
 * @param adapterName name of the adapter implementation to deploy
 * @param namespace   id of the namespace the source entities are created in
 * @param adapterType type of the adapter (expected to be SOURCE)
 * @param config      adapter-specific settings
 * @param mode        deploy mode of the adapter
 * @throws IllegalArgumentException if the adapter natively supports the graph model (not yet implemented)
 */
public void createSource( String uniqueName, String adapterName, long namespace, AdapterType adapterType, Map<String, String> config, DeployMode mode ) {
    uniqueName = uniqueName.toLowerCase();
    DataSource<?> adapter = (DataSource<?>) AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, mode, config );
    Set<DataModel> supportedModels = adapter.getSupportedDataModels();
    if ( supportedModels.contains( DataModel.GRAPH ) ) {
        // Fail fast BEFORE any relational/document entities are created, and roll back
        // the already deployed adapter so it does not leak on the error path.
        AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() );
        // TODO: implement graph source creation
        throw new IllegalArgumentException( "Adapters with native data model graph are not yet supported!" );
    }
    if ( supportedModels.contains( DataModel.RELATIONAL ) ) {
        createRelationalSource( adapter, namespace );
    }
    if ( supportedModels.contains( DataModel.DOCUMENT ) ) {
        createDocumentSource( adapter, namespace );
    }
    catalog.updateSnapshot();
}


/**
 * Creates logical and allocation catalog entities for every collection exported
 * by the given document data source adapter.
 *
 * @param adapter   the freshly deployed source adapter
 * @param namespace id of the namespace the collections are created in
 * @throws GenericRuntimeException if the adapter fails to report its exported collections
 */
private void createDocumentSource( DataSource<?> adapter, long namespace ) {
    List<ExportedDocument> exportedCollections;
    try {
        exportedCollections = adapter.getExportedCollection();
    } catch ( Exception e ) {
        // Deployment failed: roll back the adapter registration before surfacing the error.
        AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() );
        throw new GenericRuntimeException( "Could not deploy adapter", e );
    }

    for ( ExportedDocument exportedDocument : exportedCollections ) {
        // Make sure the collection name is unique within the namespace.
        String documentName = getUniqueEntityName( namespace, exportedDocument.getName(), ( ns, en ) -> catalog.getSnapshot().doc().getCollection( ns, en ) );

        // Register the logical collection ONCE, under the unique name, with the metadata
        // reported by the adapter. (The previous code called addCollection twice — once
        // under the unique name and once under the raw exported name — leaving a stray
        // duplicate collection in the catalog.)
        LogicalCollection logicalCollection = catalog.getLogicalDoc( namespace ).addCollection( documentName, exportedDocument.getType(), exportedDocument.isModifyable() );
        AllocationCollection allocationCollection = catalog.getAllocDoc( namespace ).addAllocation( logicalCollection, logicalCollection.getId(), 0, adapter.getAdapterId() );

        buildNamespace( Catalog.defaultNamespaceId, logicalCollection, adapter );
        adapter.createCollection( null, logicalCollection, allocationCollection );
        catalog.updateSnapshot();
    }
}


private void createRelationalSource( DataSource<?> adapter, long namespace ) {
Map<String, List<ExportedColumn>> exportedColumns;
try {
exportedColumns = adapter.getExportedColumns();
Expand All @@ -227,14 +267,7 @@ public void createSource( String uniqueName, String adapterName, long namespace,
// Create table, columns etc.
for ( Map.Entry<String, List<ExportedColumn>> entry : exportedColumns.entrySet() ) {
// Make sure the table name is unique
String tableName = entry.getKey();
if ( catalog.getSnapshot().rel().getTable( namespace, tableName ).isPresent() ) {
int i = 0;
while ( catalog.getSnapshot().rel().getTable( namespace, tableName + i ).isPresent() ) {
i++;
}
tableName += i;
}
String tableName = getUniqueEntityName( namespace, entry.getKey(), ( ns, en ) -> catalog.getSnapshot().rel().getTable( ns, en ) );

LogicalTable logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() );
List<LogicalColumn> columns = new ArrayList<>();
Expand Down Expand Up @@ -275,10 +308,16 @@ public void createSource( String uniqueName, String adapterName, long namespace,
buildNamespace( Catalog.defaultNamespaceId, logical, adapter );
adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of() ), AllocationTableWrapper.of( allocation.unwrap( AllocationTable.class ).orElseThrow(), aColumns ) );
catalog.updateSnapshot();

}
catalog.updateSnapshot();
}


/**
 * Returns a name that is unique within the given namespace. The plain name is
 * returned when it is still free; otherwise the smallest non-negative integer
 * suffix that makes it unique is appended (name0, name1, ...).
 *
 * (The previous version unconditionally appended an enumerator, so even a fresh
 * entity "emps" was renamed to "emps0" — a regression against the inline logic
 * this helper replaced, which only appended a suffix on collision.)
 *
 * @param namespace id of the namespace to check uniqueness in
 * @param name      desired base name
 * @param retriever lookup returning a present Optional when (namespace, name) is already taken
 * @return the base name, possibly with a numeric suffix, guaranteed unused per the retriever
 */
private String getUniqueEntityName( Long namespace, String name, BiFunction<Long, String, Optional<?>> retriever ) {
    if ( retriever.apply( namespace, name ).isEmpty() ) {
        return name;
    }
    int enumerator = 0;
    while ( retriever.apply( namespace, name + enumerator ).isPresent() ) {
        enumerator++;
    }
    return name + enumerator;
}


Expand Down Expand Up @@ -2118,7 +2157,7 @@ private static List<LogicalColumn> sortByPosition( List<LogicalColumn> columns )
}


/**
 * Propagates the entity's namespace name, together with the given namespace id,
 * to the adapter so it can map the logical namespace to its physical one.
 */
private void buildNamespace( long namespaceId, LogicalEntity logical, Adapter<?> store ) {
    store.updateNamespace( logical.getNamespaceName(), namespaceId );
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,8 @@
import org.polypheny.db.adapter.ConnectionMethod;
import org.polypheny.db.adapter.DataSource;
import org.polypheny.db.adapter.DeployMode;
import org.polypheny.db.adapter.RelationalDataSource;
import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
import org.polypheny.db.adapter.RelationalScanDelegate;
import org.polypheny.db.adapter.annotations.AdapterProperties;
import org.polypheny.db.adapter.annotations.AdapterSettingDirectory;
Expand Down Expand Up @@ -72,7 +74,7 @@
@AdapterSettingString(subOf = "method_link", defaultValue = "classpath://hr", name = "directoryName", description = "You can select a path to a folder or specific .csv or .csv.gz files.", position = 2)
@AdapterSettingInteger(name = "maxStringLength", defaultValue = 255, position = 3,
description = "Which length (number of characters including whitespace) should be used for the varchar columns. Make sure this is equal or larger than the longest string in any of the columns.")
public class CsvSource extends DataSource<RelAdapterCatalog> {
public class CsvSource extends DataSource<RelAdapterCatalog> implements RelationalDataSource {

private static final Logger log = LoggerFactory.getLogger( CsvSource.class );
@Delegate(excludes = Excludes.class)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@
import org.polypheny.db.adapter.AdapterManager;
import org.polypheny.db.adapter.DataSource;
import org.polypheny.db.adapter.DeployMode;
import org.polypheny.db.adapter.RelationalDataSource;
import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
import org.polypheny.db.adapter.RelationalScanDelegate;
import org.polypheny.db.adapter.annotations.AdapterProperties;
import org.polypheny.db.adapter.annotations.AdapterSettingBoolean;
Expand Down Expand Up @@ -91,7 +93,7 @@ public void stop() {
@AdapterSettingString(name = "ClientUrl", description = "The URL of the ethereum JSON RPC client", defaultValue = "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04", position = 1)
@AdapterSettingInteger(name = "Blocks", description = "The number of Blocks to fetch when processing a query", defaultValue = 10, position = 2, modifiable = true)
@AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter Past Block", defaultValue = false, position = 3, modifiable = true)
public static class EthereumDataSource extends DataSource<RelAdapterCatalog> {
public static class EthereumDataSource extends DataSource<RelAdapterCatalog> implements RelationalDataSource {

@Delegate(excludes = Excludes.class)
private final RelationalScanDelegate delegate;
Expand Down Expand Up @@ -305,4 +307,4 @@ private interface Excludes {

}

}
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,8 @@
import org.polypheny.db.adapter.ConnectionMethod;
import org.polypheny.db.adapter.DataSource;
import org.polypheny.db.adapter.DeployMode;
import org.polypheny.db.adapter.RelationalDataSource;
import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
import org.polypheny.db.adapter.RelationalScanDelegate;
import org.polypheny.db.adapter.annotations.AdapterProperties;
import org.polypheny.db.adapter.annotations.AdapterSettingDirectory;
Expand Down Expand Up @@ -76,7 +78,7 @@
@AdapterSettingString(name = "sheetName", description = "default to read the first sheet", defaultValue = "", required = false)
@AdapterSettingInteger(name = "maxStringLength", defaultValue = 255, position = 2,
description = "Which length (number of characters including whitespace) should be used for the varchar columns. Make sure this is equal or larger than the longest string in any of the columns.")
public class ExcelSource extends DataSource<RelAdapterCatalog> {
public class ExcelSource extends DataSource<RelAdapterCatalog> implements RelationalDataSource {

@Delegate(excludes = Excludes.class)
private final RelationalScanDelegate delegate;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,8 @@
import lombok.extern.slf4j.Slf4j;
import org.polypheny.db.adapter.DataSource;
import org.polypheny.db.adapter.DeployMode;
import org.polypheny.db.adapter.RelationalDataSource;
import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
import org.polypheny.db.adapter.RelationalScanDelegate;
import org.polypheny.db.adapter.annotations.AdapterProperties;
import org.polypheny.db.adapter.annotations.AdapterSettingBoolean;
Expand Down Expand Up @@ -84,7 +86,7 @@
@AdapterSettingString(name = "oAuth-Client-ID", description = "Authentication credentials used for GoogleSheets API. Not the account credentials.", defaultValue = "", position = 5)
@AdapterSettingString(name = "oAuth-Client-Key", description = "Authentication credentials used for GoogleSheets API. Not the account credentials.", defaultValue = "")
@AdapterSettingString(name = "sheetName", description = "Name of sheet to use.", defaultValue = "")
public class GoogleSheetSource extends DataSource<RelAdapterCatalog> {
public class GoogleSheetSource extends DataSource<RelAdapterCatalog> implements RelationalDataSource {

@Delegate(excludes = Excludes.class)
private final RelationalScanDelegate delegate;
Expand Down
Loading

0 comments on commit d1292e8

Please sign in to comment.