diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..c9a8b72c --- /dev/null +++ b/.editorconfig @@ -0,0 +1,844 @@ +[*] +charset = utf-8 +end_of_line = lf +indent_size = 4 +indent_style = space +insert_final_newline = true +max_line_length = 900 +tab_width = 4 +ij_continuation_indent_size = 4 +ij_formatter_off_tag = @formatter:off +ij_formatter_on_tag = @formatter:on +ij_formatter_tags_enabled = true +ij_smart_tabs = false +ij_visual_guides = 125 +ij_wrap_on_typing = false + +[*.css] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_css_align_closing_brace_with_properties = false +ij_css_blank_lines_around_nested_selector = 1 +ij_css_blank_lines_between_blocks = 1 +ij_css_block_comment_add_space = false +ij_css_brace_placement = end_of_line +ij_css_enforce_quotes_on_format = false +ij_css_hex_color_long_format = false +ij_css_hex_color_lower_case = false +ij_css_hex_color_short_format = false +ij_css_hex_color_upper_case = false +ij_css_keep_blank_lines_in_code = 2 +ij_css_keep_indents_on_empty_lines = false +ij_css_keep_single_line_blocks = false +ij_css_properties_order = font, font-family, font-size, font-weight, font-style, font-variant, font-size-adjust, font-stretch, line-height, position, z-index, top, right, bottom, left, display, visibility, float, clear, overflow, overflow-x, overflow-y, clip, zoom, align-content, align-items, align-self, flex, flex-flow, flex-basis, flex-direction, flex-grow, flex-shrink, flex-wrap, justify-content, order, box-sizing, width, min-width, max-width, height, min-height, max-height, margin, margin-top, margin-right, margin-bottom, margin-left, padding, padding-top, padding-right, padding-bottom, padding-left, table-layout, empty-cells, caption-side, border-spacing, border-collapse, list-style, list-style-position, list-style-type, list-style-image, content, quotes, counter-reset, counter-increment, resize, cursor, user-select, nav-index, nav-up, nav-right, nav-down, nav-left, transition, 
transition-delay, transition-timing-function, transition-duration, transition-property, transform, transform-origin, animation, animation-name, animation-duration, animation-play-state, animation-timing-function, animation-delay, animation-iteration-count, animation-direction, text-align, text-align-last, vertical-align, white-space, text-decoration, text-emphasis, text-emphasis-color, text-emphasis-style, text-emphasis-position, text-indent, text-justify, letter-spacing, word-spacing, text-outline, text-transform, text-wrap, text-overflow, text-overflow-ellipsis, text-overflow-mode, word-wrap, word-break, tab-size, hyphens, pointer-events, opacity, color, border, border-width, border-style, border-color, border-top, border-top-width, border-top-style, border-top-color, border-right, border-right-width, border-right-style, border-right-color, border-bottom, border-bottom-width, border-bottom-style, border-bottom-color, border-left, border-left-width, border-left-style, border-left-color, border-radius, border-top-left-radius, border-top-right-radius, border-bottom-right-radius, border-bottom-left-radius, border-image, border-image-source, border-image-slice, border-image-width, border-image-outset, border-image-repeat, outline, outline-width, outline-style, outline-color, outline-offset, background, background-color, background-image, background-repeat, background-attachment, background-position, background-position-x, background-position-y, background-clip, background-origin, background-size, box-decoration-break, box-shadow, text-shadow +ij_css_space_after_colon = true +ij_css_space_before_opening_brace = true +ij_css_use_double_quotes = true +ij_css_value_alignment = do_not_align + +[*.csv] +indent_style = tab +ij_visual_guides = none +ij_csv_keep_indents_on_empty_lines = true +ij_csv_wrap_long_lines = false + +[*.feature] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_gherkin_keep_indents_on_empty_lines = false + +[*.java] 
+ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_java_align_consecutive_assignments = false +ij_java_align_consecutive_variable_declarations = false +ij_java_align_group_field_declarations = false +ij_java_align_multiline_annotation_parameters = false +ij_java_align_multiline_array_initializer_expression = false +ij_java_align_multiline_assignment = false +ij_java_align_multiline_binary_operation = false +ij_java_align_multiline_chained_methods = false +ij_java_align_multiline_deconstruction_list_components = true +ij_java_align_multiline_extends_list = false +ij_java_align_multiline_for = false +ij_java_align_multiline_method_parentheses = false +ij_java_align_multiline_parameters = false +ij_java_align_multiline_parameters_in_calls = false +ij_java_align_multiline_parenthesized_expression = false +ij_java_align_multiline_records = true +ij_java_align_multiline_resources = false +ij_java_align_multiline_ternary_operation = false +ij_java_align_multiline_text_blocks = false +ij_java_align_multiline_throws_list = false +ij_java_align_subsequent_simple_methods = false +ij_java_align_throws_keyword = false +ij_java_align_types_in_multi_catch = true +ij_java_annotation_parameter_wrap = off +ij_java_array_initializer_new_line_after_left_brace = false +ij_java_array_initializer_right_brace_on_new_line = false +ij_java_array_initializer_wrap = normal +ij_java_assert_statement_colon_on_next_line = false +ij_java_assert_statement_wrap = off +ij_java_assignment_wrap = off +ij_java_binary_operation_sign_on_next_line = true +ij_java_binary_operation_wrap = normal +ij_java_blank_lines_after_anonymous_class_header = 0 +ij_java_blank_lines_after_class_header = 1 +ij_java_blank_lines_after_imports = 1 +ij_java_blank_lines_after_package = 1 +ij_java_blank_lines_around_class = 2 +ij_java_blank_lines_around_field = 0 +ij_java_blank_lines_around_field_in_interface = 0 +ij_java_blank_lines_around_initializer = 2 +ij_java_blank_lines_around_method = 2 
+ij_java_blank_lines_around_method_in_interface = 1 +ij_java_blank_lines_before_class_end = 1 +ij_java_blank_lines_before_imports = 1 +ij_java_blank_lines_before_method_body = 0 +ij_java_blank_lines_before_package = 0 +ij_java_block_brace_style = end_of_line +ij_java_block_comment_add_space = false +ij_java_block_comment_at_first_column = true +ij_java_builder_methods = none +ij_java_call_parameters_new_line_after_left_paren = false +ij_java_call_parameters_right_paren_on_new_line = false +ij_java_call_parameters_wrap = normal +ij_java_case_statement_on_separate_line = true +ij_java_catch_on_new_line = false +ij_java_class_annotation_wrap = split_into_lines +ij_java_class_brace_style = end_of_line +ij_java_class_count_to_use_import_on_demand = 999 +ij_java_class_names_in_javadoc = 1 +ij_java_deconstruction_list_wrap = normal +ij_java_do_not_indent_top_level_class_members = false +ij_java_do_not_wrap_after_single_annotation = false +ij_java_do_not_wrap_after_single_annotation_in_parameter = false +ij_java_do_while_brace_force = always +ij_java_doc_add_blank_line_after_description = true +ij_java_doc_add_blank_line_after_param_comments = false +ij_java_doc_add_blank_line_after_return = false +ij_java_doc_add_p_tag_on_empty_lines = false +ij_java_doc_align_exception_comments = false +ij_java_doc_align_param_comments = false +ij_java_doc_do_not_wrap_if_one_line = false +ij_java_doc_enable_formatting = true +ij_java_doc_enable_leading_asterisks = true +ij_java_doc_indent_on_continuation = false +ij_java_doc_keep_empty_lines = true +ij_java_doc_keep_empty_parameter_tag = true +ij_java_doc_keep_empty_return_tag = true +ij_java_doc_keep_empty_throws_tag = true +ij_java_doc_keep_invalid_tags = true +ij_java_doc_param_description_on_new_line = false +ij_java_doc_preserve_line_breaks = true +ij_java_doc_use_throws_not_exception_tag = true +ij_java_else_on_new_line = false +ij_java_entity_dd_suffix = EJB +ij_java_entity_eb_suffix = Bean +ij_java_entity_hi_suffix = Home 
+ij_java_entity_lhi_prefix = Local +ij_java_entity_lhi_suffix = Home +ij_java_entity_li_prefix = Local +ij_java_entity_pk_class = java.lang.String +ij_java_entity_vo_suffix = VO +ij_java_enum_constants_wrap = off +ij_java_extends_keyword_wrap = off +ij_java_extends_list_wrap = normal +ij_java_field_annotation_wrap = split_into_lines +ij_java_finally_on_new_line = false +ij_java_for_brace_force = always +ij_java_for_statement_new_line_after_left_paren = false +ij_java_for_statement_right_paren_on_new_line = false +ij_java_for_statement_wrap = normal +ij_java_generate_final_locals = false +ij_java_generate_final_parameters = false +ij_java_if_brace_force = always +ij_java_imports_layout = $*, |, * +ij_java_indent_case_from_switch = true +ij_java_insert_inner_class_imports = true +ij_java_insert_override_annotation = true +ij_java_keep_blank_lines_before_right_brace = 2 +ij_java_keep_blank_lines_between_package_declaration_and_header = 2 +ij_java_keep_blank_lines_in_code = 1 +ij_java_keep_blank_lines_in_declarations = 2 +ij_java_keep_builder_methods_indents = false +ij_java_keep_control_statement_in_one_line = false +ij_java_keep_first_column_comment = true +ij_java_keep_indents_on_empty_lines = false +ij_java_keep_line_breaks = true +ij_java_keep_multiple_expressions_in_one_line = false +ij_java_keep_simple_blocks_in_one_line = false +ij_java_keep_simple_classes_in_one_line = false +ij_java_keep_simple_lambdas_in_one_line = false +ij_java_keep_simple_methods_in_one_line = false +ij_java_label_indent_absolute = false +ij_java_label_indent_size = 0 +ij_java_lambda_brace_style = end_of_line +ij_java_layout_static_imports_separately = true +ij_java_line_comment_add_space = false +ij_java_line_comment_add_space_on_reformat = false +ij_java_line_comment_at_first_column = true +ij_java_message_dd_suffix = EJB +ij_java_message_eb_suffix = Bean +ij_java_method_annotation_wrap = split_into_lines +ij_java_method_brace_style = end_of_line +ij_java_method_call_chain_wrap = normal 
+ij_java_method_parameters_new_line_after_left_paren = true +ij_java_method_parameters_right_paren_on_new_line = false +ij_java_method_parameters_wrap = normal +ij_java_modifier_list_wrap = false +ij_java_multi_catch_types_wrap = normal +ij_java_names_count_to_use_import_on_demand = 999 +ij_java_new_line_after_lparen_in_annotation = false +ij_java_new_line_after_lparen_in_deconstruction_pattern = true +ij_java_new_line_after_lparen_in_record_header = false +ij_java_parameter_annotation_wrap = off +ij_java_parentheses_expression_new_line_after_left_paren = false +ij_java_parentheses_expression_right_paren_on_new_line = false +ij_java_place_assignment_sign_on_next_line = false +ij_java_prefer_longer_names = true +ij_java_prefer_parameters_wrap = false +ij_java_record_components_wrap = normal +ij_java_repeat_synchronized = true +ij_java_replace_instanceof_and_cast = false +ij_java_replace_null_check = true +ij_java_replace_sum_lambda_with_method_ref = true +ij_java_resource_list_new_line_after_left_paren = false +ij_java_resource_list_right_paren_on_new_line = false +ij_java_resource_list_wrap = off +ij_java_rparen_on_new_line_in_annotation = false +ij_java_rparen_on_new_line_in_deconstruction_pattern = true +ij_java_rparen_on_new_line_in_record_header = false +ij_java_session_dd_suffix = EJB +ij_java_session_eb_suffix = Bean +ij_java_session_hi_suffix = Home +ij_java_session_lhi_prefix = Local +ij_java_session_lhi_suffix = Home +ij_java_session_li_prefix = Local +ij_java_session_si_suffix = Service +ij_java_space_after_closing_angle_bracket_in_type_argument = false +ij_java_space_after_colon = true +ij_java_space_after_comma = true +ij_java_space_after_comma_in_type_arguments = true +ij_java_space_after_for_semicolon = true +ij_java_space_after_quest = true +ij_java_space_after_type_cast = true +ij_java_space_before_annotation_array_initializer_left_brace = false +ij_java_space_before_annotation_parameter_list = false 
+ij_java_space_before_array_initializer_left_brace = false +ij_java_space_before_catch_keyword = true +ij_java_space_before_catch_left_brace = true +ij_java_space_before_catch_parentheses = true +ij_java_space_before_class_left_brace = true +ij_java_space_before_colon = true +ij_java_space_before_colon_in_foreach = true +ij_java_space_before_comma = false +ij_java_space_before_deconstruction_list = false +ij_java_space_before_do_left_brace = true +ij_java_space_before_else_keyword = true +ij_java_space_before_else_left_brace = true +ij_java_space_before_finally_keyword = true +ij_java_space_before_finally_left_brace = true +ij_java_space_before_for_left_brace = true +ij_java_space_before_for_parentheses = true +ij_java_space_before_for_semicolon = false +ij_java_space_before_if_left_brace = true +ij_java_space_before_if_parentheses = true +ij_java_space_before_method_call_parentheses = false +ij_java_space_before_method_left_brace = true +ij_java_space_before_method_parentheses = false +ij_java_space_before_opening_angle_bracket_in_type_parameter = false +ij_java_space_before_quest = true +ij_java_space_before_switch_left_brace = true +ij_java_space_before_switch_parentheses = true +ij_java_space_before_synchronized_left_brace = true +ij_java_space_before_synchronized_parentheses = true +ij_java_space_before_try_left_brace = true +ij_java_space_before_try_parentheses = true +ij_java_space_before_type_parameter_list = false +ij_java_space_before_while_keyword = true +ij_java_space_before_while_left_brace = true +ij_java_space_before_while_parentheses = true +ij_java_space_inside_one_line_enum_braces = false +ij_java_space_within_empty_array_initializer_braces = false +ij_java_space_within_empty_method_call_parentheses = false +ij_java_space_within_empty_method_parentheses = false +ij_java_spaces_around_additive_operators = true +ij_java_spaces_around_annotation_eq = true +ij_java_spaces_around_assignment_operators = true +ij_java_spaces_around_bitwise_operators = 
true +ij_java_spaces_around_equality_operators = true +ij_java_spaces_around_lambda_arrow = true +ij_java_spaces_around_logical_operators = true +ij_java_spaces_around_method_ref_dbl_colon = false +ij_java_spaces_around_multiplicative_operators = true +ij_java_spaces_around_relational_operators = true +ij_java_spaces_around_shift_operators = true +ij_java_spaces_around_type_bounds_in_type_parameters = true +ij_java_spaces_around_unary_operator = false +ij_java_spaces_within_angle_brackets = false +ij_java_spaces_within_annotation_parentheses = false +ij_java_spaces_within_array_initializer_braces = true +ij_java_spaces_within_braces = false +ij_java_spaces_within_brackets = false +ij_java_spaces_within_cast_parentheses = false +ij_java_spaces_within_catch_parentheses = true +ij_java_spaces_within_deconstruction_list = false +ij_java_spaces_within_for_parentheses = true +ij_java_spaces_within_if_parentheses = true +ij_java_spaces_within_method_call_parentheses = true +ij_java_spaces_within_method_parentheses = true +ij_java_spaces_within_parentheses = false +ij_java_spaces_within_record_header = false +ij_java_spaces_within_switch_parentheses = true +ij_java_spaces_within_synchronized_parentheses = true +ij_java_spaces_within_try_parentheses = true +ij_java_spaces_within_while_parentheses = true +ij_java_special_else_if_treatment = true +ij_java_subclass_name_suffix = Impl +ij_java_ternary_operation_signs_on_next_line = true +ij_java_ternary_operation_wrap = normal +ij_java_test_name_suffix = Test +ij_java_throws_keyword_wrap = normal +ij_java_throws_list_wrap = off +ij_java_use_external_annotations = false +ij_java_use_fq_class_names = false +ij_java_use_relative_indents = false +ij_java_use_single_class_imports = true +ij_java_variable_annotation_wrap = off +ij_java_visibility = public +ij_java_while_brace_force = always +ij_java_while_on_new_line = false +ij_java_wrap_comments = false +ij_java_wrap_first_method_in_call_chain = false +ij_java_wrap_long_lines = 
false + +[*.less] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_less_align_closing_brace_with_properties = false +ij_less_blank_lines_around_nested_selector = 1 +ij_less_blank_lines_between_blocks = 1 +ij_less_block_comment_add_space = false +ij_less_brace_placement = 0 +ij_less_enforce_quotes_on_format = false +ij_less_hex_color_long_format = false +ij_less_hex_color_lower_case = false +ij_less_hex_color_short_format = false +ij_less_hex_color_upper_case = false +ij_less_keep_blank_lines_in_code = 2 +ij_less_keep_indents_on_empty_lines = false +ij_less_keep_single_line_blocks = false +ij_less_line_comment_add_space = false +ij_less_line_comment_at_first_column = false +ij_less_properties_order = font, font-family, font-size, font-weight, font-style, font-variant, font-size-adjust, font-stretch, line-height, position, z-index, top, right, bottom, left, display, visibility, float, clear, overflow, overflow-x, overflow-y, clip, zoom, align-content, align-items, align-self, flex, flex-flow, flex-basis, flex-direction, flex-grow, flex-shrink, flex-wrap, justify-content, order, box-sizing, width, min-width, max-width, height, min-height, max-height, margin, margin-top, margin-right, margin-bottom, margin-left, padding, padding-top, padding-right, padding-bottom, padding-left, table-layout, empty-cells, caption-side, border-spacing, border-collapse, list-style, list-style-position, list-style-type, list-style-image, content, quotes, counter-reset, counter-increment, resize, cursor, user-select, nav-index, nav-up, nav-right, nav-down, nav-left, transition, transition-delay, transition-timing-function, transition-duration, transition-property, transform, transform-origin, animation, animation-name, animation-duration, animation-play-state, animation-timing-function, animation-delay, animation-iteration-count, animation-direction, text-align, text-align-last, vertical-align, white-space, text-decoration, text-emphasis, text-emphasis-color, 
text-emphasis-style, text-emphasis-position, text-indent, text-justify, letter-spacing, word-spacing, text-outline, text-transform, text-wrap, text-overflow, text-overflow-ellipsis, text-overflow-mode, word-wrap, word-break, tab-size, hyphens, pointer-events, opacity, color, border, border-width, border-style, border-color, border-top, border-top-width, border-top-style, border-top-color, border-right, border-right-width, border-right-style, border-right-color, border-bottom, border-bottom-width, border-bottom-style, border-bottom-color, border-left, border-left-width, border-left-style, border-left-color, border-radius, border-top-left-radius, border-top-right-radius, border-bottom-right-radius, border-bottom-left-radius, border-image, border-image-source, border-image-slice, border-image-width, border-image-outset, border-image-repeat, outline, outline-width, outline-style, outline-color, outline-offset, background, background-color, background-image, background-repeat, background-attachment, background-position, background-position-x, background-position-y, background-clip, background-origin, background-size, box-decoration-break, box-shadow, text-shadow +ij_less_space_after_colon = true +ij_less_space_before_opening_brace = true +ij_less_use_double_quotes = true +ij_less_value_alignment = 0 + +[*.proto] +ij_visual_guides = none +ij_protobuf_keep_blank_lines_in_code = 2 +ij_protobuf_keep_indents_on_empty_lines = false +ij_protobuf_keep_line_breaks = true +ij_protobuf_space_after_comma = true +ij_protobuf_space_before_comma = false +ij_protobuf_spaces_around_assignment_operators = true +ij_protobuf_spaces_within_braces = false +ij_protobuf_spaces_within_brackets = false + +[*.sass] +ij_visual_guides = none +ij_sass_align_closing_brace_with_properties = false +ij_sass_blank_lines_around_nested_selector = 1 +ij_sass_blank_lines_between_blocks = 1 +ij_sass_brace_placement = 0 +ij_sass_enforce_quotes_on_format = false +ij_sass_hex_color_long_format = false 
+ij_sass_hex_color_lower_case = false +ij_sass_hex_color_short_format = false +ij_sass_hex_color_upper_case = false +ij_sass_keep_blank_lines_in_code = 2 +ij_sass_keep_indents_on_empty_lines = false +ij_sass_keep_single_line_blocks = false +ij_sass_line_comment_add_space = false +ij_sass_line_comment_at_first_column = false +ij_sass_properties_order = font, font-family, font-size, font-weight, font-style, font-variant, font-size-adjust, font-stretch, line-height, position, z-index, top, right, bottom, left, display, visibility, float, clear, overflow, overflow-x, overflow-y, clip, zoom, align-content, align-items, align-self, flex, flex-flow, flex-basis, flex-direction, flex-grow, flex-shrink, flex-wrap, justify-content, order, box-sizing, width, min-width, max-width, height, min-height, max-height, margin, margin-top, margin-right, margin-bottom, margin-left, padding, padding-top, padding-right, padding-bottom, padding-left, table-layout, empty-cells, caption-side, border-spacing, border-collapse, list-style, list-style-position, list-style-type, list-style-image, content, quotes, counter-reset, counter-increment, resize, cursor, user-select, nav-index, nav-up, nav-right, nav-down, nav-left, transition, transition-delay, transition-timing-function, transition-duration, transition-property, transform, transform-origin, animation, animation-name, animation-duration, animation-play-state, animation-timing-function, animation-delay, animation-iteration-count, animation-direction, text-align, text-align-last, vertical-align, white-space, text-decoration, text-emphasis, text-emphasis-color, text-emphasis-style, text-emphasis-position, text-indent, text-justify, letter-spacing, word-spacing, text-outline, text-transform, text-wrap, text-overflow, text-overflow-ellipsis, text-overflow-mode, word-wrap, word-break, tab-size, hyphens, pointer-events, opacity, color, border, border-width, border-style, border-color, border-top, border-top-width, border-top-style, 
border-top-color, border-right, border-right-width, border-right-style, border-right-color, border-bottom, border-bottom-width, border-bottom-style, border-bottom-color, border-left, border-left-width, border-left-style, border-left-color, border-radius, border-top-left-radius, border-top-right-radius, border-bottom-right-radius, border-bottom-left-radius, border-image, border-image-source, border-image-slice, border-image-width, border-image-outset, border-image-repeat, outline, outline-width, outline-style, outline-color, outline-offset, background, background-color, background-image, background-repeat, background-attachment, background-position, background-position-x, background-position-y, background-clip, background-origin, background-size, box-decoration-break, box-shadow, text-shadow +ij_sass_space_after_colon = true +ij_sass_space_before_opening_brace = true +ij_sass_use_double_quotes = true +ij_sass_value_alignment = 0 + +[*.scss] +ij_visual_guides = none +ij_scss_align_closing_brace_with_properties = false +ij_scss_blank_lines_around_nested_selector = 1 +ij_scss_blank_lines_between_blocks = 1 +ij_scss_block_comment_add_space = false +ij_scss_brace_placement = 0 +ij_scss_enforce_quotes_on_format = false +ij_scss_hex_color_long_format = false +ij_scss_hex_color_lower_case = false +ij_scss_hex_color_short_format = false +ij_scss_hex_color_upper_case = false +ij_scss_keep_blank_lines_in_code = 2 +ij_scss_keep_indents_on_empty_lines = false +ij_scss_keep_single_line_blocks = false +ij_scss_line_comment_add_space = false +ij_scss_line_comment_at_first_column = false +ij_scss_properties_order = font, font-family, font-size, font-weight, font-style, font-variant, font-size-adjust, font-stretch, line-height, position, z-index, top, right, bottom, left, display, visibility, float, clear, overflow, overflow-x, overflow-y, clip, zoom, align-content, align-items, align-self, flex, flex-flow, flex-basis, flex-direction, flex-grow, flex-shrink, flex-wrap, 
justify-content, order, box-sizing, width, min-width, max-width, height, min-height, max-height, margin, margin-top, margin-right, margin-bottom, margin-left, padding, padding-top, padding-right, padding-bottom, padding-left, table-layout, empty-cells, caption-side, border-spacing, border-collapse, list-style, list-style-position, list-style-type, list-style-image, content, quotes, counter-reset, counter-increment, resize, cursor, user-select, nav-index, nav-up, nav-right, nav-down, nav-left, transition, transition-delay, transition-timing-function, transition-duration, transition-property, transform, transform-origin, animation, animation-name, animation-duration, animation-play-state, animation-timing-function, animation-delay, animation-iteration-count, animation-direction, text-align, text-align-last, vertical-align, white-space, text-decoration, text-emphasis, text-emphasis-color, text-emphasis-style, text-emphasis-position, text-indent, text-justify, letter-spacing, word-spacing, text-outline, text-transform, text-wrap, text-overflow, text-overflow-ellipsis, text-overflow-mode, word-wrap, word-break, tab-size, hyphens, pointer-events, opacity, color, border, border-width, border-style, border-color, border-top, border-top-width, border-top-style, border-top-color, border-right, border-right-width, border-right-style, border-right-color, border-bottom, border-bottom-width, border-bottom-style, border-bottom-color, border-left, border-left-width, border-left-style, border-left-color, border-radius, border-top-left-radius, border-top-right-radius, border-bottom-right-radius, border-bottom-left-radius, border-image, border-image-source, border-image-slice, border-image-width, border-image-outset, border-image-repeat, outline, outline-width, outline-style, outline-color, outline-offset, background, background-color, background-image, background-repeat, background-attachment, background-position, background-position-x, background-position-y, background-clip, 
background-origin, background-size, box-decoration-break, box-shadow, text-shadow +ij_scss_space_after_colon = true +ij_scss_space_before_opening_brace = true +ij_scss_use_double_quotes = true +ij_scss_value_alignment = 0 + +[.editorconfig] +ij_visual_guides = none +ij_editorconfig_align_group_field_declarations = false +ij_editorconfig_space_after_colon = false +ij_editorconfig_space_after_comma = true +ij_editorconfig_space_before_colon = false +ij_editorconfig_space_before_comma = false +ij_editorconfig_spaces_around_assignment_operators = true + +[{*.ant,*.fxml,*.jhm,*.jnlp,*.jrxml,*.pom,*.rng,*.tld,*.wadl,*.wsdl,*.xml,*.xsd,*.xsl,*.xslt,*.xul}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_xml_align_attributes = false +ij_xml_align_text = false +ij_xml_attribute_wrap = normal +ij_xml_block_comment_add_space = false +ij_xml_block_comment_at_first_column = true +ij_xml_keep_blank_lines = 2 +ij_xml_keep_indents_on_empty_lines = false +ij_xml_keep_line_breaks = true +ij_xml_keep_line_breaks_in_text = true +ij_xml_keep_whitespaces = false +ij_xml_keep_whitespaces_around_cdata = preserve +ij_xml_keep_whitespaces_inside_cdata = false +ij_xml_line_comment_at_first_column = true +ij_xml_space_after_tag_name = false +ij_xml_space_around_equals_in_attribute = false +ij_xml_space_inside_empty_tag = false +ij_xml_text_wrap = normal +ij_xml_use_custom_settings = false + +[{*.bash,*.sh,*.zsh}] +ij_visual_guides = none +ij_shell_binary_ops_start_line = false +ij_shell_keep_column_alignment_padding = false +ij_shell_minify_program = false +ij_shell_redirect_followed_by_space = false +ij_shell_switch_cases_indented = false +ij_shell_use_unix_line_separator = true + +[{*.ft,*.vm,*.vsl}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_vtl_keep_indents_on_empty_lines = false + +[{*.gant,*.groovy,*.gy}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_groovy_align_group_field_declarations = false 
+ij_groovy_align_multiline_array_initializer_expression = false +ij_groovy_align_multiline_assignment = false +ij_groovy_align_multiline_binary_operation = false +ij_groovy_align_multiline_chained_methods = false +ij_groovy_align_multiline_extends_list = false +ij_groovy_align_multiline_for = true +ij_groovy_align_multiline_list_or_map = true +ij_groovy_align_multiline_method_parentheses = false +ij_groovy_align_multiline_parameters = true +ij_groovy_align_multiline_parameters_in_calls = false +ij_groovy_align_multiline_resources = true +ij_groovy_align_multiline_ternary_operation = false +ij_groovy_align_multiline_throws_list = false +ij_groovy_align_named_args_in_map = true +ij_groovy_align_throws_keyword = false +ij_groovy_array_initializer_new_line_after_left_brace = false +ij_groovy_array_initializer_right_brace_on_new_line = false +ij_groovy_array_initializer_wrap = off +ij_groovy_assert_statement_wrap = off +ij_groovy_assignment_wrap = off +ij_groovy_binary_operation_wrap = off +ij_groovy_blank_lines_after_class_header = 0 +ij_groovy_blank_lines_after_imports = 1 +ij_groovy_blank_lines_after_package = 1 +ij_groovy_blank_lines_around_class = 1 +ij_groovy_blank_lines_around_field = 0 +ij_groovy_blank_lines_around_field_in_interface = 0 +ij_groovy_blank_lines_around_method = 1 +ij_groovy_blank_lines_around_method_in_interface = 1 +ij_groovy_blank_lines_before_imports = 1 +ij_groovy_blank_lines_before_method_body = 0 +ij_groovy_blank_lines_before_package = 0 +ij_groovy_block_brace_style = end_of_line +ij_groovy_block_comment_add_space = false +ij_groovy_block_comment_at_first_column = true +ij_groovy_call_parameters_new_line_after_left_paren = false +ij_groovy_call_parameters_right_paren_on_new_line = false +ij_groovy_call_parameters_wrap = off +ij_groovy_catch_on_new_line = false +ij_groovy_class_annotation_wrap = split_into_lines +ij_groovy_class_brace_style = end_of_line +ij_groovy_class_count_to_use_import_on_demand = 5 +ij_groovy_do_while_brace_force = 
never +ij_groovy_else_on_new_line = false +ij_groovy_enable_groovydoc_formatting = true +ij_groovy_enum_constants_wrap = off +ij_groovy_extends_keyword_wrap = off +ij_groovy_extends_list_wrap = off +ij_groovy_field_annotation_wrap = split_into_lines +ij_groovy_finally_on_new_line = false +ij_groovy_for_brace_force = never +ij_groovy_for_statement_new_line_after_left_paren = false +ij_groovy_for_statement_right_paren_on_new_line = false +ij_groovy_for_statement_wrap = off +ij_groovy_ginq_general_clause_wrap_policy = 2 +ij_groovy_ginq_having_wrap_policy = 1 +ij_groovy_ginq_indent_having_clause = true +ij_groovy_ginq_indent_on_clause = true +ij_groovy_ginq_on_wrap_policy = 1 +ij_groovy_ginq_space_after_keyword = true +ij_groovy_if_brace_force = never +ij_groovy_import_annotation_wrap = 2 +ij_groovy_imports_layout = *, |, javax.**, java.**, |, $* +ij_groovy_indent_case_from_switch = true +ij_groovy_indent_label_blocks = true +ij_groovy_insert_inner_class_imports = false +ij_groovy_keep_blank_lines_before_right_brace = 2 +ij_groovy_keep_blank_lines_in_code = 2 +ij_groovy_keep_blank_lines_in_declarations = 2 +ij_groovy_keep_control_statement_in_one_line = true +ij_groovy_keep_first_column_comment = true +ij_groovy_keep_indents_on_empty_lines = false +ij_groovy_keep_line_breaks = true +ij_groovy_keep_multiple_expressions_in_one_line = false +ij_groovy_keep_simple_blocks_in_one_line = false +ij_groovy_keep_simple_classes_in_one_line = true +ij_groovy_keep_simple_lambdas_in_one_line = true +ij_groovy_keep_simple_methods_in_one_line = true +ij_groovy_label_indent_absolute = false +ij_groovy_label_indent_size = 0 +ij_groovy_lambda_brace_style = end_of_line +ij_groovy_layout_static_imports_separately = true +ij_groovy_line_comment_add_space = false +ij_groovy_line_comment_add_space_on_reformat = false +ij_groovy_line_comment_at_first_column = true +ij_groovy_method_annotation_wrap = split_into_lines +ij_groovy_method_brace_style = end_of_line +ij_groovy_method_call_chain_wrap 
= off +ij_groovy_method_parameters_new_line_after_left_paren = false +ij_groovy_method_parameters_right_paren_on_new_line = false +ij_groovy_method_parameters_wrap = off +ij_groovy_modifier_list_wrap = false +ij_groovy_names_count_to_use_import_on_demand = 3 +ij_groovy_packages_to_use_import_on_demand = java.awt.*, javax.swing.* +ij_groovy_parameter_annotation_wrap = off +ij_groovy_parentheses_expression_new_line_after_left_paren = false +ij_groovy_parentheses_expression_right_paren_on_new_line = false +ij_groovy_prefer_parameters_wrap = false +ij_groovy_resource_list_new_line_after_left_paren = false +ij_groovy_resource_list_right_paren_on_new_line = false +ij_groovy_resource_list_wrap = off +ij_groovy_space_after_assert_separator = true +ij_groovy_space_after_colon = true +ij_groovy_space_after_comma = true +ij_groovy_space_after_comma_in_type_arguments = true +ij_groovy_space_after_for_semicolon = true +ij_groovy_space_after_quest = true +ij_groovy_space_after_type_cast = true +ij_groovy_space_before_annotation_parameter_list = false +ij_groovy_space_before_array_initializer_left_brace = false +ij_groovy_space_before_assert_separator = false +ij_groovy_space_before_catch_keyword = true +ij_groovy_space_before_catch_left_brace = true +ij_groovy_space_before_catch_parentheses = true +ij_groovy_space_before_class_left_brace = true +ij_groovy_space_before_closure_left_brace = true +ij_groovy_space_before_colon = true +ij_groovy_space_before_comma = false +ij_groovy_space_before_do_left_brace = true +ij_groovy_space_before_else_keyword = true +ij_groovy_space_before_else_left_brace = true +ij_groovy_space_before_finally_keyword = true +ij_groovy_space_before_finally_left_brace = true +ij_groovy_space_before_for_left_brace = true +ij_groovy_space_before_for_parentheses = true +ij_groovy_space_before_for_semicolon = false +ij_groovy_space_before_if_left_brace = true +ij_groovy_space_before_if_parentheses = true +ij_groovy_space_before_method_call_parentheses = false 
+ij_groovy_space_before_method_left_brace = true +ij_groovy_space_before_method_parentheses = false +ij_groovy_space_before_quest = true +ij_groovy_space_before_record_parentheses = false +ij_groovy_space_before_switch_left_brace = true +ij_groovy_space_before_switch_parentheses = true +ij_groovy_space_before_synchronized_left_brace = true +ij_groovy_space_before_synchronized_parentheses = true +ij_groovy_space_before_try_left_brace = true +ij_groovy_space_before_try_parentheses = true +ij_groovy_space_before_while_keyword = true +ij_groovy_space_before_while_left_brace = true +ij_groovy_space_before_while_parentheses = true +ij_groovy_space_in_named_argument = true +ij_groovy_space_in_named_argument_before_colon = false +ij_groovy_space_within_empty_array_initializer_braces = false +ij_groovy_space_within_empty_method_call_parentheses = false +ij_groovy_spaces_around_additive_operators = true +ij_groovy_spaces_around_assignment_operators = true +ij_groovy_spaces_around_bitwise_operators = true +ij_groovy_spaces_around_equality_operators = true +ij_groovy_spaces_around_lambda_arrow = true +ij_groovy_spaces_around_logical_operators = true +ij_groovy_spaces_around_multiplicative_operators = true +ij_groovy_spaces_around_regex_operators = true +ij_groovy_spaces_around_relational_operators = true +ij_groovy_spaces_around_shift_operators = true +ij_groovy_spaces_within_annotation_parentheses = false +ij_groovy_spaces_within_array_initializer_braces = false +ij_groovy_spaces_within_braces = true +ij_groovy_spaces_within_brackets = false +ij_groovy_spaces_within_cast_parentheses = false +ij_groovy_spaces_within_catch_parentheses = false +ij_groovy_spaces_within_for_parentheses = false +ij_groovy_spaces_within_gstring_injection_braces = false +ij_groovy_spaces_within_if_parentheses = false +ij_groovy_spaces_within_list_or_map = false +ij_groovy_spaces_within_method_call_parentheses = false +ij_groovy_spaces_within_method_parentheses = false 
+ij_groovy_spaces_within_parentheses = false +ij_groovy_spaces_within_switch_parentheses = false +ij_groovy_spaces_within_synchronized_parentheses = false +ij_groovy_spaces_within_try_parentheses = false +ij_groovy_spaces_within_tuple_expression = false +ij_groovy_spaces_within_while_parentheses = false +ij_groovy_special_else_if_treatment = true +ij_groovy_ternary_operation_wrap = off +ij_groovy_throws_keyword_wrap = off +ij_groovy_throws_list_wrap = off +ij_groovy_use_flying_geese_braces = false +ij_groovy_use_fq_class_names = false +ij_groovy_use_fq_class_names_in_javadoc = true +ij_groovy_use_relative_indents = false +ij_groovy_use_single_class_imports = true +ij_groovy_variable_annotation_wrap = off +ij_groovy_while_brace_force = never +ij_groovy_while_on_new_line = false +ij_groovy_wrap_chain_calls_after_dot = false +ij_groovy_wrap_long_lines = false + +[{*.gradle.kts,*.kt,*.kts,*.main.kts,*.space.kts}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_kotlin_align_in_columns_case_branch = false +ij_kotlin_align_multiline_binary_operation = false +ij_kotlin_align_multiline_extends_list = false +ij_kotlin_align_multiline_method_parentheses = false +ij_kotlin_align_multiline_parameters = true +ij_kotlin_align_multiline_parameters_in_calls = false +ij_kotlin_allow_trailing_comma = false +ij_kotlin_allow_trailing_comma_on_call_site = false +ij_kotlin_assignment_wrap = off +ij_kotlin_blank_lines_after_class_header = 0 +ij_kotlin_blank_lines_around_block_when_branches = 0 +ij_kotlin_blank_lines_before_declaration_with_comment_or_annotation_on_separate_line = 1 +ij_kotlin_block_comment_add_space = false +ij_kotlin_block_comment_at_first_column = true +ij_kotlin_call_parameters_new_line_after_left_paren = false +ij_kotlin_call_parameters_right_paren_on_new_line = false +ij_kotlin_call_parameters_wrap = off +ij_kotlin_catch_on_new_line = false +ij_kotlin_class_annotation_wrap = split_into_lines +ij_kotlin_continuation_indent_for_chained_calls = true 
+ij_kotlin_continuation_indent_for_expression_bodies = true +ij_kotlin_continuation_indent_in_argument_lists = true +ij_kotlin_continuation_indent_in_elvis = true +ij_kotlin_continuation_indent_in_if_conditions = true +ij_kotlin_continuation_indent_in_parameter_lists = true +ij_kotlin_continuation_indent_in_supertype_lists = true +ij_kotlin_else_on_new_line = false +ij_kotlin_enum_constants_wrap = off +ij_kotlin_extends_list_wrap = off +ij_kotlin_field_annotation_wrap = split_into_lines +ij_kotlin_finally_on_new_line = false +ij_kotlin_if_rparen_on_new_line = false +ij_kotlin_import_nested_classes = false +ij_kotlin_imports_layout = *, java.**, javax.**, kotlin.**, ^ +ij_kotlin_insert_whitespaces_in_simple_one_line_method = true +ij_kotlin_keep_blank_lines_before_right_brace = 2 +ij_kotlin_keep_blank_lines_in_code = 2 +ij_kotlin_keep_blank_lines_in_declarations = 2 +ij_kotlin_keep_first_column_comment = true +ij_kotlin_keep_indents_on_empty_lines = false +ij_kotlin_keep_line_breaks = true +ij_kotlin_lbrace_on_next_line = false +ij_kotlin_line_break_after_multiline_when_entry = true +ij_kotlin_line_comment_add_space = false +ij_kotlin_line_comment_add_space_on_reformat = false +ij_kotlin_line_comment_at_first_column = true +ij_kotlin_method_annotation_wrap = split_into_lines +ij_kotlin_method_call_chain_wrap = off +ij_kotlin_method_parameters_new_line_after_left_paren = false +ij_kotlin_method_parameters_right_paren_on_new_line = false +ij_kotlin_method_parameters_wrap = off +ij_kotlin_name_count_to_use_star_import = 5 +ij_kotlin_name_count_to_use_star_import_for_members = 3 +ij_kotlin_packages_to_use_import_on_demand = java.util.*, kotlinx.android.synthetic.**, io.ktor.** +ij_kotlin_parameter_annotation_wrap = off +ij_kotlin_space_after_comma = true +ij_kotlin_space_after_extend_colon = true +ij_kotlin_space_after_type_colon = true +ij_kotlin_space_before_catch_parentheses = true +ij_kotlin_space_before_comma = false +ij_kotlin_space_before_extend_colon = true 
+ij_kotlin_space_before_for_parentheses = true +ij_kotlin_space_before_if_parentheses = true +ij_kotlin_space_before_lambda_arrow = true +ij_kotlin_space_before_type_colon = false +ij_kotlin_space_before_when_parentheses = true +ij_kotlin_space_before_while_parentheses = true +ij_kotlin_spaces_around_additive_operators = true +ij_kotlin_spaces_around_assignment_operators = true +ij_kotlin_spaces_around_equality_operators = true +ij_kotlin_spaces_around_function_type_arrow = true +ij_kotlin_spaces_around_logical_operators = true +ij_kotlin_spaces_around_multiplicative_operators = true +ij_kotlin_spaces_around_range = false +ij_kotlin_spaces_around_relational_operators = true +ij_kotlin_spaces_around_unary_operator = false +ij_kotlin_spaces_around_when_arrow = true +ij_kotlin_variable_annotation_wrap = off +ij_kotlin_while_on_new_line = false +ij_kotlin_wrap_elvis_expressions = 1 +ij_kotlin_wrap_expression_body_functions = 0 +ij_kotlin_wrap_first_method_in_call_chain = false + +[{*.har,*.json}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_json_array_wrapping = split_into_lines +ij_json_keep_blank_lines_in_code = 0 +ij_json_keep_indents_on_empty_lines = false +ij_json_keep_line_breaks = true +ij_json_keep_trailing_comma = false +ij_json_object_wrapping = split_into_lines +ij_json_property_alignment = do_not_align +ij_json_space_after_colon = true +ij_json_space_after_comma = true +ij_json_space_before_colon = false +ij_json_space_before_comma = false +ij_json_spaces_within_braces = false +ij_json_spaces_within_brackets = false +ij_json_wrap_long_lines = false + +[{*.htm,*.html,*.sht,*.shtm,*.shtml}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_html_add_new_line_before_tags = body, div, p, form, h1, h2, h3 +ij_html_align_attributes = true +ij_html_align_text = false +ij_html_attribute_wrap = normal +ij_html_block_comment_add_space = false +ij_html_block_comment_at_first_column = true +ij_html_do_not_align_children_of_min_lines = 0 
+ij_html_do_not_break_if_inline_tags = title, h1, h2, h3, h4, h5, h6, p +ij_html_do_not_indent_children_of_tags = html, body, thead, tbody, tfoot +ij_html_enforce_quotes = false +ij_html_inline_tags = a, abbr, acronym, b, basefont, bdo, big, br, cite, cite, code, dfn, em, font, i, img, input, kbd, label, q, s, samp, select, small, span, strike, strong, sub, sup, textarea, tt, u, var +ij_html_keep_blank_lines = 2 +ij_html_keep_indents_on_empty_lines = false +ij_html_keep_line_breaks = true +ij_html_keep_line_breaks_in_text = true +ij_html_keep_whitespaces = false +ij_html_keep_whitespaces_inside = span, pre, textarea +ij_html_line_comment_at_first_column = true +ij_html_new_line_after_last_attribute = never +ij_html_new_line_before_first_attribute = never +ij_html_quote_style = double +ij_html_remove_new_line_before_tags = br +ij_html_space_after_tag_name = false +ij_html_space_around_equality_in_attribute = false +ij_html_space_inside_empty_tag = false +ij_html_text_wrap = normal + +[{*.http,*.rest}] +indent_size = 0 +ij_visual_guides = none +ij_http-request_call_parameters_wrap = normal + +[{*.jsf,*.jsp,*.jspf,*.tag,*.tagf,*.xjsp}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_jsp_jsp_prefer_comma_separated_import_list = false +ij_jsp_keep_indents_on_empty_lines = false + +[{*.jspx,*.tagx}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_jspx_keep_indents_on_empty_lines = false + +[{*.markdown,*.md}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_markdown_force_one_space_after_blockquote_symbol = true +ij_markdown_force_one_space_after_header_symbol = true +ij_markdown_force_one_space_after_list_bullet = true +ij_markdown_force_one_space_between_words = true +ij_markdown_format_tables = true +ij_markdown_insert_quote_arrows_on_wrap = true +ij_markdown_keep_indents_on_empty_lines = false +ij_markdown_keep_line_breaks_inside_text_blocks = true +ij_markdown_max_lines_around_block_elements = 1 
+ij_markdown_max_lines_around_header = 1 +ij_markdown_max_lines_between_paragraphs = 1 +ij_markdown_min_lines_around_block_elements = 1 +ij_markdown_min_lines_around_header = 1 +ij_markdown_min_lines_between_paragraphs = 1 +ij_markdown_wrap_text_if_long = true +ij_markdown_wrap_text_inside_blockquotes = true + +[{*.pb,*.textproto}] +ij_visual_guides = none +ij_prototext_keep_blank_lines_in_code = 2 +ij_prototext_keep_indents_on_empty_lines = false +ij_prototext_keep_line_breaks = true +ij_prototext_space_after_colon = true +ij_prototext_space_after_comma = true +ij_prototext_space_before_colon = false +ij_prototext_space_before_comma = false +ij_prototext_spaces_within_braces = true +ij_prototext_spaces_within_brackets = false + +[{*.properties,spring.handlers,spring.schemas}] +ij_visual_guides = none +ij_properties_align_group_field_declarations = false +ij_properties_keep_blank_lines = true +ij_properties_key_value_delimiter = equals +ij_properties_spaces_around_key_value_delimiter = true + +[{*.qute.htm,*.qute.html,*.qute.json,*.qute.txt,*.qute.yaml,*.qute.yml}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_qute_keep_indents_on_empty_lines = false + +[{*.toml,Cargo.lock,Cargo.toml.orig,Gopkg.lock,Pipfile,poetry.lock}] +ij_continuation_indent_size = 8 +ij_visual_guides = none +ij_toml_keep_indents_on_empty_lines = false + +[{*.yaml,*.yml}] +ij_visual_guides = none +ij_yaml_align_values_properties = do_not_align +ij_yaml_autoinsert_sequence_marker = true +ij_yaml_block_mapping_on_new_line = false +ij_yaml_indent_sequence_value = true +ij_yaml_keep_indents_on_empty_lines = false +ij_yaml_keep_line_breaks = true +ij_yaml_sequence_on_new_line = false +ij_yaml_space_before_colon = false +ij_yaml_spaces_within_braces = true +ij_yaml_spaces_within_brackets = true diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3c798033..78ecbba1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,34 +3,120 @@ name: Polypheny 
JDBC Driver CI on: [ push, pull_request ] jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - # test against latest update of each major Java version, as well as specific updates of LTS versions: - java: [ 8, 11 ] - os: [ macos-latest, ubuntu-latest, windows-latest ] - name: Java ${{ matrix.java }} @ ${{ matrix.os }} - steps: - - uses: actions/checkout@v2 - - name: Set up JDK - uses: actions/setup-java@v1 - with: - java-version: ${{ matrix.java }} - - name: Cache Gradle packages - uses: actions/cache@v2 - with: - path: | - ~/.gradle/caches - ~/.gradle/wrapper - key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} - restore-keys: | - ${{ runner.os }}-gradle- - - name: Build with Gradle - run: ./gradlew build - - name: Cleanup Gradle Cache - # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions. - # Restoring these files from a GitHub Actions cache might cause problems for future builds. - run: | - rm -f ~/.gradle/caches/modules-2/modules-2.lock - rm -f ~/.gradle/caches/modules-2/gc.properties \ No newline at end of file + build-polypheny: + timeout-minutes: 15 + runs-on: ubuntu-latest + steps: + - uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '17' + + - name: Checkout Polypheny + uses: actions/checkout@v4 + with: + repository: polypheny/Polypheny-DB + ref: proto-without-grpc + + - name: Build Polypheny + run: | + ./gradlew build -x test -x licensee + + - name: Build Polypheny again (just to be sure all plugins are available) + run: | + ./gradlew build -x test -x licensee + + - name: Store Polypheny JAR + uses: actions/upload-artifact@v4 + with: + name: polypheny-jar + path: dbms/build/libs/dbms-0.10.0-SNAPSHOT.jar + + build: + needs: build-polypheny + strategy: + fail-fast: false + matrix: + java: [ 8, 11, 17, 21 ] + os: [ ubuntu-latest, macos-latest ] + exclude: + - os: macos-latest + java: 8 + timeout-minutes: 5 + runs-on: ${{ matrix.os }} + name: Java 
${{ matrix.java }} @ ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + - name: Set up JDK + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: | + 17 + ${{ matrix.java }} + - name: Load Polypheny JAR + uses: actions/download-artifact@v4 + with: + name: polypheny-jar + + - name: Build & test with Gradle + uses: polypheny/GitHub-Action-Run-Polypheny@dev + with: + cmd: ./gradlew build -PdisableToolchain=true + jar: dbms-0.10.0-SNAPSHOT.jar + java: ${{ env.JAVA_HOME_17_X64 || env.JAVA_HOME_17_ARM64 }}/bin/java + + build-windows: + needs: build-polypheny + strategy: + fail-fast: false + timeout-minutes: 20 + runs-on: windows-latest + name: Java 8 @ windows-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: | + 17 + 8 + - name: Load Polypheny JAR + uses: actions/download-artifact@v4 + with: + name: polypheny-jar + + - name: Build & test with Gradle + uses: polypheny/GitHub-Action-Run-Polypheny@v0.2.0 + with: + cmd: gradlew.bat build -PdisableToolchain=true + jar: dbms-0.10.0-SNAPSHOT.jar + java: ${{ env.JAVA_HOME_17_X64 }}\bin\java + + test-stores: + needs: build-polypheny + strategy: + fail-fast: false + matrix: + adapter: [ mongodb, hsqldb, monetdb, postgresql, file, cottontail, neo4j ] + timeout-minutes: 10 + runs-on: ubuntu-latest + name: Test on ${{ matrix.adapter }} + steps: + - uses: actions/checkout@v4 + - name: Set up JDK + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: 17 + - name: Load Polypheny JAR + uses: actions/download-artifact@v4 + with: + name: polypheny-jar + - name: Build with Gradle + uses: polypheny/GitHub-Action-Run-Polypheny@v0.2.0 + with: + cmd: ./gradlew build -PdisableToolchain=true + jar: dbms-0.10.0-SNAPSHOT.jar + default-store: ${{ matrix.adapter }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 10a6a10f..85f07765 100644 --- 
a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -10,12 +10,12 @@ jobs: contents: read packages: write steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up Java uses: actions/setup-java@v2 with: - java-version: '11' - distribution: 'adopt' + java-version: '17' + distribution: 'temurin' - name: Publish package run: ./gradlew publish env: diff --git a/.gitignore b/.gitignore index 3d19ed4e..3ef44a9a 100644 --- a/.gitignore +++ b/.gitignore @@ -250,3 +250,6 @@ nbdist/ # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) !/gradle/wrapper/gradle-wrapper.jar !/libs/avatica-1.16.0-POLYPHENYDB-shaded.jar + +# Ignore dynamically generated version file +src/main/resources/polypheny-jdbc-driver-version.properties diff --git a/CHANGELOG.md b/CHANGELOG.md index a5867ef8..70947e2b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,19 @@ +## 2.0 + +### May 10, 2024 + +CHANGES: + +* New version of JDBC driver utilizing the Polypheny Prism query interface and protocol. + +IMPROVEMENTS: + +* N/A + +BUG FIXES: + +* N/A + ## 1.5.3 ### November 21, 2021 diff --git a/NOTICE b/NOTICE deleted file mode 100644 index 8d3d130b..00000000 --- a/NOTICE +++ /dev/null @@ -1,7 +0,0 @@ -Polypheny-JDBC-Driver -Copyright 2019-2020 The Polypheny Project - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - -This product contains code from the Apache Calcite Avatica project. diff --git a/README.md b/README.md index 64a249e0..8605be9e 100644 --- a/README.md +++ b/README.md @@ -8,15 +8,17 @@ # Polypheny JDBC Driver -This repository contains a standards-compliant JDBC driver for Polypheny-DB. +This repository contains a JDBC driver for Polypheny. It utilizes the *Prism query interface* deployed with every instance of Polypheny by default. 
The driver adheres to the JDBC 4.2 standard, ensuring compatibility with Java applications, including those written in Scala and Kotlin, as well as tools like DataGrip. + +This driver is compatible with JVM version 8 or higher. ## Getting Started - The driver is published to Maven Central. Make sure that you have added `mavenCentral()` to the repositories section in your gradle build file. -- Add `implementation group: 'org.polypheny', name: 'polypheny-jdbc-driver', version: '1.5.3'` . -- Load the driver `org.polypheny.jdbc.Driver`, for example via +- Add `implementation group: 'org.polypheny', name: 'polypheny-jdbc-driver', version: '2.0'` . +- Optionally: load the driver `org.polypheny.jdbc.PolyphenyDriver`, for example via ``` - Class.forName( "org.polypheny.jdbc.Driver" ); + Class.forName( "org.polypheny.jdbc.PolyphenyDriver" ); ``` - Use the connection URL `jdbc:polypheny:http://localhost/` to connect to [Polypheny-DB](https://github.com/polypheny/Polypheny-DB). @@ -30,8 +32,6 @@ We highly welcome your contributions to the _Polypheny JDBC Driver_. If you woul Please note that we have a [code of conduct](https://github.com/polypheny/Admin/blob/master/CODE_OF_CONDUCT.md). Please follow it in all your interactions with the project. -## Credits -This JDBC Driver is based on [Apache Avatica](https://calcite.apache.org/avatica/), a framework for building database drivers. 
## License The Apache 2.0 License diff --git a/build.gradle b/build.gradle index fb66e011..46f8ae61 100644 --- a/build.gradle +++ b/build.gradle @@ -1,98 +1,126 @@ - plugins { id 'java-library' id 'maven-publish' id 'signing' - id 'idea' - id 'com.github.johnrengelman.shadow' version '7.1.2' - id 'io.freefair.lombok' version '6.5.1' + id 'org.jetbrains.gradle.plugin.idea-ext' version '1.1.8' + id 'com.github.johnrengelman.shadow' version '8.1.1' + id 'io.freefair.lombok' version '8.6' + id 'com.google.protobuf' version '0.9.4' } +group "org.polypheny" + +description = "A standards-compliant JDBC driver for Polypheny." + +def versionMajor = 2 +def versionMinor = 0 +def versionQualifier = "SNAPSHOT" +version = versionMajor + "." + versionMinor + (versionQualifier != '' ? "-" + versionQualifier : '') + repositories { mavenCentral() maven { - // DBIS Nexus - url "https://dbis-nexus.dmi.unibas.ch/repository/maven2/" - } - maven { - url "https://clojars.org/repo/" + url "https://plugins.gradle.org/m2/" } } - -group "org.polypheny" -archivesBaseName = "polypheny-jdbc-driver" -description = "A standards-compliant JDBC driver for Polypheny-DB." - -def versionMajor = 1 -def versionMinor = 6 -def versionQualifier = "-SNAPSHOT" -version = versionMajor + "." + versionMinor + versionQualifier +compileJava.options.encoding = "UTF-8" +compileTestJava.options.encoding = "UTF-8" +javadoc.options.encoding = "UTF-8" -tasks.withType(JavaCompile) { - options.encoding = "UTF-8" +// Allow to disable toolchain setting and use the java available on the system. Important for CI pipelines to test different +// java versions. 
+if (!hasProperty('disableToolchain')) { + java { + toolchain { + languageVersion = JavaLanguageVersion.of(8) + } + } +} else { + println "Disabled toolchain" + // Without this, it fails for Java 21 + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 } - java { - sourceCompatibility = JavaVersion.VERSION_1_8 - targetCompatibility = JavaVersion.VERSION_1_8 withJavadocJar() withSourcesJar() } +def protobufVersion = "3.23.4" -configurations.all { - // check for updates every build - resolutionStrategy.cacheChangingModulesFor 0, "seconds" -} - - -def avaticaVersion = '1.17.2-POLYPHENY' dependencies { - ////// APACHE CALCITE AVATICA - implementation group: "org.polypheny.avatica", name: "avatica-core", version: avaticaVersion // License: Apache 2.0 + // Prism API files (protobuf files), needs to be implementation due to the prism-api-version.properties + implementation group: 'org.polypheny', name: 'prism', version: '1.3' + + // Protobuf + implementation group: 'com.google.protobuf', name: 'protobuf-java', version: protobufVersion - ////// APACHE COMMONS LANG - implementation group: "org.apache.commons", name: "commons-lang3", version: "3.12.0" + // Apache Commons Lang + implementation group: "org.apache.commons", name: "commons-lang3", version: "3.14.0" - ////// LOGGING - implementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.3' // License: MIT + // Logging + implementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.13' // --- Test Compile --- - testImplementation group: "junit", name: "junit", version: '4.13.1' - testImplementation group: "org.testng", name: "testng", version: "6.14.3" - testImplementation group: "org.polypheny.avatica", name: "avatica-server", version: avaticaVersion + testImplementation group: 'org.junit.jupiter', name: 'junit-jupiter', version: '5.10.2' + testImplementation group: 'org.mockito', name: 'mockito-core', version: '4.11.0' } +apply plugin: "java" +apply plugin: "idea" 
+apply plugin: "io.freefair.lombok" +apply plugin: "com.github.johnrengelman.shadow" +apply plugin: "com.google.protobuf" + +// compile protos on build +build.dependsOn(generateProto) +delombok.dependsOn(generateProto) +sourcesJar.dependsOn(generateProto) +compileJava.dependsOn(generateProto) +processResources.dependsOn(extractIncludeProto) + +protobuf { + // Configure the protoc executable + protoc { + artifact = "com.google.protobuf:protoc:$protobufVersion" + } + + generateProtoTasks { + all().each { task -> + task.builtins { + java {} + } + } + } +} sourceSets { main { java { - srcDirs = ["src/main/java", "build/generated-sources"] + srcDirs = ["src/main/java", "build/generated/source/proto/main/java"] } resources { srcDirs = ["src/main/resources"] } - } - test { - java { - srcDirs = ["src/test/java"] - } - resources { - srcDirs = ["src/test/resources"] + proto { + srcDirs = ["build/extracted-include-protos/main"] } } } +test { + useJUnitPlatform() + testLogging.showStandardStreams = true +} /** - * JARs - */ + * JARs*/ jar { manifest { attributes 'Manifest-Version': '1.0' @@ -102,7 +130,9 @@ jar { attributes 'Version': project.version } } -/*task sourcesJar(type: Jar, dependsOn: classes) { + +/* +task sourcesJar(type: Jar, dependsOn: classes) { classifier "sources" from sourceSets.main.allSource } @@ -110,10 +140,18 @@ task javadocJar(type: Jar, dependsOn: javadoc) { classifier "javadoc" from javadoc.destinationDir }*/ + shadowJar { archiveClassifier = '' mergeServiceFiles() // merge the META-INF/services/java.sql.Driver files exclude('META-INF/INDEX.LIST', 'META-INF/*.SF', 'META-INF/*.DSA', 'META-INF/*.RSA', 'module-info.class', 'META-INF/services/com.fasterxml.jackson.core.*') + relocate 'google.protobuf', 'org.polypheny.jdbc.dependency.google.protobuf' + relocate 'org.slf4j', 'org.polypheny.jdbc.dependency.org.slf4j' + relocate 'org.apache.commons', 'org.polypheny.jdbc.dependency.org.apache.commons' + relocate 'com.google', 
'org.polypheny.jdbc.dependency.com.google' + relocate 'javax', 'org.polypheny.jdbc.dependency.javax' + relocate 'org.checkerframework', 'org.polypheny.jdbc.dependency.org.checkerframework' + relocate 'org.polypheny.prism', 'org.polypheny.jdbc.dependency.prism' // Prevents Polypheny-DB from using this during tests } assemble.dependsOn shadowJar @@ -124,6 +162,29 @@ artifacts { archives javadocJar // jar file containing the source files } +// generate version properties file +task generateVersionProperties { + def versionPropsFile = file('src/main/resources/polypheny-jdbc-driver-version.properties') + versionPropsFile.parentFile.mkdirs() + + // Generate version properties file with project version + versionPropsFile.withWriter { writer -> + writer.write("version=${version}\n") + writer.write("major=${versionMajor}\n") + writer.write("minor=${versionMinor}\n") + writer.write("qualifier=${versionQualifier}\n") + writer.write("buildTimestamp=${new Date().format("yyyy-MM-dd'T'HH:mm:ssZ")}") + } +} + +// Ensure the generated version properties file is created before compilation +compileJava.dependsOn generateVersionProperties + + +javadoc { + exclude 'org/polypheny/prism/**' + exclude 'com/google/protobuf/**' +} publishing { publications { @@ -133,7 +194,7 @@ publishing { artifact source: javadocJar, classifier: 'javadoc', extension: 'jar' pom { name = 'Polypheny JDBC Driver' - description = 'A standards-compliant JDBC driver for Polypheny-DB. ' + description = 'A standards-compliant JDBC driver for Polypheny-DB.' 
url = 'https://polypheny.org/' licenses { license { @@ -177,7 +238,6 @@ publishing { } //task generatePom(group: 'publishing', dependsOn: "generatePomFileFor${project.name.capitalize()}Publication") - signing { required { gradle.taskGraph.hasTask("publish") } def signingKey = findProperty("signingKey") @@ -186,18 +246,32 @@ signing { sign publishing.publications.shadow } - - /** - * IntelliJ - */ + * IntelliJ*/ idea { - module { - downloadJavadoc = true - downloadSources = true - - inheritOutputDirs = false - outputDir = file("${project.buildDir}/classes") - testOutputDir = file("${project.buildDir}/test-classes") + project { + settings { + copyright { + useDefault = "ApacheLicense" + profiles { + ApacheLicense { + notice = 'Copyright 2019-$today.year The Polypheny Project\n' + + '\n' + + 'Licensed under the Apache License, Version 2.0 (the \"License\");\n' + + 'you may not use this file except in compliance with the License.\n' + + 'You may obtain a copy of the License at\n' + + '\n' + + 'http://www.apache.org/licenses/LICENSE-2.0\n' + + '\n' + + 'Unless required by applicable law or agreed to in writing, software\n' + + 'distributed under the License is distributed on an \"AS IS\" BASIS,\n' + + 'WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n' + + 'See the License for the specific language governing permissions and\n' + + 'limitations under the License.' 
+ keyword = "Copyright" + } + } + } + } } } diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7454180f..033e24c4 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 8c234bdb..e7646dea 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,7 @@ distributionBase=GRADLE_USER_HOME -distributionPath = wrapper/dists -distributionUrl = https\://services.gradle.org/distributions/gradle-7.5.1-all.zip -zipStoreBase = GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip +networkTimeout=10000 +validateDistributionUrl=true +zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 1b6c7873..fcb6fca1 100755 --- a/gradlew +++ b/gradlew @@ -55,7 +55,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -80,13 +80,10 @@ do esac done -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -APP_NAME="Gradle" +# This is normally unused +# shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -133,22 +130,29 @@ location of your Java installation." 
fi else JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." + fi fi # Increase the maximum file descriptors if we can. if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac case $MAX_FD in #( '' | soft) :;; #( *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -193,6 +197,10 @@ if "$cygwin" || "$msys" ; then done fi + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + # Collect all arguments for the java command; # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of # shell script including quotes and variable substitutions, so put them in @@ -205,6 +213,12 @@ set -- \ org.gradle.wrapper.GradleWrapperMain \ "$@" +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + # Use "xargs" to parse quoted args. # # With -n1 it outputs one arg per line, with the quotes and backslashes removed. diff --git a/gradlew.bat b/gradlew.bat index 107acd32..93e3f59f 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -14,7 +14,7 @@ @rem limitations under the License. 
@rem -@if "%DEBUG%" == "" @echo off +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -25,7 +25,8 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @@ -40,7 +41,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute +if %ERRORLEVEL% equ 0 goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -75,13 +76,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! 
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal diff --git a/settings.gradle b/settings.gradle index f471821e..d6930a94 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1 +1,15 @@ +plugins { + id("org.gradle.toolchains.foojay-resolver") version "0.8.0" +} + rootProject.name = 'polypheny-jdbc-driver' + +toolchainManagement { + jvm { + javaRepositories { + repository("foojay") { + resolverClass = org.gradle.toolchains.foojay.FoojayToolchainResolver + } + } + } +} diff --git a/src/main/java/org/polypheny/jdbc/BidirectionalScrollable.java b/src/main/java/org/polypheny/jdbc/BidirectionalScrollable.java new file mode 100644 index 00000000..9f89a5c3 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/BidirectionalScrollable.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +public interface BidirectionalScrollable extends Scrollable { + + boolean absolute( int rowIndex ) throws PrismInterfaceServiceException; + + boolean relative( int offset ) throws PrismInterfaceServiceException; + + boolean previous() throws PrismInterfaceServiceException; + + void beforeFirst() throws PrismInterfaceServiceException; + + void afterLast(); + + boolean first(); + + boolean last() throws InterruptedException; + +} diff --git a/src/main/java/org/polypheny/jdbc/BidirectionalScroller.java b/src/main/java/org/polypheny/jdbc/BidirectionalScroller.java new file mode 100644 index 00000000..7f5197aa --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/BidirectionalScroller.java @@ -0,0 +1,320 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.util.ArrayList; +import java.util.List; +import org.polypheny.jdbc.properties.PolyphenyResultSetProperties; +import org.polypheny.jdbc.types.TypedValue; +import org.polypheny.jdbc.utils.TypedValueUtils; +import org.polypheny.prism.Frame; + +public class BidirectionalScroller implements BidirectionalScrollable> { + + private static final int INDEX_BEFORE_FIRST = -1; + private static final int DEFAULT_PREFETCH_COUNT = 20; + private List> values; + private List currentRow; + private ResultFetcher resultFetcher; + private PolyphenyResultSetProperties properties; + private Thread fetcherThread; + int currentIndex; + + + public BidirectionalScroller( Frame frame, PrismInterfaceClient client, int statementId, PolyphenyResultSetProperties properties, int fetchTimeout ) { + this.values = new ArrayList<>( TypedValueUtils.buildRows( frame.getRelationalFrame().getRowsList() ) ); + if ( properties.getLargeMaxRows() != 0 && values.size() > properties.getLargeMaxRows() ) { + values.subList( longToInt( properties.getLargeMaxRows() ), values.size() ).clear(); + } + this.resultFetcher = new ResultFetcher( client, statementId, properties, values.size(), fetchTimeout ); + this.resultFetcher.setLast( frame.getIsLast() ); + this.currentIndex = INDEX_BEFORE_FIRST; + this.properties = properties; + } + + + protected int longToInt( long longNumber ) { + return Math.toIntExact( longNumber ); + } + + + private boolean fetchUpTo( int rowIndex ) throws InterruptedException { + while ( values.size() < rowIndex ) { + if ( resultFetcher.isLast() ) { + return false; + } + fetcherThread = new Thread( resultFetcher ); + fetcherThread.start(); + fetcherThread.join(); + } + return true; + } + + + @Override + public void fetchAllAndSync() throws InterruptedException { + fetchAll(); + syncFetch(); + } + + + private void fetchAll() throws InterruptedException { + while ( !resultFetcher.isLast() ) { + fetcherThread = new Thread( resultFetcher ); + 
fetcherThread.start(); + fetcherThread.join(); + } + } + + + @Override + public boolean absolute( int rowIndex ) throws PrismInterfaceServiceException { + try { + if ( rowToIndex( rowIndex ) == currentIndex ) { + return true; + } + if ( rowIndex < 0 ) { + fetchAll(); + currentIndex = values.size() + rowIndex; + if ( currentIndex < 1 ) { + currentIndex = INDEX_BEFORE_FIRST; + currentRow = null; + return false; + } + currentRow = values.get( currentIndex ); + return true; + } + if ( rowIndex == 0 ) { + currentIndex = INDEX_BEFORE_FIRST; + currentRow = null; + return true; + } + if ( rowIndex <= values.size() ) { + currentIndex = rowToIndex( rowIndex ); + currentRow = values.get( currentIndex ); + return true; + } + if ( fetchUpTo( rowIndex ) ) { + currentIndex = rowToIndex( rowIndex ); + currentRow = values.get( currentIndex ); + considerPrefetch(); + return true; + } + // Explanation: This is not an off by one error: + // An index equal to the array size is one position after the last element. 
+ currentIndex = values.size(); + currentRow = null; + return false; + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Fetching of more rows failed", e ); + } + } + + + private int rowToIndex( int rowIndex ) { + return rowIndex - 1; + } + + + private int indexToRow( int index ) { + return index + 1; + } + + + @Override + public boolean relative( int offset ) throws PrismInterfaceServiceException { + try { + if ( offset == 0 ) { + return currentRow != null; + } + if ( currentIndex + offset < 0 ) { + currentIndex = INDEX_BEFORE_FIRST; + currentRow = null; + return false; + } + if ( currentIndex + offset < values.size() ) { + currentIndex += offset; + currentRow = values.get( currentIndex ); + return true; + } + if ( currentIndex + offset >= values.size() ) { + if ( fetchUpTo( indexToRow( currentIndex + offset ) ) ) { + currentIndex += offset; + currentRow = values.get( currentIndex ); + considerPrefetch(); + return true; + } + // Explanation: This is not an off by one error: + // An index equal to the array size is one position after the last element. + currentIndex = values.size(); + currentRow = null; + return false; + } + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Fetching more rows failed.", e ); + } + throw new PrismInterfaceServiceException( "Should never be thrown!" ); + } + + + @Override + public boolean previous() throws PrismInterfaceServiceException { + return relative( -1 ); + } + + + @Override + public void beforeFirst() throws PrismInterfaceServiceException { + absolute( 0 ); + } + + + @Override + public void afterLast() { + // Explanation: This is not an off by one error: + // An index equal to the array size is one position after the last element. 
+ currentIndex = values.size(); + currentRow = null; + } + + + @Override + public boolean first() { + currentRow = null; + currentIndex = INDEX_BEFORE_FIRST; + if ( values.isEmpty() ) { + return false; + } + currentIndex = 0; + currentRow = values.get( currentIndex ); + return true; + } + + + @Override + public boolean last() throws InterruptedException { + currentRow = null; + if ( resultFetcher.isLast() ) { + currentIndex = values.size() - 1; + currentRow = values.get( currentIndex ); + return true; + } + fetchAll(); + currentIndex = values.size() - 1; + currentRow = values.get( currentIndex ); + return true; + } + + + @Override + public boolean next() throws PrismInterfaceServiceException { + try { + considerPrefetch(); + syncFetch(); + currentIndex++; + currentRow = values.get( currentIndex ); + if ( currentRow == null ) { + return false; + } + return true; + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Fetching more rows from server failed.", e ); + } + } + + + private void considerPrefetch() { + int prefetch_count = Math.min( DEFAULT_PREFETCH_COUNT, properties.getStatementFetchSize() ); + if ( values.size() > prefetch_count ) { + return; + } + if ( resultFetcher.isLast() ) { + return; + } + if ( fetcherThread != null ) { + return; + } + fetcherThread = new Thread( resultFetcher ); + fetcherThread.start(); + } + + + private void syncFetch() throws InterruptedException { + if ( fetcherThread == null ) { + return; + } + // currently not at last element thus we don't have to wait on next frame + if ( !(currentIndex == values.size() - 1) ) { + return; + } + fetcherThread.join(); + fetcherThread = null; + values.addAll( resultFetcher.getFetchedValues() ); + } + + + @Override + public List current() { + return currentRow; + } + + + @Override + public void close() { + if ( fetcherThread == null ) { + return; + } + fetcherThread.interrupt(); + } + + + @Override + public boolean 
isBeforeFirst() { + return currentIndex == INDEX_BEFORE_FIRST; + } + + + @Override + public boolean isAfterLast() { + return values.isEmpty() || currentIndex == values.size(); + } + + + @Override + public boolean isFirst() { + return currentIndex == 0; + } + + + @Override + public boolean isLast() { + return currentIndex == values.size() - 1; + } + + + @Override + public int getRow() { + return indexToRow( currentIndex ); + } + + + @Override + public boolean hasCurrent() { + return currentRow != null; + } + +} diff --git a/src/main/java/org/polypheny/jdbc/ConnectionString.java b/src/main/java/org/polypheny/jdbc/ConnectionString.java new file mode 100644 index 00000000..cc7b4c3a --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/ConnectionString.java @@ -0,0 +1,165 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.StringTokenizer; +import java.util.stream.Collectors; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import org.polypheny.jdbc.properties.DriverProperties; +import org.polypheny.jdbc.properties.PropertyUtils; + +@Slf4j +public class ConnectionString { + + @Getter + private String host; + @Getter + private int port; + private final Map parameters; + + + public ConnectionString( String url ) throws SQLException { + this.parameters = new HashMap<>(); + parseUrl( url ); + } + + + public ConnectionString( String url, Properties parameters ) throws SQLException { + this.parameters = importPropertiesMap( parameters ); + parseUrl( url ); + } + + + public String getUser() { + return parameters.get( PropertyUtils.getUSERNAME_KEY() ); + } + + + private Map importPropertiesMap( Properties properties ) { + if ( properties == null ) { + return new HashMap<>(); + } + return properties.entrySet().stream().collect( Collectors.toMap( + e -> String.valueOf( e.getKey() ), + e -> String.valueOf( e.getValue() ), + ( prev, next ) -> next, HashMap::new ) ); + } + + + private void parseUrl( String url ) throws SQLException { + if ( url == null ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.URL_PARSING_INVALID, "URL must no be null." ); + } + if ( !url.startsWith( DriverProperties.getDRIVER_URL_SCHEMA() ) ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.URL_PARSING_INVALID, "Invalid driver schema." 
); + } + if ( log.isDebugEnabled() ) { + log.debug( "Parsing url: \"{}\"", url ); + } + final int schemeSpecificPartStartIndex = url.indexOf( "//" ); + if ( schemeSpecificPartStartIndex == -1 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.URL_PARSING_INVALID, "Invalid url format." ); + } + + this.host = PropertyUtils.getDEFAULT_HOST(); + this.port = PropertyUtils.getDEFAULT_PORT(); + url = url.substring( schemeSpecificPartStartIndex ); + + if ( url.equals( "//" ) ) { + return; + } + + try { + URI uri = new URI( url ); + if ( uri.getQuery() != null ) { + parseParameters( uri.getQuery() ); + } + if ( uri.getHost() != null ) { + this.host = uri.getHost(); + } + if ( uri.getPort() != -1 ) { + this.port = uri.getPort(); + } + if ( uri.getUserInfo() != null ) { + String[] userAndPassword = uri.getUserInfo().split( ":", 2 ); + this.parameters.put( PropertyUtils.getUSERNAME_KEY(), userAndPassword[0] ); + if ( userAndPassword.length > 1 ) { + this.parameters.put( PropertyUtils.getPASSWORD_KEY(), userAndPassword[1] ); + } + } + if ( !uri.getPath().isEmpty() && uri.getPath().length() > 1 ) { + this.parameters.put( PropertyUtils.getNAMESPACE_KEY(), uri.getPath().substring( 1 ) ); // Leading / + } + } catch ( URISyntaxException e ) { + throw new PrismInterfaceServiceException( e ); + } + } + + + private void parseParameters( String parameters ) throws SQLException { + if ( log.isDebugEnabled() ) { + log.debug( "Parsing url parameters: \"{}\"", parameters ); + } + StringTokenizer tokenizer = new StringTokenizer( parameters, "&" ); + String[] keyValuePair; + while ( tokenizer.hasMoreTokens() ) { + keyValuePair = tokenizer.nextToken().split( "=" ); + if ( keyValuePair.length != 2 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.URL_PARSING_INVALID, "Invalid parameter format." 
); + } + if ( keyValuePair[0].isEmpty() || keyValuePair[1].isEmpty() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.URL_PARSING_INVALID, "Invalid parameter format." ); + } + try { + String value = URLDecoder.decode( keyValuePair[1], StandardCharsets.UTF_8.name() ); + this.parameters.put( keyValuePair[0], value ); + } catch ( UnsupportedEncodingException uee ) { + // not going to happen - value came from JDK's own StandardCharsets + } + } + } + + + public String getTarget() { + return host + ":" + port; + } + + + public Map getParameters() { + String property = parameters.get( PropertyUtils.getPASSWORD_KEY() ); + if ( property != null && property.equals( DriverProperties.getBACKDOR_STRING() ) && DriverProperties.isBACKDOOR_ENABLED() ) { + parameters.put( PropertyUtils.getPASSWORD_KEY(), "" ); + } + return parameters; + } + + + public String getParameter( String parameterName ) { + return parameters.get( parameterName ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/Driver.java b/src/main/java/org/polypheny/jdbc/Driver.java deleted file mode 100644 index c8ca8bf3..00000000 --- a/src/main/java/org/polypheny/jdbc/Driver.java +++ /dev/null @@ -1,403 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.jdbc; - - -import java.io.UnsupportedEncodingException; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLDecoder; -import java.nio.charset.StandardCharsets; -import java.sql.Connection; -import java.sql.SQLException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; -import java.util.StringTokenizer; -import lombok.extern.slf4j.Slf4j; -import org.apache.calcite.avatica.AvaticaConnection; -import org.apache.calcite.avatica.ConnectionConfig; -import org.apache.calcite.avatica.DriverVersion; -import org.apache.calcite.avatica.Meta; -import org.apache.calcite.avatica.UnregisteredDriver; -import org.apache.calcite.avatica.remote.AvaticaHttpClient; -import org.apache.calcite.avatica.remote.AvaticaHttpClientFactory; -import org.apache.calcite.avatica.remote.Driver.Serialization; -import org.apache.calcite.avatica.remote.ProtobufTranslationImpl; -import org.apache.calcite.avatica.remote.RemoteProtobufService; -import org.apache.calcite.avatica.remote.RemoteService; -import org.apache.calcite.avatica.remote.Service; -import org.apache.calcite.avatica.remote.Service.OpenConnectionRequest; -import org.apache.calcite.avatica.remote.Service.OpenConnectionResponse; - - -@Slf4j -public class Driver extends UnregisteredDriver { - - public static final String DRIVER_URL_SCHEMA = "jdbc:polypheny:"; - - public static final String DEFAULT_HOST = "localhost"; - public static final int DEFAULT_PORT = 20591; - public static final String DEFAULT_TRANSPORT_SCHEMA = "http:"; - public static final String DEFAULT_URL = DEFAULT_TRANSPORT_SCHEMA + "//" + DEFAULT_HOST + ":" + DEFAULT_PORT + "/"; - public static final String DEFAULT_SERIALIZATION = Serialization.PROTOBUF.name(); - - public static final String PROPERTY_USERNAME_KEY = "user"; - @java.lang.SuppressWarnings( - "squid:S2068" - // Credentials should not be hard-coded: 'password' detected - // Justification: "password" 
is here the key to set the password in the connection parameters. - ) - public static final String PROPERTY_PASSWORD_KEY = "password"; - public static final String PROPERTY_URL_KEY = "url"; - public static final String PROPERTY_HOST_KEY = "host"; - public static final String PROPERTY_PORT_KEY = "port"; - public static final String PROPERTY_DATABASE_KEY = "db"; - public static final String PROPERTY_SERIALIZATION = "serialization"; - - private static final Map DEPRECATED_PROPERTY_KEYS = new HashMap<>(); - - - static { - DEPRECATED_PROPERTY_KEYS.put( "wire_protocol", PROPERTY_SERIALIZATION ); - } - - - static { - new Driver().register(); - } - - - public Driver() { - super(); - } - - - @Override - protected DriverVersion createDriverVersion() { - return DriverVersion.load( - Driver.class, - "org-polypheny-jdbc.properties", - "Polypheny JDBC Driver", - "unknown version", - "Polypheny", - "unknown version" ); - } - - - @Override - protected String getConnectStringPrefix() { - return DRIVER_URL_SCHEMA; - } - - - @Override - public boolean acceptsURL( final String url ) throws SQLException { - if ( url == null ) { - throw new SQLException( new NullPointerException( "url == null" ) ); - } - return url.toLowerCase().startsWith( getConnectStringPrefix().toLowerCase() ); - } - - - @Override - public Meta createMeta( final AvaticaConnection connection ) { - final ConnectionConfig config = connection.config(); - // Create a single Service and set it on the Connection instance - final Service service = createService( connection, config ); - connection.setService( service ); - return new RemotePolyphenyMeta( connection ); - } - - - protected Service createService( final AvaticaConnection connection, final ConnectionConfig config ) { - final Service.Factory metaFactory = config.factory(); - final Service service; - if ( metaFactory != null ) { - service = metaFactory.create( connection ); - } else if ( config.url() != null ) { - switch ( Serialization.valueOf( 
connection.config().serialization().toUpperCase() ) ) { - case JSON: - service = new RemoteService( getHttpClient( connection, config ) ); - break; - case PROTOBUF: - service = new RemoteProtobufService( getHttpClient( connection, config ), new ProtobufTranslationImpl() ); - break; - default: - throw new IllegalArgumentException( "\"serialization\" is not one of " + Arrays.toString( Serialization.values() ) ); - } - } else { - throw new IllegalArgumentException( new NullPointerException( "config.url() == null" ) ); - } - return service; - } - - - /** - * Creates the HTTP client that communicates with the Avatica server. - * - * @param connection The {@link AvaticaConnection}. - * @param config The configuration. - * @return An {@link AvaticaHttpClient} implementation. - */ - protected AvaticaHttpClient getHttpClient( final AvaticaConnection connection, final ConnectionConfig config ) { - URL url; - try { - url = new URL( config.url() ); - } catch ( MalformedURLException e ) { - throw new IllegalArgumentException( e ); - } - - AvaticaHttpClientFactory httpClientFactory = config.httpClientFactory(); - - return httpClientFactory.getClient( url, config, connection.getKerberosConnection() ); - } - - - /** - * @see UnregisteredDriver#getFactoryClassName(JdbcVersion) - */ - @Override - protected String getFactoryClassName( final JdbcVersion jdbcVersion ) { - switch ( jdbcVersion ) { - case JDBC_30: - case JDBC_40: - throw new IllegalArgumentException( "JDBC version not supported: " + jdbcVersion ); - - case JDBC_41: - default: - return "org.polypheny.jdbc.PolyphenyJdbc41Factory"; - } - } - - - @Override - public Connection connect( final String url, Properties info ) throws SQLException { - if ( url == null ) { - throw new SQLException( new NullPointerException( "url == null" ) ); - } - - final AvaticaConnection connection; - if ( url.toLowerCase().contains( "url=http://" ) || url.toLowerCase().contains( "url=https://" ) ) { - // Avatica-compatible -- 
jdbc:polypheny:url=http(s)://server.address/database;... - connection = (AvaticaConnection) super.connect( url, info ); - } else if ( url.toLowerCase().contains( getConnectStringPrefix().toLowerCase() + "http://" ) || url.toLowerCase().contains( getConnectStringPrefix().toLowerCase() + "https://" ) ) { - // New Poly style -- jdbc:polypheny:http(s)://server.address/database&... - info = parseUrl( url, info ); - if ( info == null ) { - // Something is wrong with the url - return null; - } - connection = (AvaticaConnection) super.connect( url, info ); - } else { - // Old style -- jdbc:polypheny://server.address/database&... - log.debug( "No transport scheme given; fall back to http." ); - info = parseUrl( url, info ); - if ( info == null ) { - // Something is wrong with the url - return null; - } - connection = (AvaticaConnection) super.connect( url, info ); - } - if ( connection == null ) { - // It's not an url for our driver - return null; - } - - final Service service = connection.getService(); - // super.connect(...) should be creating a service and setting it in the AvaticaConnection - assert service != null; - - final OpenConnectionResponse response = service.apply( new OpenConnectionRequest( connection.id, OpenConnectionRequest.serializeProperties( info ) ) ); - if ( response == null ) { - throw new SQLException( "Exception opening a connection. The response is `null`." ); - } - - return connection; - } - - - // packet-visible for testability - @SuppressWarnings({ - "squid:S3776" // Cognitive Complexity - }) - final Properties parseUrl( String url, final Properties defaults ) { - final Properties prop = (defaults == null) ? new Properties() : new Properties( defaults ); - - if ( url == null || url.isEmpty() ) { - return null; - } - - log.debug( "Parsing \"" + url + "\"" ); - - final int questionMarkPosition = url.indexOf( '?' 
); - if ( questionMarkPosition != -1 ) { - // we have some parameters - final String parameters = url.substring( questionMarkPosition + 1 ); - url = url.substring( 0, questionMarkPosition ); - - final StringTokenizer parameterTokens = new StringTokenizer( parameters, "&" ); - while ( parameterTokens.hasMoreTokens() ) { - String parameter = parameterTokens.nextToken(); - - final int equalPosition = parameter.indexOf( '=' ); - String parameterKey = null; - String parameterValue = null; - - if ( equalPosition != -1 ) { - parameterKey = parameter.substring( 0, equalPosition ); - - if ( equalPosition + 1 < parameter.length() ) { - parameterValue = parameter.substring( equalPosition + 1 ); - } - } - - if ( (parameterKey != null && parameterKey.length() > 0) && (parameterValue != null && parameterValue.length() > 0) ) { - parameterKey = parameterKey.toLowerCase(); // our parameter keys are always lowercase - - try { - parameterValue = URLDecoder.decode( parameterValue, StandardCharsets.UTF_8.name() ); - } catch ( UnsupportedEncodingException e ) { - // not going to happen - value came from JDK's own StandardCharsets - throw new RuntimeException( e ); //NOSONAR "squid:S00112" - Justification: This is literally a problem with the runtime - } catch ( NoSuchMethodError e ) { - log.debug( "Cannot use the decode method with UTF-8. Using the fallback (deprecated) method.", e ); - parameterValue = URLDecoder.decode( parameterValue ); //NOSONAR "squid:CallToDeprecatedMethod" - Justification: This is the fallback if the superseded method does not exist. - } - - prop.setProperty( parameterKey, parameterValue ); - - // Backwards Compatibility - // Note: we do not overwrite the currently valid property, i.e., if the legacy and the current parameter have been set, the current wins. 
- if ( DEPRECATED_PROPERTY_KEYS.containsKey( parameterKey ) ) { - final String newParameterKey = DEPRECATED_PROPERTY_KEYS.get( parameterKey ); - prop.setProperty( newParameterKey, prop.getProperty( newParameterKey, parameterValue ) ); - } - } - } - } - - final int doubleSlashPosition = url.indexOf( "//" ); - if ( doubleSlashPosition == -1 ) { - return null; - } - - final String scheme; - try { - scheme = url.substring( 0, doubleSlashPosition ).substring( getConnectStringPrefix().length() ); - } catch ( IndexOutOfBoundsException e ) { - return null; - } - - url = url.substring( doubleSlashPosition + 2 ); - - final int atPosition = url.indexOf( '@' ); - if ( atPosition != -1 ) { - // we have username[:password]@... - final String userPassword = url.substring( 0, atPosition ); - url = url.substring( atPosition + 1 ); - - final int colonPosition = userPassword.indexOf( ':' ); - String username; - String password = null; - - if ( colonPosition != -1 ) { - username = userPassword.substring( 0, colonPosition ); - - if ( colonPosition + 1 < userPassword.length() ) { - password = userPassword.substring( colonPosition + 1 ); - } - } else { - username = userPassword; - } - - //noinspection ConstantConditions - if ( username != null && username.length() > 0 ) { - prop.setProperty( PROPERTY_USERNAME_KEY, username ); - } - if ( password != null && password.length() > 0 ) { - prop.setProperty( PROPERTY_PASSWORD_KEY, password ); - } - } - - final int slashPosition = url.indexOf( '/' ); - String hostPort = url; - - if ( slashPosition != -1 ) { - hostPort = url.substring( 0, slashPosition ); - String database = null; - - if ( slashPosition + 1 < url.length() ) { - database = url.substring( slashPosition + 1 ); - } - - if ( database != null && database.length() > 0 ) { - prop.setProperty( PROPERTY_DATABASE_KEY, database ); - } - } - - final int colonPosition = hostPort.indexOf( ':' ); - - String host = hostPort; - String port = Integer.toString( DEFAULT_PORT ); - - if ( colonPosition 
!= -1 ) { - host = hostPort.substring( 0, colonPosition ); - - if ( colonPosition + 1 < hostPort.length() ) { - port = hostPort.substring( colonPosition + 1 ); - } - } - - if ( host.isEmpty() ) { - host = DEFAULT_HOST; - } - - prop.setProperty( PROPERTY_HOST_KEY, host ); - prop.setProperty( PROPERTY_PORT_KEY, port ); - prop.setProperty( PROPERTY_URL_KEY, (scheme.isEmpty() ? DEFAULT_TRANSPORT_SCHEMA : scheme) + "//" + prop.getProperty( PROPERTY_HOST_KEY, DEFAULT_HOST ) + ":" + prop.getProperty( PROPERTY_PORT_KEY, Integer.toString( DEFAULT_PORT ) ) + "/" ); - - // OVERRIDE URL BY DEFAULT - if ( defaults != null ) { - for ( final Object o : defaults.keySet() ) { - final String key = o.toString(); - final String value = defaults.getProperty( key ); - prop.setProperty( key, value ); - } - } - - // validate, fix, or set serialization - switch ( prop.getProperty( PROPERTY_SERIALIZATION, DEFAULT_SERIALIZATION ).toUpperCase() ) { - case "JSON": - prop.setProperty( PROPERTY_SERIALIZATION, Serialization.JSON.name() ); - break; - case "PROTOBUF": - case "PROTO": - case "PROTO3": - prop.setProperty( PROPERTY_SERIALIZATION, Serialization.PROTOBUF.name() ); - break; - default: - prop.setProperty( PROPERTY_SERIALIZATION, prop.getProperty( PROPERTY_SERIALIZATION ).toUpperCase() ); - break; - } - - log.debug( "Result of parsing: {}", prop ); - - return prop; - } -} diff --git a/src/main/java/org/polypheny/jdbc/ForwardOnlyScroller.java b/src/main/java/org/polypheny/jdbc/ForwardOnlyScroller.java new file mode 100644 index 00000000..3c8f2d1c --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/ForwardOnlyScroller.java @@ -0,0 +1,180 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + +import java.util.LinkedList; +import java.util.List; +import java.util.NoSuchElementException; +import org.polypheny.jdbc.properties.PolyphenyResultSetProperties; +import org.polypheny.jdbc.types.TypedValue; +import org.polypheny.jdbc.utils.TypedValueUtils; +import org.polypheny.prism.Frame; + +public class ForwardOnlyScroller implements Scrollable> { + + private static final int DEFAULT_PREFETCH_COUNT = 20; + private static final int INDEX_BEFORE_FIRST = -1; + + private LinkedList> values; + private List currentRow; + private ResultFetcher resultFetcher; + private Thread fetcherThread; + private PolyphenyResultSetProperties properties; + private int baseIndex; + + + public ForwardOnlyScroller( Frame frame, PrismInterfaceClient client, int statementId, PolyphenyResultSetProperties properties, int fetchTimeout ) { + this.values = new LinkedList<>( TypedValueUtils.buildRows( frame.getRelationalFrame().getRowsList() ) ); + if ( properties.getLargeMaxRows() != 0 && values.size() > properties.getLargeMaxRows() ) { + values.subList( longToInt( properties.getLargeMaxRows() ), values.size() ).clear(); + } + this.resultFetcher = new ResultFetcher( client, statementId, properties, values.size(), fetchTimeout ); + this.resultFetcher.setLast( frame.getIsLast() ); + this.properties = properties; + this.baseIndex = INDEX_BEFORE_FIRST; + } + + + protected int longToInt( long longNumber ) { + return Math.toIntExact( longNumber ); + } + + + @Override + public void fetchAllAndSync() throws InterruptedException { + if ( 
resultFetcher.isLast() ) { + return; + } + if ( fetcherThread != null ) { + return; + } + while ( !resultFetcher.isLast() ) { + fetcherThread = new Thread( resultFetcher ); + fetcherThread.start(); + syncFetch(); + } + } + + + @Override + public boolean next() throws PrismInterfaceServiceException { + try { + considerPrefetch(); + syncFetchIfEmpty(); + currentRow = values.poll(); + if ( currentRow == null ) { + return false; + } + baseIndex++; + return true; + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Fetching more columns from server filed.", e ); + } + } + + + private void considerPrefetch() { + int prefetch_count = Math.min( DEFAULT_PREFETCH_COUNT, properties.getStatementFetchSize() ); + if ( values.size() > prefetch_count ) { + return; + } + if ( resultFetcher.isLast() ) { + return; + } + if ( fetcherThread != null ) { + return; + } + fetcherThread = new Thread( resultFetcher ); + fetcherThread.start(); + } + + + private void syncFetchIfEmpty() throws InterruptedException { + if ( !values.isEmpty() ) { + return; + } + syncFetch(); + } + + + private void syncFetch() throws InterruptedException { + if ( fetcherThread == null ) { + return; + } + fetcherThread.join(); + fetcherThread = null; + values.addAll( resultFetcher.getFetchedValues() ); + } + + + @Override + public List current() { + if ( currentRow == null ) { + throw new NoSuchElementException( "Illegal cursor position." 
); + } + return currentRow; + } + + + @Override + public void close() { + if ( fetcherThread == null ) { + return; + } + fetcherThread.interrupt(); + } + + + @Override + public boolean isBeforeFirst() { + return baseIndex == INDEX_BEFORE_FIRST; + } + + + @Override + public boolean isAfterLast() { + return values.isEmpty() && currentRow == null; + } + + + @Override + public boolean isFirst() { + return baseIndex == 0; + } + + + @Override + public boolean isLast() { + return values.isEmpty() && currentRow != null; + } + + + @Override + public int getRow() { + if ( isBeforeFirst() || isAfterLast() ) { + return 0; + } + return baseIndex + 1; + } + + + @Override + public boolean hasCurrent() { + return currentRow != null; + } + +} diff --git a/src/main/java/org/polypheny/jdbc/Main.java b/src/main/java/org/polypheny/jdbc/Main.java new file mode 100644 index 00000000..ec1a6ad1 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/Main.java @@ -0,0 +1,31 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.sql.SQLException; + +public class Main { + + public static void main( String[] args ) throws ClassNotFoundException, SQLException { + final String DB_URL = "jdbc:polypheny://localhost:20590"; + final String USER = "pa"; + final String PASS = ""; + + Class.forName( "org.polypheny.jdbc.PolyphenyDriver" ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PolyConnection.java b/src/main/java/org/polypheny/jdbc/PolyConnection.java new file mode 100644 index 00000000..d48e4132 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/PolyConnection.java @@ -0,0 +1,571 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.sql.Array; +import java.sql.Blob; +import java.sql.CallableStatement; +import java.sql.ClientInfoStatus; +import java.sql.Clob; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.NClob; +import java.sql.PreparedStatement; +import java.sql.SQLClientInfoException; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Savepoint; +import java.sql.Statement; +import java.sql.Struct; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.Executor; +import org.polypheny.jdbc.meta.PolyphenyDatabaseMetadata; +import org.polypheny.jdbc.multimodel.PolyStatement; +import org.polypheny.jdbc.properties.PolyphenyConnectionProperties; +import org.polypheny.jdbc.properties.PolyphenyStatementProperties; +import org.polypheny.jdbc.properties.PropertyUtils; +import org.polypheny.jdbc.types.PolyArray; +import org.polypheny.jdbc.types.PolyBlob; +import org.polypheny.jdbc.types.PolyClob; +import org.polypheny.jdbc.types.PolyStruct; +import org.polypheny.prism.PreparedStatementSignature; + +public class PolyConnection implements Connection { + + private PolyphenyConnectionProperties properties; + + private PolyphenyDatabaseMetadata databaseMetaData; + private boolean isClosed; + + private boolean hasRunningTransaction; + + private Set openStatements; + + private Map> typeMap; + + + private void throwIfClosed() throws SQLException { + if ( isClosed ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.CONNECTION_LOST, "Illegal operation on closed connection." 
); + } + } + + + private void throwIfAutoCommit() throws SQLException { + if ( !isStrict() ) { + return; + } + if ( properties.isAutoCommit() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Illegal operation on auto committing connection." ); + } + } + + + private void throwIfRunningTransaction() throws SQLException { + if ( hasRunningTransaction ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Illegal operation during running transaction." ); + } + } + + + public PolyConnection( PolyphenyConnectionProperties connectionProperties, PolyphenyDatabaseMetadata databaseMetaData ) { + this.properties = connectionProperties; + databaseMetaData.setConnection( this ); + this.databaseMetaData = databaseMetaData; + this.openStatements = new HashSet<>(); + this.typeMap = new HashMap<>(); + this.isClosed = false; + } + + + public boolean isStrict() { + return properties.isStrict(); + } + + + public void startTracking( Statement statement ) { + openStatements.add( statement ); + } + + + public void endTracking( Statement statement ) { + if ( !openStatements.contains( statement ) ) { + return; + } + openStatements.remove( statement ); + } + + + public int getTimeout() { + return properties.getNetworkTimeout(); + } + + + public PrismInterfaceClient getPrismInterfaceClient() { + return properties.getPrismInterfaceClient(); + } + + + @Override + public Statement createStatement() throws SQLException { + throwIfClosed(); + PolyphenyStatement statement = new PolyphenyStatement( this, properties.toStatementProperties() ); + startTracking( statement ); + return statement; + } + + + public PolyStatement createPolyStatement() { + return new PolyStatement( this ); + } + + + @Override + public PreparedStatement prepareStatement( String sql ) throws SQLException { + PreparedStatementSignature signature = getPrismInterfaceClient().prepareIndexedStatement( + properties.getNamespaceName(), + 
PropertyUtils.getSQL_LANGUAGE_NAME(), + sql, + getTimeout() + ); + PolyphenyPreparedStatement statement = new PolyphenyPreparedStatement( this, properties.toStatementProperties(), signature ); + startTracking( statement ); + return statement; + } + + + @Override + public CallableStatement prepareCall( String sql ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public String nativeSQL( String sql ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public void setAutoCommit( boolean autoCommit ) throws SQLException { + throwIfClosed(); + if ( hasRunningTransaction ) { + commit(); + } + properties.setAutoCommit( autoCommit ); + } + + + @Override + public boolean getAutoCommit() throws SQLException { + throwIfClosed(); + return properties.isAutoCommit(); + + } + + + @Override + public void commit() throws SQLException { + throwIfClosed(); + throwIfAutoCommit(); + getPrismInterfaceClient().commitTransaction( getNetworkTimeout() ); + hasRunningTransaction = false; + } + + + @Override + public void rollback() throws SQLException { + throwIfClosed(); + throwIfAutoCommit(); + getPrismInterfaceClient().rollbackTransaction( getNetworkTimeout() ); + } + + + @Override + public void close() throws SQLException { + if ( isClosed() ) { + return; + } + for ( Statement openStatement : new HashSet<>( openStatements ) ) { + openStatement.close(); + } + getPrismInterfaceClient().unregister( properties.getNetworkTimeout() ); + isClosed = true; + } + + + @Override + public boolean isClosed() { + return isClosed; + } + + + @Override + public DatabaseMetaData getMetaData() throws SQLException { + throwIfClosed(); + return databaseMetaData; + } + + + @Override + public void setReadOnly( boolean readOnly ) throws SQLException { + throwIfClosed(); + throwIfRunningTransaction(); + properties.setReadOnly( readOnly ); + } + + + @Override + public boolean isReadOnly() throws SQLException { + throwIfClosed(); + 
return properties.isReadOnly(); + } + + + @Override + public void setCatalog( String catalog ) throws SQLException { + throwIfClosed(); + // does nothing - just there for consistency + properties.setCatalogName( catalog ); + } + + + @Override + public String getCatalog() { + return properties.getCatalogName(); + + } + + + @Override + public void setTransactionIsolation( int level ) throws SQLException { + throwIfClosed(); + properties.setTransactionIsolation( level ); + } + + + @Override + public int getTransactionIsolation() throws SQLException { + throwIfClosed(); + return properties.getTransactionIsolation(); + } + + + @Override + public SQLWarning getWarnings() throws SQLException { + throwIfClosed(); + return null; + + } + + + @Override + public void clearWarnings() throws SQLException { + throwIfClosed(); + } + + + @Override + public Statement createStatement( int resultSetType, int resultSetConcurrency ) throws SQLException { + throwIfClosed(); + PropertyUtils.throwIfInvalid( resultSetType, resultSetConcurrency ); + PolyphenyStatementProperties statementProperties = properties.toStatementProperties( resultSetType, resultSetConcurrency ); + return new PolyphenyStatement( this, statementProperties ); + } + + + @Override + public PreparedStatement prepareStatement( String sql, int resultSetType, int resultSetConcurrency ) throws SQLException { + throwIfClosed(); + PropertyUtils.throwIfInvalid( resultSetType, resultSetConcurrency ); + PolyphenyStatementProperties statementProperties = properties.toStatementProperties( resultSetType, resultSetConcurrency ); + PreparedStatementSignature signature = getPrismInterfaceClient().prepareIndexedStatement( + properties.getNamespaceName(), + PropertyUtils.getSQL_LANGUAGE_NAME(), + sql, + getTimeout() + ); + return new PolyphenyPreparedStatement( this, statementProperties, signature ); + } + + + @Override + public CallableStatement prepareCall( String sql, int resultSetType, int resultSetConcurrency ) throws SQLException { 
+ throwIfClosed(); + PropertyUtils.throwIfInvalid( resultSetType, resultSetConcurrency ); + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public Map> getTypeMap() throws SQLException { + return typeMap; + } + + + @Override + public void setTypeMap( Map> map ) throws SQLException { + this.typeMap = map; + } + + + @Override + public void setHoldability( int holdability ) throws SQLException { + throwIfClosed(); + properties.setResultSetHoldability( holdability ); + } + + + @Override + public int getHoldability() throws SQLException { + throwIfClosed(); + return properties.getResultSetHoldability(); + } + + + @Override + public Savepoint setSavepoint() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public Savepoint setSavepoint( String name ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public void rollback( Savepoint savepoint ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public void releaseSavepoint( Savepoint savepoint ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public Statement createStatement( int resultSetType, int resultSetConcurrency, int resultSetHoldability ) throws SQLException { + throwIfClosed(); + PropertyUtils.throwIfInvalid( resultSetType, resultSetConcurrency, resultSetHoldability ); + PolyphenyStatementProperties statementProperties = properties.toStatementProperties( resultSetType, resultSetConcurrency, resultSetHoldability ); + PolyphenyStatement statement = new PolyphenyStatement( this, statementProperties ); + openStatements.add( statement ); + return statement; + } + + + @Override + public PreparedStatement prepareStatement( String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability ) throws SQLException { + throwIfClosed(); + PropertyUtils.throwIfInvalid( resultSetType, resultSetConcurrency, resultSetHoldability ); + 
PolyphenyStatementProperties statementProperties = properties.toStatementProperties( resultSetType, resultSetConcurrency, resultSetHoldability ); + PreparedStatementSignature signature = getPrismInterfaceClient().prepareIndexedStatement( + properties.getNamespaceName(), + PropertyUtils.getSQL_LANGUAGE_NAME(), + sql, + getTimeout() + ); + PolyphenyPreparedStatement statement = new PolyphenyPreparedStatement( this, statementProperties, signature ); + openStatements.add( statement ); + return statement; + } + + + @Override + public CallableStatement prepareCall( String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability ) throws SQLException { + throwIfClosed(); + PropertyUtils.throwIfInvalid( resultSetType, resultSetConcurrency, resultSetHoldability ); + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public PreparedStatement prepareStatement( String sql, int autoGeneratedKeys ) throws SQLException { + throwIfClosed(); + if ( !PropertyUtils.isValidAutogeneratedKeys( autoGeneratedKeys ) ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal argument for autogenerated keys" ); + } + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public PreparedStatement prepareStatement( String sql, int[] columnIndexes ) throws SQLException { + throwIfClosed(); + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public PreparedStatement prepareStatement( String sql, String[] columnNames ) throws SQLException { + throwIfClosed(); + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public Clob createClob() throws SQLException { + return new PolyClob(); + } + + + @Override + public Blob createBlob() throws SQLException { + return new PolyBlob(); + } + + + @Override + public NClob createNClob() throws SQLException { + // implements both clob and nclob as both are utf-8 + return new PolyClob(); + } + + + @Override + public SQLXML createSQLXML() throws 
SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public boolean isValid( int timeout ) throws SQLException { + if ( timeout < 0 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal argument for timeout" ); + } + // the prism-interface uses milliseconds for timeouts, jdbc uses seconds + return getPrismInterfaceClient().checkConnection( timeout * 1000 ); + } + + + @Override + public void setClientInfo( String name, String value ) throws SQLClientInfoException { + Properties clientInfoProperties = getClientInfo(); + clientInfoProperties.setProperty( name, value ); + try { + getPrismInterfaceClient().setClientInfoProperties( clientInfoProperties, properties.getNetworkTimeout() ); + } catch ( PrismInterfaceServiceException e ) { + throw new SQLClientInfoException( e.getMessage(), e.getSQLState(), e.getErrorCode(), new HashMap<>(), e ); + } + } + + + @Override + public void setClientInfo( Properties clientInfoProperties ) throws SQLClientInfoException { + try { + getPrismInterfaceClient().setClientInfoProperties( clientInfoProperties, properties.getNetworkTimeout() ); + } catch ( PrismInterfaceServiceException e ) { + HashMap failedOptions = new HashMap<>(); + throw new SQLClientInfoException( e.getMessage(), e.getSQLState(), e.getErrorCode(), new HashMap<>(), e ); + } + } + + + @Override + public String getClientInfo( String name ) throws SQLException { + return getClientInfo().getProperty( name ); + } + + + @Override + public Properties getClientInfo() throws SQLClientInfoException { + try { + Properties properties = new Properties(); + properties.putAll( getPrismInterfaceClient().getClientInfoProperties( getNetworkTimeout() ) ); + return properties; + } catch ( SQLException e ) { + throw new SQLClientInfoException(); + } + } + + + @Override + public Array createArrayOf( String typeName, Object[] elements ) throws SQLException { + throwIfClosed(); + return new PolyArray( typeName, elements ); + 
} + + + @Override + public Struct createStruct( String typeName, Object[] attributes ) throws SQLException { + throwIfClosed(); + return new PolyStruct( typeName, attributes ); + } + + + @Override + public void setSchema( String schema ) throws SQLException { + throwIfClosed(); + properties.setNamespaceName( schema ); + } + + + @Override + public String getSchema() throws SQLException { + throwIfClosed(); + return properties.getNamespaceName(); + } + + + @Override + public void abort( Executor executor ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public void setNetworkTimeout( Executor executor, int milliseconds ) throws SQLException { + throwIfClosed(); + if ( milliseconds < 0 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal argument for timeout" ); + } + properties.setNetworkTimeout( milliseconds ); + } + + + @Override + public int getNetworkTimeout() throws SQLException { + throwIfClosed(); + return properties.getNetworkTimeout(); + } + + + @Override + public T unwrap( Class aClass ) throws SQLException { + if ( aClass.isInstance( this ) ) { + return aClass.cast( this ); + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.WRAPPER_INCORRECT_TYPE, "Not a wrapper for " + aClass ); + } + + + @Override + public boolean isWrapperFor( Class aClass ) { + return aClass.isInstance( this ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyDriver.java b/src/main/java/org/polypheny/jdbc/PolyphenyDriver.java new file mode 100644 index 00000000..9e330d94 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/PolyphenyDriver.java @@ -0,0 +1,158 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.util.Properties; +import java.util.logging.Logger; +import org.polypheny.jdbc.meta.PolyphenyDatabaseMetadata; +import org.polypheny.jdbc.properties.DriverProperties; +import org.polypheny.jdbc.properties.PolyphenyConnectionProperties; +import org.polypheny.jdbc.properties.PropertyUtils; + +public class PolyphenyDriver implements java.sql.Driver { + + static { + new PolyphenyDriver().register(); + } + + + private void register() { + try { + DriverManager.registerDriver( this ); + } catch ( SQLException e ) { + System.out.println( "Error occurred while registering JDBC driver " + this + ": " + e ); + } + } + + + @Override + public Connection connect( String url, Properties properties ) throws SQLException { + if ( !acceptsURL( url ) ) { + return null; + } + ConnectionString connectionString = new ConnectionString( url, properties ); + PrismInterfaceClient prismInterfaceClient = new PrismInterfaceClient( connectionString.getHost(), connectionString.getPort(), connectionString.getParameters() ); + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, prismInterfaceClient ); + PolyphenyDatabaseMetadata databaseMetadata = new PolyphenyDatabaseMetadata( prismInterfaceClient, connectionString ); + prismInterfaceClient.register( connectionProperties, 
connectionProperties.getNetworkTimeout() ); + return new PolyConnection( connectionProperties, databaseMetadata ); + } + + + @Override + public boolean acceptsURL( String url ) throws SQLException { + if ( url == null ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "URL must no be null." ); + } + return url.startsWith( DriverProperties.getDRIVER_URL_SCHEMA() ); + } + + + @Override + public DriverPropertyInfo[] getPropertyInfo( String url, Properties properties ) throws SQLException { + ConnectionString connectionString = new ConnectionString( url, properties ); + + DriverPropertyInfo[] infoProperties = new DriverPropertyInfo[7]; + + // User Property + infoProperties[0] = new DriverPropertyInfo( + PropertyUtils.getUSERNAME_KEY(), + connectionString.getUser() ); + infoProperties[0].description = "Specifies the username for authentication. If not specified, the database uses the default user."; + infoProperties[0].required = false; + + // Password Property + infoProperties[1] = new DriverPropertyInfo( + PropertyUtils.getPASSWORD_KEY(), + connectionString.getParameter( PropertyUtils.getPASSWORD_KEY() ) ); + infoProperties[1].description = "Specifies the password associated with the given username. If not specified the database assumes that the user does not have a password."; + infoProperties[1].required = false; + + // Autocommit Property + String autocommit = connectionString.getParameter( PropertyUtils.getAUTOCOMMIT_KEY() ); + infoProperties[2] = new DriverPropertyInfo( + PropertyUtils.getAUTOCOMMIT_KEY(), + autocommit == null ? 
String.valueOf( PropertyUtils.isDEFAULT_AUTOCOMMIT() ) : autocommit ); + infoProperties[2].description = "Determines if each SQL statement is treated as a transaction."; + infoProperties[2].choices = new String[]{ "true", "false" }; + + // Readonly Property + String readOnly = connectionString.getParameter( PropertyUtils.getREAD_ONLY_KEY() ); + infoProperties[3] = new DriverPropertyInfo( + PropertyUtils.getREAD_ONLY_KEY(), + readOnly == null ? String.valueOf( PropertyUtils.isDEFAULT_READ_ONLY() ) : readOnly ); + infoProperties[3].description = "Indicates if the connection is in read-only mode. Currently ignored, reserved for future use."; + infoProperties[3].choices = new String[]{ "true", "false" }; + + // Holdability Property + String holdability = connectionString.getParameter( PropertyUtils.getRESULT_SET_HOLDABILITY_KEY() ); + String defaultHoldability = PropertyUtils.getHoldabilityName( PropertyUtils.getDEFAULT_RESULTSET_HOLDABILITY() ); + infoProperties[4] = new DriverPropertyInfo( + PropertyUtils.getRESULT_SET_HOLDABILITY_KEY(), + holdability == null ? defaultHoldability : holdability ); + infoProperties[4].description = "Specifies the holdability of ResultSet objects."; + infoProperties[4].choices = new String[]{ "HOLD", "CLOSE" }; + + // Isolation Property + String isolation = connectionString.getParameter( PropertyUtils.getTRANSACTION_ISOLATION_KEY() ); + String defaultIsolation = PropertyUtils.getTransactionIsolationName( PropertyUtils.getDEFAULT_TRANSACTION_ISOLATION() ); + infoProperties[5] = new DriverPropertyInfo( + PropertyUtils.getTRANSACTION_ISOLATION_KEY(), + isolation == null ? 
defaultIsolation : isolation ); + infoProperties[5].description = "Indicates the transaction isolation level."; + infoProperties[5].choices = new String[]{ "COMMITTED", "DIRTY", "SERIALIZABLE", "REPEATABLE_READ" }; + + // Network Timeout Property + String timeout = connectionString.getParameter( PropertyUtils.getNETWORK_TIMEOUT_KEY() ); + infoProperties[6] = new DriverPropertyInfo( + PropertyUtils.getNETWORK_TIMEOUT_KEY(), + timeout == null ? String.valueOf( PropertyUtils.getDEFAULT_NETWORK_TIMEOUT() ) : timeout ); + infoProperties[6].description = "Specifies the network timeout in seconds. Corresponds to the JDBC network timeout."; + + return infoProperties; + } + + + @Override + public int getMajorVersion() { + return DriverProperties.getDRIVER_MAJOR_VERSION(); + } + + + @Override + public int getMinorVersion() { + return DriverProperties.getDRIVER_MINOR_VERSION(); + } + + + @Override + public boolean jdbcCompliant() { + return DriverProperties.isJDBC_COMPLIANT(); + } + + + @Override + public Logger getParentLogger() throws SQLFeatureNotSupportedException { + throw new SQLFeatureNotSupportedException(); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyJdbc41Factory.java b/src/main/java/org/polypheny/jdbc/PolyphenyJdbc41Factory.java deleted file mode 100644 index 13066dec..00000000 --- a/src/main/java/org/polypheny/jdbc/PolyphenyJdbc41Factory.java +++ /dev/null @@ -1,285 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.jdbc; - - -import java.io.InputStream; -import java.io.Reader; -import java.sql.NClob; -import java.sql.RowId; -import java.sql.SQLException; -import java.sql.SQLXML; -import java.util.Properties; -import java.util.TimeZone; -import org.apache.calcite.avatica.AvaticaConnection; -import org.apache.calcite.avatica.AvaticaDatabaseMetaData; -import org.apache.calcite.avatica.AvaticaFactory; -import org.apache.calcite.avatica.AvaticaPreparedStatement; -import org.apache.calcite.avatica.AvaticaResultSet; -import org.apache.calcite.avatica.AvaticaResultSetMetaData; -import org.apache.calcite.avatica.AvaticaStatement; -import org.apache.calcite.avatica.Meta.Frame; -import org.apache.calcite.avatica.Meta.Signature; -import org.apache.calcite.avatica.Meta.StatementHandle; -import org.apache.calcite.avatica.QueryState; -import org.apache.calcite.avatica.UnregisteredDriver; - - -/** - * See also org.apache.calcite.avatica.AvaticaJdbc41Factory - */ -@SuppressWarnings("unused") -public class PolyphenyJdbc41Factory extends PolyphenyJdbcFactory { - - /** - * Creates a factory for JDBC version 4.1. - */ - public PolyphenyJdbc41Factory() { - this( 4, 1 ); - } - - - /** - * Creates a JDBC factory with given major/minor version number. 
- * - * @param major JDBC major version - * @param minor JDBC minor version - */ - protected PolyphenyJdbc41Factory( int major, int minor ) { - super( major, minor ); - } - - - @Override - public PolyphenyDbJdbc41Connection newConnection( final UnregisteredDriver driver, final AvaticaFactory factory, final String url, final Properties info ) { - return new PolyphenyDbJdbc41Connection( driver, factory, url, info ); - } - - - @Override - public PolyphenyDbJdbc41Statement newStatement( AvaticaConnection connection, StatementHandle h, int resultSetType, int resultSetConcurrency, int resultSetHoldability ) { - return new PolyphenyDbJdbc41Statement( connection, h, resultSetType, resultSetConcurrency, resultSetHoldability ); - } - - - @Override - public PolyphenyDbJdbc41PreparedStatement newPreparedStatement( AvaticaConnection connection, StatementHandle h, Signature signature, int resultSetType, int resultSetConcurrency, int resultSetHoldability ) throws SQLException { - return new PolyphenyDbJdbc41PreparedStatement( connection, h, signature, resultSetType, resultSetConcurrency, resultSetHoldability ); - } - - - @Override - public PolyphenyDbJdbc41ResultSet newResultSet( AvaticaStatement statement, QueryState state, Signature signature, TimeZone timeZone, Frame firstFrame ) throws SQLException { - final PolyphenyDbJdbc41ResultSetMetaData metaData = newResultSetMetaData( statement, signature ); - return new PolyphenyDbJdbc41ResultSet( statement, state, signature, metaData, timeZone, firstFrame ); - } - - - @Override - public PolyphenyDbJdbc41DatabaseMetaData newDatabaseMetaData( AvaticaConnection connection ) { - return new PolyphenyDbJdbc41DatabaseMetaData( connection ); - } - - - @Override - public PolyphenyDbJdbc41ResultSetMetaData newResultSetMetaData( AvaticaStatement statement, Signature signature ) { - return new PolyphenyDbJdbc41ResultSetMetaData( statement, signature ); - } - - - /** - * See also 
org.apache.calcite.avatica.AvaticaJdbc41Factory.AvaticaJdbc41Connection - */ - protected static class PolyphenyDbJdbc41Connection extends AvaticaConnection implements PolyphenyJdbcConnection { - - /** - * @param driver Driver - * @param factory Factory for JDBC objects - * @param url Server URL - * @param info Other connection properties - */ - protected PolyphenyDbJdbc41Connection( final UnregisteredDriver driver, final AvaticaFactory factory, final String url, final Properties info ) { - super( driver, factory, url, info ); - } - } - - - /** - * See also org.apache.calcite.avatica.AvaticaJdbc41Factory.AvaticaJdbc41Statement - */ - protected static class PolyphenyDbJdbc41Statement extends AvaticaStatement implements PolyphenyJdbcStatement { - - protected PolyphenyDbJdbc41Statement( AvaticaConnection connection, StatementHandle h, int resultSetType, int resultSetConcurrency, int resultSetHoldability ) { - super( connection, h, resultSetType, resultSetConcurrency, resultSetHoldability ); - } - } - - - /** - * See also org.apache.calcite.avatica.AvaticaJdbc41Factory.AvaticaJdbc41PreparedStatement - */ - protected static class PolyphenyDbJdbc41PreparedStatement extends AvaticaPreparedStatement implements PolyphenyJdbcPreparedStatement { - - /** - * @param connection Connection - * @param h Statement handle - * @param signature Result of preparing statement - * @param resultSetType Result set type - * @param resultSetConcurrency Result set concurrency - * @param resultSetHoldability Result set holdability - * @throws SQLException If fails due to underlying implementation reasons. 
- */ - protected PolyphenyDbJdbc41PreparedStatement( AvaticaConnection connection, StatementHandle h, Signature signature, int resultSetType, int resultSetConcurrency, int resultSetHoldability ) throws SQLException { - super( connection, h, signature, resultSetType, resultSetConcurrency, resultSetHoldability ); - } - - - @Override - public void setRowId( int parameterIndex, RowId x ) throws SQLException { - getSite( parameterIndex ).setRowId( x ); - } - - - @Override - public void setNString( int parameterIndex, String value ) throws SQLException { - getSite( parameterIndex ).setNString( value ); - } - - - @Override - public void setNCharacterStream( int parameterIndex, Reader value, long length ) throws SQLException { - getSite( parameterIndex ).setNCharacterStream( value, length ); - } - - - @Override - public void setNClob( int parameterIndex, NClob value ) throws SQLException { - getSite( parameterIndex ).setNClob( value ); - } - - - @Override - public void setClob( int parameterIndex, Reader reader, long length ) throws SQLException { - getSite( parameterIndex ).setClob( reader, length ); - } - - - @Override - public void setBlob( int parameterIndex, InputStream inputStream, long length ) throws SQLException { - getSite( parameterIndex ).setBlob( inputStream, length ); - } - - - @Override - public void setNClob( int parameterIndex, Reader reader, long length ) throws SQLException { - getSite( parameterIndex ).setNClob( reader, length ); - } - - - @Override - public void setSQLXML( int parameterIndex, SQLXML xmlObject ) throws SQLException { - getSite( parameterIndex ).setSQLXML( xmlObject ); - } - - - @Override - public void setAsciiStream( int parameterIndex, InputStream x, long length ) throws SQLException { - getSite( parameterIndex ).setAsciiStream( x, length ); - } - - - @Override - public void setBinaryStream( int parameterIndex, InputStream x, long length ) throws SQLException { - getSite( parameterIndex ).setBinaryStream( x, length ); - } - - - 
@Override - public void setCharacterStream( int parameterIndex, Reader reader, long length ) throws SQLException { - getSite( parameterIndex ).setCharacterStream( reader, length ); - } - - - @Override - public void setAsciiStream( int parameterIndex, InputStream x ) throws SQLException { - getSite( parameterIndex ).setAsciiStream( x ); - } - - - @Override - public void setBinaryStream( int parameterIndex, InputStream x ) throws SQLException { - getSite( parameterIndex ).setBinaryStream( x ); - } - - - @Override - public void setCharacterStream( int parameterIndex, Reader reader ) throws SQLException { - getSite( parameterIndex ).setCharacterStream( reader ); - } - - - @Override - public void setNCharacterStream( int parameterIndex, Reader value ) throws SQLException { - getSite( parameterIndex ).setNCharacterStream( value ); - } - - - @Override - public void setClob( int parameterIndex, Reader reader ) throws SQLException { - getSite( parameterIndex ).setClob( reader ); - } - - - @Override - public void setBlob( int parameterIndex, InputStream inputStream ) throws SQLException { - getSite( parameterIndex ).setBlob( inputStream ); - } - - - @Override - public void setNClob( int parameterIndex, Reader reader ) throws SQLException { - getSite( parameterIndex ).setNClob( reader ); - } - } - - - protected static class PolyphenyDbJdbc41ResultSet extends AvaticaResultSet implements PolyphenyJdbcResultSet { - - protected PolyphenyDbJdbc41ResultSet( AvaticaStatement statement, QueryState state, Signature signature, PolyphenyDbJdbc41ResultSetMetaData resultSetMetaData, TimeZone timeZone, Frame firstFrame ) throws SQLException { - super( statement, state, signature, resultSetMetaData, timeZone, firstFrame ); - } - } - - - protected static class PolyphenyDbJdbc41DatabaseMetaData extends AvaticaDatabaseMetaData implements PolyphenyJdbcDatabaseMetaData { - - protected PolyphenyDbJdbc41DatabaseMetaData( AvaticaConnection connection ) { - super( connection ); - } - } - - - 
protected static class PolyphenyDbJdbc41ResultSetMetaData extends AvaticaResultSetMetaData implements PolyphenyJdbcResultSetMetaData { - - protected PolyphenyDbJdbc41ResultSetMetaData( AvaticaStatement statement, Signature signature ) { - /* - * See also org.apache.calcite.avatica.AvaticaJdbc41Factory#newResultSetMetaData(AvaticaStatement, Signature) - */ - super( statement, null, signature ); - } - } -} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcFactory.java b/src/main/java/org/polypheny/jdbc/PolyphenyJdbcFactory.java deleted file mode 100644 index 3a776d83..00000000 --- a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcFactory.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.jdbc; - - -import java.util.Properties; -import org.apache.calcite.avatica.AvaticaConnection; -import org.apache.calcite.avatica.AvaticaFactory; -import org.apache.calcite.avatica.UnregisteredDriver; - - -/** - * Extension of {@link org.apache.calcite.avatica.AvaticaFactory} for Polypheny-DB. - */ -public abstract class PolyphenyJdbcFactory implements AvaticaFactory { - - private final int major; - private final int minor; - - - /** - * Creates a JDBC factory with given major/minor version number. 
- * - * @param major JDBC major version - * @param minor JDBC minor version - */ - protected PolyphenyJdbcFactory( final int major, final int minor ) { - this.major = major; - this.minor = minor; - } - - - public int getJdbcMajorVersion() { - return major; - } - - - public int getJdbcMinorVersion() { - return minor; - } - - - public abstract AvaticaConnection newConnection( UnregisteredDriver driver, AvaticaFactory factory, String url, Properties info ); - -} \ No newline at end of file diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcResultSetMetaData.java b/src/main/java/org/polypheny/jdbc/PolyphenyJdbcResultSetMetaData.java deleted file mode 100644 index c72d564c..00000000 --- a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcResultSetMetaData.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.jdbc; - - -public interface PolyphenyJdbcResultSetMetaData extends java.sql.ResultSetMetaData { - -} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcStatement.java b/src/main/java/org/polypheny/jdbc/PolyphenyJdbcStatement.java deleted file mode 100644 index 9514c280..00000000 --- a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcStatement.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.jdbc; - - -public interface PolyphenyJdbcStatement extends java.sql.Statement { - -} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyPreparedStatement.java b/src/main/java/org/polypheny/jdbc/PolyphenyPreparedStatement.java new file mode 100644 index 00000000..1a6f1c48 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/PolyphenyPreparedStatement.java @@ -0,0 +1,695 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.ParameterMetaData; +import java.sql.PreparedStatement; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Arrays; +import java.util.Calendar; +import java.util.LinkedList; +import java.util.List; +import org.polypheny.jdbc.meta.PolyphenyParameterMetaData; +import org.polypheny.jdbc.properties.PolyphenyStatementProperties; +import org.polypheny.jdbc.types.TypedValue; +import org.polypheny.prism.Frame; +import org.polypheny.prism.PreparedStatementSignature; +import org.polypheny.prism.StatementBatchResponse; +import org.polypheny.prism.StatementResult; + +public class PolyphenyPreparedStatement extends PolyphenyStatement implements PreparedStatement { + + private TypedValue[] parameters; + private List> parameterBatch = new LinkedList<>(); + private final PolyphenyParameterMetaData parameterMetaData; + + + public PolyphenyPreparedStatement( PolyConnection connection, PolyphenyStatementProperties properties, PreparedStatementSignature statementSignature ) throws SQLException { + super( connection, properties ); + this.statementId = statementSignature.getStatementId(); + this.parameterMetaData = new PolyphenyParameterMetaData( statementSignature ); + this.parameters = createParameterList( statementSignature.getParameterMetasCount() ); + } + + + private void prepareForReExecution() throws SQLException { + if ( currentResult != null ) { + currentResult.close(); + } + currentUpdateCount = NO_UPDATE_COUNT; + } + + + 
private TypedValue[] createParameterList( int parameterCount ) { + return new TypedValue[parameterCount]; + } + + + @Override + public ResultSet executeQuery( String statement ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." ); + } + + + @Override + public int executeUpdate( String statement ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." ); + } + + + @Override + public boolean execute( String statement ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." ); + } + + + @Override + public long executeLargeUpdate( String sql, int autogeneratedKeys ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." ); + } + + + @Override + public int executeUpdate( String sql, int autogeneratedKeys ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." ); + } + + + @Override + public long executeLargeUpdate( String sql, int[] columnIndexes ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." ); + } + + + @Override + public int executeUpdate( String sql, int[] columnIndexes ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." 
); + } + + + @Override + public long executeLargeUpdate( String sql, String[] columnNames ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." ); + } + + + @Override + public int executeUpdate( String sql, String[] columnNames ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." ); + } + + + @Override + public boolean execute( String s, int i ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." ); + + } + + + @Override + public boolean execute( String s, int[] ints ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." ); + + } + + + @Override + public boolean execute( String s, String[] strings ) throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Method should not be called on a prepared statement." 
); + } + + + @Override + public ResultSet executeQuery() throws SQLException { + try { + throwIfClosed(); + prepareForReExecution(); + StatementResult result = getClient().executeIndexedStatement( + statementId, + Arrays.asList( parameters ), + properties.getFetchSize(), + getTimeout() + ); + if ( !result.hasFrame() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.RESULT_TYPE_INVALID, "Statement must produce a single ResultSet" ); + } + Frame frame = result.getFrame(); + throwIfNotRelational( frame ); + currentResult = new PolyphenyResultSet( this, frame, properties.toResultSetProperties() ); + return currentResult; + } finally { + clearParameters(); + clearParameterBatch(); + } + } + + + @Override + public long executeLargeUpdate() throws SQLException { + try { + throwIfClosed(); + prepareForReExecution(); + StatementResult result = getClient().executeIndexedStatement( + statementId, + Arrays.asList( parameters ), + properties.getFetchSize(), + getTimeout() + ); + if ( result.hasFrame() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.RESULT_TYPE_INVALID, "Statement must not produce a ResultSet" ); + } + currentUpdateCount = result.getScalar(); + return currentUpdateCount; + } finally { + clearParameters(); + clearParameterBatch(); + } + } + + + @Override + public int executeUpdate() throws SQLException { + return longToInt( executeLargeUpdate() ); + } + + + private void throwIfOutOfBounds( int parameterIndex ) throws SQLException { + if ( parameterIndex < 1 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Index out of bounds." ); + } + if ( parameterIndex > parameterMetaData.getParameterCount() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Index out of bounds." 
); + } + } + + + private int indexFromParameterIndex( int parameterIndex ) { + return parameterIndex - 1; + } + + + @Override + public void setNull( int parameterIndex, int sqlType ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromNull(); + } + + + @Override + public void setBoolean( int parameterIndex, boolean x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromBoolean( x ); + } + + + @Override + public void setByte( int parameterIndex, byte x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromByte( x ); + } + + + @Override + public void setShort( int parameterIndex, short x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromShort( x ); + } + + + @Override + public void setInt( int parameterIndex, int x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromInteger( x ); + } + + + @Override + public void setLong( int parameterIndex, long x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromLong( x ); + } + + + @Override + public void setFloat( int parameterIndex, float x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromFloat( x ); + } + + + @Override + public void setDouble( int parameterIndex, double x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = 
TypedValue.fromDouble( x ); + } + + + @Override + public void setBigDecimal( int parameterIndex, BigDecimal x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromBigDecimal( x ); + } + + + @Override + public void setString( int parameterIndex, String x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromString( x ); + } + + + @Override + public void setBytes( int parameterIndex, byte[] x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromBytes( x ); + } + + + @Override + public void setDate( int parameterIndex, Date x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromDate( x ); + } + + + @Override + public void setTime( int parameterIndex, Time x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromTime( x ); + } + + + @Override + public void setTimestamp( int parameterIndex, Timestamp x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromTimestamp( x ); + } + + + @Override + public void setAsciiStream( int parameterIndex, InputStream x, int length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromAsciiStream( x, length ); + } + + + @Override + public void setUnicodeStream( int parameterIndex, InputStream x, int length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( 
parameterIndex )] = TypedValue.fromUnicodeStream( x, length ); + } + + + @Override + public void setBinaryStream( int parameterIndex, InputStream x, int length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromBinaryStream( x, length ); + } + + + @Override + public void clearParameters() throws SQLException { + throwIfClosed(); + parameters = createParameterList( parameterMetaData.getParameterCount() ); + } + + + private void clearParameterBatch() { + parameterBatch = new LinkedList<>(); + } + + + @Override + public void setObject( int parameterIndex, Object x, int targetSqlType ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromObject( x, targetSqlType ); + } + + + @Override + public void setObject( int parameterIndex, Object x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromObject( x ); + } + + + @Override + public boolean execute() throws SQLException { + try { + throwIfClosed(); + prepareForReExecution(); + StatementResult result = getClient().executeIndexedStatement( + statementId, + Arrays.asList( parameters ), + properties.getFetchSize(), + getTimeout() + ); + if ( !result.hasFrame() ) { + currentUpdateCount = result.getScalar(); + return false; + } + Frame frame = result.getFrame(); + throwIfNotRelational( frame ); + currentResult = new PolyphenyResultSet( this, frame, properties.toResultSetProperties() ); + return true; + } finally { + clearParameters(); + clearParameterBatch(); + } + } + + + @Override + public void addBatch() throws SQLException { + throwIfClosed(); + parameterBatch.add( Arrays.asList( parameters.clone() ) ); + } + + + @Override + public long[] executeLargeBatch() throws SQLException { + List scalars = 
executeParameterizedBatch(); + long[] updateCounts = new long[scalars.size()]; + for ( int i = 0; i < scalars.size(); i++ ) { + updateCounts[i] = scalars.get( i ); + } + return updateCounts; + } + + + @Override + public int[] executeBatch() throws SQLException { + List scalars = executeParameterizedBatch(); + int[] updateCounts = new int[scalars.size()]; + for ( int i = 0; i < scalars.size(); i++ ) { + updateCounts[i] = longToInt( scalars.get( i ) ); + } + return updateCounts; + } + + + private List executeParameterizedBatch() throws SQLException { + throwIfClosed(); + try { + StatementBatchResponse status = getClient().executeIndexedStatementBatch( statementId, parameterBatch, getTimeout() ); + return status.getScalarsList(); + } finally { + // jdbc: batch and individual parameters are always cleared even if the execution fails. + clearParameters(); + clearParameterBatch(); + } + } + + + @Override + public void setCharacterStream( int parameterIndex, Reader reader, int length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + } + + + @Override + public void setRef( int parameterIndex, Ref x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromRef( x ); + } + + + @Override + public void setBlob( int parameterIndex, Blob x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromBlob( x ); + } + + + @Override + public void setClob( int parameterIndex, Clob x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromClob( x ); + } + + + @Override + public void setArray( int parameterIndex, Array x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = 
TypedValue.fromArray( x ); + } + + + @Override + public ResultSetMetaData getMetaData() throws SQLException { + return null; + } + + + @Override + public void setDate( int parameterIndex, Date x, Calendar cal ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromDate( x, cal ); + } + + + @Override + public void setTime( int parameterIndex, Time x, Calendar cal ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromTime( x, cal ); + } + + + @Override + public void setTimestamp( int parameterIndex, Timestamp x, Calendar cal ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromTimestamp( x, cal ); + } + + + @Override + public void setNull( int parameterIndex, int sqlType, String typeName ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromNull(); + } + + + @Override + public void setURL( int parameterIndex, URL x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromUrl( x ); + } + + + @Override + public ParameterMetaData getParameterMetaData() throws SQLException { + return parameterMetaData; + } + + + @Override + public void setRowId( int parameterIndex, RowId x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromRowId( x ); + } + + + @Override + public void setNString( int parameterIndex, String value ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = 
TypedValue.fromNString( value ); + } + + + @Override + public void setNCharacterStream( int parameterIndex, Reader value, long length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromNCharacterStream( value ); + } + + + @Override + public void setNClob( int parameterIndex, NClob value ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromNClob( value ); + } + + + @Override + public void setClob( int parameterIndex, Reader reader, long length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromClob( reader, length ); + } + + + @Override + public void setBlob( int parameterIndex, InputStream inputStream, long length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromBlob( inputStream, length ); + } + + + @Override + public void setNClob( int parameterIndex, Reader reader, long length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromNClob( reader, length ); + } + + + @Override + public void setSQLXML( int parameterIndex, SQLXML xmlObject ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromSQLXML( xmlObject ); + } + + + @Override + public void setObject( int parameterIndex, Object x, int targetSqlType, int scaleOrLength ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromObject( x, targetSqlType, scaleOrLength ); + } + + + @Override + public void 
setAsciiStream( int parameterIndex, InputStream x, long length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromAsciiStream( x, length ); + } + + + @Override + public void setBinaryStream( int parameterIndex, InputStream x, long length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromBinaryStream( x, length ); + } + + + @Override + public void setCharacterStream( int parameterIndex, Reader reader, long length ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromCharacterStream( reader, length ); + } + + + @Override + public void setAsciiStream( int parameterIndex, InputStream x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromAsciiStream( x ); + } + + + @Override + public void setBinaryStream( int parameterIndex, InputStream x ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromBinaryStream( x ); + } + + + @Override + public void setCharacterStream( int parameterIndex, Reader reader ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromCharacterStream( reader ); + } + + + @Override + public void setNCharacterStream( int parameterIndex, Reader value ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromNCharacterStream( value ); + } + + + @Override + public void setClob( int parameterIndex, Reader reader ) throws SQLException { + throwIfClosed(); + 
throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromClob( reader ); + } + + + @Override + public void setBlob( int parameterIndex, InputStream inputStream ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromBlob( inputStream ); + } + + + @Override + public void setNClob( int parameterIndex, Reader reader ) throws SQLException { + throwIfClosed(); + throwIfOutOfBounds( parameterIndex ); + parameters[indexFromParameterIndex( parameterIndex )] = TypedValue.fromNClob( reader ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyResultSet.java b/src/main/java/org/polypheny/jdbc/PolyphenyResultSet.java new file mode 100644 index 00000000..8ad0626a --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/PolyphenyResultSet.java @@ -0,0 +1,1530 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Calendar; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import org.polypheny.jdbc.meta.MetaScroller; +import org.polypheny.jdbc.meta.MetaUtils; +import org.polypheny.jdbc.meta.PolyphenyColumnMeta; +import org.polypheny.jdbc.meta.PolyphenyResultSetMetadata; +import org.polypheny.jdbc.properties.PolyphenyResultSetProperties; +import org.polypheny.jdbc.types.TypedValue; +import org.polypheny.prism.Frame; +import org.polypheny.prism.Frame.ResultCase; + +public class PolyphenyResultSet implements ResultSet { + + private boolean isMeta = false; + + private PolyphenyStatement statement; + + private PolyphenyResultSetMetadata metadata; + private Scrollable> resultScroller; + private Class bidirectionScrollerClass; + private TypedValue lastRead; + private boolean isClosed; + private Map rowUpdates; + private boolean isInInsertMode; + + PolyphenyResultSetProperties properties; + + + public PolyphenyResultSet( + PolyphenyStatement statement, + Frame frame, + PolyphenyResultSetProperties properties + ) throws SQLException { + if ( frame.getResultCase() != ResultCase.RELATIONAL_FRAME ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.RESULT_TYPE_INVALID, "Invalid frame type " + frame.getResultCase().name() ); + } + this.statement = statement; + this.metadata = new PolyphenyResultSetMetadata( MetaUtils.buildColumnMetas( 
frame.getRelationalFrame().getColumnMetaList() ) ); + if ( properties.getResultSetType() == ResultSet.TYPE_FORWARD_ONLY ) { + this.resultScroller = new ForwardOnlyScroller( frame, getClient(), statement.getStatementId(), properties, statement.getConnection().getNetworkTimeout() ); + } else { + this.resultScroller = new BidirectionalScroller( frame, getClient(), statement.getStatementId(), properties, statement.getConnection().getNetworkTimeout() ); + } + this.bidirectionScrollerClass = BidirectionalScroller.class; + this.properties = properties; + this.lastRead = null; + this.isClosed = false; + this.isInInsertMode = false; + } + + + public PolyphenyResultSet( List columnMetas, List> rows ) { + this.resultScroller = new MetaScroller<>( rows ); + this.metadata = new PolyphenyResultSetMetadata( columnMetas ); + this.statement = null; + this.properties = PolyphenyResultSetProperties.forMetaResultSet(); + this.lastRead = null; + this.isClosed = false; + this.isInInsertMode = false; + this.isMeta = true; + } + + + private TypedValue accessValue( int column ) throws SQLException { + if ( !isInInsertMode ) { + try { + if ( !resultScroller.hasCurrent() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "No current row to read from." ); + } + lastRead = resultScroller.current().get( column - 1 ); + if ( properties.getMaxFieldSize() > 0 && lastRead.getLength() > properties.getMaxFieldSize() ) { + return lastRead.getTrimmed( properties.getMaxFieldSize() ); + } + return lastRead; + } catch ( IndexOutOfBoundsException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.COLUMN_NOT_EXISTS, "Column index out of bounds." 
); + } + } + TypedValue value = rowUpdates.get( column ); + if ( value == null ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.COLUMN_ACCESS_ILLEGAL, "Can't access unset colum" ); + } + return value; + } + + + private void throwIfClosed() throws SQLException { + if ( isClosed ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "This operation cannot be applied to a closed result set." ); + } + } + + + private void throwIfColumnIndexOutOfBounds( int columnIndex ) throws SQLException { + if ( columnIndex < 1 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.COLUMN_NOT_EXISTS, "Column index must be greater than 0" ); + } + if ( columnIndex > metadata.getColumnCount() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.COLUMN_NOT_EXISTS, "Column index out of bounds" ); + } + } + + + private void throwIfReadOnly() throws SQLException { + if ( properties.isReadOnly() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.MODIFICATION_NOT_PERMITTED, "Modification of result sets in read only mode is not permitted" ); + } + } + + + private void discardRowUpdates() { + if ( rowUpdates == null ) { + return; + } + rowUpdates = null; + } + + + private Map getOrCreateRowUpdate() { + if ( rowUpdates == null ) { + rowUpdates = new LinkedHashMap<>(); + } + return rowUpdates; + } + + + public void fetchAll() throws SQLException { + try { + resultScroller.fetchAllAndSync(); + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, e.getMessage(), e ); + } + } + + + @Override + public boolean next() throws SQLException { + throwIfClosed(); + discardRowUpdates(); + return resultScroller.next(); + } + + + private PrismInterfaceClient getClient() { + return statement.getClient(); + } + + + private BidirectionalScroller getBidirectionalScrollerOrThrow() throws SQLException { + if ( resultScroller instanceof 
BidirectionalScroller ) { + return bidirectionScrollerClass.cast( resultScroller ); + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Illegal operation on resultset of type TYPE_FORWARD_ONLY" ); + } + + + @Override + public void close() throws SQLException { + if ( isClosed ) { + return; + } + statement.notifyResultClosure(); + isClosed = true; + } + + + @Override + public boolean wasNull() throws SQLException { + throwIfClosed(); + return lastRead.isNull(); + } + + + @Override + public String getString( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asString(); + } + + + @Override + public boolean getBoolean( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asBoolean(); + } + + + @Override + public byte getByte( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asByte(); + } + + + @Override + public short getShort( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asShort(); + } + + + @Override + public int getInt( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asInt(); + } + + + @Override + public long getLong( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asLong(); + } + + + @Override + public float getFloat( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asFloat(); + } + + + @Override + public double getDouble( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asDouble(); + } + + + @Override + public BigDecimal getBigDecimal( int columnIndex, int scale ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asBigDecimal( scale ); + } + + + @Override + public byte[] getBytes( int columnIndex ) throws SQLException { + throwIfClosed(); 
+ return accessValue( columnIndex ).asBytes(); + } + + + @Override + public Date getDate( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asDate( properties.getCalendar() ); + } + + + @Override + public Time getTime( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asTime( properties.getCalendar() ); + } + + + @Override + public Timestamp getTimestamp( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asTimestamp( properties.getCalendar() ); + } + + + @Override + public InputStream getAsciiStream( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asAsciiStream(); + } + + + @Override + public InputStream getUnicodeStream( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asUnicodeStream(); + } + + + @Override + public InputStream getBinaryStream( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asBinaryStream(); + } + + + @Override + public String getString( String columnLabel ) throws SQLException { + return getString( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public boolean getBoolean( String columnLabel ) throws SQLException { + return getBoolean( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public byte getByte( String columnLabel ) throws SQLException { + return getByte( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public short getShort( String columnLabel ) throws SQLException { + return getShort( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public int getInt( String columnLabel ) throws SQLException { + return getInt( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public long getLong( String columnLabel ) throws SQLException { + return getLong( 
metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public float getFloat( String columnLabel ) throws SQLException { + return getFloat( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public double getDouble( String columnLabel ) throws SQLException { + return getDouble( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public BigDecimal getBigDecimal( String columnLabel, int scale ) throws SQLException { + return getBigDecimal( metadata.getColumnIndexFromLabel( columnLabel ), scale ); + } + + + @Override + public byte[] getBytes( String columnLabel ) throws SQLException { + return getBytes( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public Date getDate( String columnLabel ) throws SQLException { + return getDate( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public Time getTime( String columnLabel ) throws SQLException { + return getTime( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public Timestamp getTimestamp( String columnLabel ) throws SQLException { + return getTimestamp( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public InputStream getAsciiStream( String columnLabel ) throws SQLException { + return getAsciiStream( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public InputStream getUnicodeStream( String columnLabel ) throws SQLException { + return getUnicodeStream( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public InputStream getBinaryStream( String columnLabel ) throws SQLException { + return getBinaryStream( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public SQLWarning getWarnings() throws SQLException { + return null; + } + + + @Override + public void clearWarnings() throws SQLException { + } + + + @Override + public String getCursorName() throws SQLException { + throw new 
SQLFeatureNotSupportedException(); + } + + + @Override + public ResultSetMetaData getMetaData() throws SQLException { + return metadata; + } + + + @Override + public Object getObject( int columnIndex ) throws SQLException { + throwIfClosed(); + TypedValue typedValue = accessValue( columnIndex ); + if ( typedValue.isUdt() ) { + return typedValue.asObject( getStatement().getConnection().getTypeMap() ); + } + return typedValue.asObject( properties.getCalendar() ); + } + + + @Override + public Object getObject( String columnLabel ) throws SQLException { + return getObject( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public int findColumn( String columnLabel ) throws SQLException { + throwIfClosed(); + return metadata.getColumnIndexFromLabel( columnLabel ); + } + + + @Override + public Reader getCharacterStream( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asCharacterStream(); + } + + + @Override + public Reader getCharacterStream( String columnLabel ) throws SQLException { + return getCharacterStream( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public BigDecimal getBigDecimal( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asBigDecimal(); + } + + + @Override + public BigDecimal getBigDecimal( String columnLabel ) throws SQLException { + return getBigDecimal( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public boolean isBeforeFirst() throws SQLException { + return resultScroller.isBeforeFirst(); + } + + + @Override + public boolean isAfterLast() throws SQLException { + return resultScroller.isAfterLast(); + } + + + @Override + public boolean isFirst() throws SQLException { + return resultScroller.isFirst(); + } + + + @Override + public boolean isLast() throws SQLException { + return resultScroller.isLast(); + } + + + @Override + public void beforeFirst() throws SQLException { + 
throwIfClosed(); + discardRowUpdates(); + getBidirectionalScrollerOrThrow().beforeFirst(); + } + + + @Override + public void afterLast() throws SQLException { + throwIfClosed(); + discardRowUpdates(); + getBidirectionalScrollerOrThrow().afterLast(); + } + + + @Override + public boolean first() throws SQLException { + throwIfClosed(); + discardRowUpdates(); + return getBidirectionalScrollerOrThrow().first(); + } + + + @Override + public boolean last() throws SQLException { + throwIfClosed(); + discardRowUpdates(); + try { + return getBidirectionalScrollerOrThrow().last(); + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Fetching more rows from server failed.", e ); + } + } + + + @Override + public int getRow() throws SQLException { + return resultScroller.getRow(); + } + + + @Override + public boolean absolute( int i ) throws SQLException { + throwIfClosed(); + discardRowUpdates(); + return getBidirectionalScrollerOrThrow().absolute( i ); + } + + + @Override + public boolean relative( int i ) throws SQLException { + throwIfClosed(); + discardRowUpdates(); + return getBidirectionalScrollerOrThrow().relative( i ); + } + + + @Override + public boolean previous() throws SQLException { + throwIfClosed(); + discardRowUpdates(); + return getBidirectionalScrollerOrThrow().previous(); + } + + + @Override + public void setFetchDirection( int fetchDirection ) throws SQLException { + throwIfClosed(); + if ( properties.getResultSetType() == ResultSet.TYPE_FORWARD_ONLY && fetchDirection != ResultSet.FETCH_FORWARD ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Illegal fetch direction for resultset of TYPE_FORWARD_ONLY." 
); + } + properties.setFetchDirection( fetchDirection ); + } + + + @Override + public int getFetchDirection() throws SQLException { + throwIfClosed(); + return properties.getFetchDirection(); + } + + + @Override + public void setFetchSize( int fetchSize ) throws SQLException { + throwIfClosed(); + if ( fetchSize < 0 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for fetch size. fetchSize >= 0 must hold." ); + } + properties.setFetchSize( fetchSize ); + getStatement().setFetchSize( fetchSize ); + } + + + @Override + public int getFetchSize() throws SQLException { + throwIfClosed(); + return properties.getFetchSize(); + } + + + @Override + public int getType() throws SQLException { + throwIfClosed(); + return properties.getResultSetType(); + } + + + @Override + public int getConcurrency() throws SQLException { + throwIfClosed(); + return properties.getResultSetConcurrency(); + } + + + @Override + public boolean rowUpdated() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public boolean rowInserted() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public boolean rowDeleted() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public void updateNull( int columnIndex ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromNull() ); + } + + + @Override + public void updateBoolean( int columnIndex, boolean x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromBoolean( x ) ); + } + + + @Override + public void updateByte( int columnIndex, byte x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + 
getOrCreateRowUpdate().put( columnIndex, TypedValue.fromByte( x ) ); + } + + + @Override + public void updateShort( int columnIndex, short x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromShort( x ) ); + } + + + @Override + public void updateInt( int columnIndex, int x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromInteger( x ) ); + } + + + @Override + public void updateLong( int columnIndex, long x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromLong( x ) ); + } + + + @Override + public void updateFloat( int columnIndex, float x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromFloat( x ) ); + } + + + @Override + public void updateDouble( int columnIndex, double x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromDouble( x ) ); + } + + + @Override + public void updateBigDecimal( int columnIndex, BigDecimal x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromBigDecimal( x ) ); + } + + + @Override + public void updateString( int columnIndex, String x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromString( x ) ); + } + + + @Override + public void updateBytes( int columnIndex, byte[] x ) throws SQLException { + throwIfClosed(); + 
throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromBytes( x ) ); + } + + + @Override + public void updateDate( int columnIndex, Date x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromDate( x ) ); + } + + + @Override + public void updateTime( int columnIndex, Time x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromTime( x ) ); + } + + + @Override + public void updateTimestamp( int columnIndex, Timestamp x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromTimestamp( x ) ); + } + + + @Override + public void updateAsciiStream( int columnIndex, InputStream x, int length ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throwIfColumnIndexOutOfBounds( columnIndex ); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromAsciiStream( x, length ) ); + } + + + @Override + public void updateBinaryStream( int columnIndex, InputStream x, int length ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromBinaryStream( x, length ) ); + } + + + @Override + public void updateCharacterStream( int columnIndex, Reader x, int length ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromCharacterStream( x, length ) ); + } + + + @Override + public void updateObject( int columnIndex, Object x, int saleOrLength ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + // TODO: proper implementation. 
scaleOrLength only applies to streams (length) and bigDecimals(scale) + } + + + @Override + public void updateObject( int columnIndex, Object x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromObject( x ) ); + } + + + @Override + public void updateNull( String columnLabel ) throws SQLException { + updateNull( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public void updateBoolean( String columnLabel, boolean x ) throws SQLException { + updateBoolean( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateByte( String columnLabel, byte x ) throws SQLException { + updateByte( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateShort( String columnLabel, short x ) throws SQLException { + updateShort( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateInt( String columnLabel, int x ) throws SQLException { + updateInt( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateLong( String columnLabel, long x ) throws SQLException { + updateLong( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateFloat( String columnLabel, float x ) throws SQLException { + updateFloat( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateDouble( String columnLabel, double x ) throws SQLException { + updateDouble( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateBigDecimal( String columnLabel, BigDecimal x ) throws SQLException { + updateBigDecimal( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateString( String columnLabel, String x ) throws SQLException { + updateString( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void 
updateBytes( String columnLabel, byte[] x ) throws SQLException { + updateBytes( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateDate( String columnLabel, Date x ) throws SQLException { + updateDate( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateTime( String columnLabel, Time x ) throws SQLException { + updateTime( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateTimestamp( String columnLabel, Timestamp x ) throws SQLException { + updateTimestamp( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateAsciiStream( String columnLabel, InputStream x, int length ) throws SQLException { + updateAsciiStream( metadata.getColumnIndexFromLabel( columnLabel ), x, length ); + } + + + @Override + public void updateBinaryStream( String columnLabel, InputStream x, int length ) throws SQLException { + updateBinaryStream( metadata.getColumnIndexFromLabel( columnLabel ), x, length ); + } + + + @Override + public void updateCharacterStream( String columnLabel, Reader x, int length ) throws SQLException { + updateCharacterStream( metadata.getColumnIndexFromLabel( columnLabel ), x, length ); + } + + + @Override + public void updateObject( String columnLabel, Object x, int scaleOrLength ) throws SQLException { + updateObject( metadata.getColumnIndexFromLabel( columnLabel ), x, scaleOrLength ); + } + + + @Override + public void updateObject( String columnLabel, Object x ) throws SQLException { + updateObject( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void insertRow() throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public void updateRow() throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public void deleteRow() 
throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public void refreshRow() throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public void cancelRowUpdates() throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throw new SQLFeatureNotSupportedException(); + //discardRowUpdates(); + } + + + @Override + public void moveToInsertRow() throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throw new SQLFeatureNotSupportedException(); + //isInInsertMode = true; + } + + + @Override + public void moveToCurrentRow() throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + throw new SQLFeatureNotSupportedException(); + //isInInsertMode = false; + } + + + @Override + public Statement getStatement() throws SQLException { + throwIfClosed(); + if ( isMeta ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "This operation cannot be applied to a dummy result set of a meta request." 
); + } + return statement; + } + + + @Override + public Object getObject( int columnIndex, Map> map ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public Ref getRef( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asRef(); + } + + + @Override + public Blob getBlob( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asBlob(); + } + + + @Override + public Clob getClob( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asClob(); + } + + + @Override + public Array getArray( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asArray(); + } + + + @Override + public Object getObject( String columnLabel, Map> map ) throws SQLException { + return getObject( metadata.getColumnIndexFromLabel( columnLabel ), map ); + } + + + @Override + public Ref getRef( String columnLabel ) throws SQLException { + return getRef( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public Blob getBlob( String columnLabel ) throws SQLException { + return getBlob( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public Clob getClob( String columnLabel ) throws SQLException { + return getClob( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public Array getArray( String columnLabel ) throws SQLException { + return getArray( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public Date getDate( int columnIndex, Calendar calendar ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asDate( calendar ); + } + + + @Override + public Date getDate( String columnLabel, Calendar calendar ) throws SQLException { + return getDate( metadata.getColumnIndexFromLabel( columnLabel ), calendar ); + } + + + @Override + public Time getTime( int columnIndex, Calendar calendar ) 
throws SQLException { + return accessValue( columnIndex ).asTime( calendar ); + } + + + @Override + public Time getTime( String columnLabel, Calendar calendar ) throws SQLException { + return getTime( metadata.getColumnIndexFromLabel( columnLabel ), calendar ); + } + + + @Override + public Timestamp getTimestamp( int columnIndex, Calendar calendar ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asTimestamp( calendar ); + } + + + @Override + public Timestamp getTimestamp( String columnLabel, Calendar calendar ) throws SQLException { + return getTimestamp( metadata.getColumnIndexFromLabel( columnLabel ), calendar ); + } + + + @Override + public URL getURL( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asUrl(); + } + + + @Override + public URL getURL( String columnLabel ) throws SQLException { + return getURL( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public void updateRef( int columnIndex, Ref x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromRef( x ) ); + } + + + @Override + public void updateRef( String columnLabel, Ref x ) throws SQLException { + updateRef( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateBlob( int columnIndex, Blob x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromBlob( x ) ); + } + + + @Override + public void updateBlob( String columnLabel, Blob x ) throws SQLException { + updateBlob( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateClob( int columnIndex, Clob x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromClob( x ) ); + } + + + @Override + public void updateClob( String columnLabel, Clob x ) throws SQLException { + updateClob( 
metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateArray( int columnIndex, Array x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromArray( x ) ); + } + + + @Override + public void updateArray( String columnLabel, Array x ) throws SQLException { + updateArray( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public RowId getRowId( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asRowId(); + } + + + @Override + public RowId getRowId( String columnLabel ) throws SQLException { + return getRowId( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public void updateRowId( int columnIndex, RowId x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromRowId( x ) ); + } + + + @Override + public void updateRowId( String columnLabel, RowId x ) throws SQLException { + updateRowId( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public int getHoldability() throws SQLException { + throwIfClosed(); + return properties.getResultSetHoldability(); + } + + + @Override + public boolean isClosed() throws SQLException { + return isClosed; + } + + + @Override + public void updateNString( int columnIndex, String x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromNString( x ) ); + } + + + @Override + public void updateNString( String columnLabel, String x ) throws SQLException { + updateNString( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateNClob( int columnIndex, NClob x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromNClob( x ) ); + } + + + @Override + public void updateNClob( String 
columnLabel, NClob x ) throws SQLException { + updateNClob( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public NClob getNClob( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asNClob(); + } + + + @Override + public NClob getNClob( String columnLabel ) throws SQLException { + return getNClob( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public SQLXML getSQLXML( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asSQLXML(); + } + + + @Override + public SQLXML getSQLXML( String columnLabel ) throws SQLException { + return getSQLXML( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public void updateSQLXML( int columnIndex, SQLXML x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromSQLXML( x ) ); + } + + + @Override + public void updateSQLXML( String columnLabel, SQLXML x ) throws SQLException { + updateSQLXML( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public String getNString( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asNString(); + } + + + @Override + public String getNString( String columnLabel ) throws SQLException { + return getNString( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public Reader getNCharacterStream( int columnIndex ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asNCharacterStream(); + } + + + @Override + public Reader getNCharacterStream( String columnLabel ) throws SQLException { + return getNCharacterStream( metadata.getColumnIndexFromLabel( columnLabel ) ); + } + + + @Override + public void updateNCharacterStream( int columnIndex, Reader x, long length ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, 
TypedValue.fromNCharacterStream( x, length ) ); + } + + + @Override + public void updateNCharacterStream( String columnLabel, Reader x, long length ) throws SQLException { + updateNCharacterStream( metadata.getColumnIndexFromLabel( columnLabel ), x, length ); + } + + + @Override + public void updateAsciiStream( int columnIndex, InputStream x, long length ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromAsciiStream( x, length ) ); + } + + + @Override + public void updateBinaryStream( int columnIndex, InputStream x, long length ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromBinaryStream( x, length ) ); + } + + + @Override + public void updateCharacterStream( int columnIndex, Reader x, long length ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromCharacterStream( x, length ) ); + } + + + @Override + public void updateAsciiStream( String columnLabel, InputStream x, long length ) throws SQLException { + updateAsciiStream( metadata.getColumnIndexFromLabel( columnLabel ), x, length ); + } + + + @Override + public void updateBinaryStream( String columnLabel, InputStream x, long length ) throws SQLException { + updateBinaryStream( metadata.getColumnIndexFromLabel( columnLabel ), x, length ); + } + + + @Override + public void updateCharacterStream( String columnLabel, Reader x, long length ) throws SQLException { + updateCharacterStream( metadata.getColumnIndexFromLabel( columnLabel ), x, length ); + } + + + @Override + public void updateBlob( int columnIndex, InputStream x, long length ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromBlob( x ) ); + } + + + @Override + public void updateBlob( String columnLabel, InputStream x, long length ) throws SQLException { + updateBlob( 
metadata.getColumnIndexFromLabel( columnLabel ), x, length ); + } + + + @Override + public void updateClob( int columnIndex, Reader x, long length ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromClob( x, length ) ); + } + + + @Override + public void updateClob( String columnLabel, Reader x, long length ) throws SQLException { + updateClob( metadata.getColumnIndexFromLabel( columnLabel ), x, length ); + } + + + @Override + public void updateNClob( int columnIndex, Reader x, long length ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromNClob( x, length ) ); + } + + + @Override + public void updateNClob( String columnLabel, Reader x, long length ) throws SQLException { + updateNClob( metadata.getColumnIndexFromLabel( columnLabel ), x, length ); + } + + + @Override + public void updateNCharacterStream( int columnIndex, Reader x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromNCharacterStream( x ) ); + } + + + @Override + public void updateNCharacterStream( String columnLabel, Reader x ) throws SQLException { + updateNCharacterStream( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateAsciiStream( int columnIndex, InputStream x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromAsciiStream( x ) ); + } + + + @Override + public void updateBinaryStream( int columnIndex, InputStream x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromBinaryStream( x ) ); + } + + + @Override + public void updateCharacterStream( int columnIndex, Reader x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromCharacterStream( x ) 
); + } + + + @Override + public void updateAsciiStream( String columnLabel, InputStream x ) throws SQLException { + updateAsciiStream( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateBinaryStream( String columnLabel, InputStream x ) throws SQLException { + updateBinaryStream( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateCharacterStream( String columnLabel, Reader x ) throws SQLException { + updateCharacterStream( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateBlob( int columnIndex, InputStream x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromBlob( x ) ); + } + + + @Override + public void updateBlob( String columnLabel, InputStream x ) throws SQLException { + updateBlob( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateClob( int columnIndex, Reader x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromClob( x ) ); + } + + + @Override + public void updateClob( String columnLabel, Reader x ) throws SQLException { + updateClob( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public void updateNClob( int columnIndex, Reader x ) throws SQLException { + throwIfClosed(); + throwIfReadOnly(); + getOrCreateRowUpdate().put( columnIndex, TypedValue.fromNClob( x ) ); + } + + + @Override + public void updateNClob( String columnLabel, Reader x ) throws SQLException { + updateNClob( metadata.getColumnIndexFromLabel( columnLabel ), x ); + } + + + @Override + public T getObject( int columnIndex, Class aClass ) throws SQLException { + throwIfClosed(); + return accessValue( columnIndex ).asObject( aClass ); + } + + + @Override + public T getObject( String columnLabel, Class aClass ) throws SQLException { + return getObject( 
metadata.getColumnIndexFromLabel( columnLabel ), aClass ); + } + + + @Override + public T unwrap( Class aClass ) throws SQLException { + if ( aClass.isInstance( this ) ) { + return aClass.cast( this ); + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.WRAPPER_INCORRECT_TYPE, "Not a wrapper for " + aClass ); + } + + + @Override + public boolean isWrapperFor( Class aClass ) { + return aClass.isInstance( this ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyRowId.java b/src/main/java/org/polypheny/jdbc/PolyphenyRowId.java new file mode 100644 index 00000000..403d7492 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/PolyphenyRowId.java @@ -0,0 +1,43 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.nio.charset.StandardCharsets; +import java.sql.RowId; + +public class PolyphenyRowId implements RowId { + + String rowId; + + + public PolyphenyRowId( String rowId ) { + this.rowId = rowId; + } + + + @Override + public byte[] getBytes() { + return rowId.getBytes( StandardCharsets.UTF_8 ); + } + + + @Override + public String toString() { + return rowId; + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyStatement.java b/src/main/java/org/polypheny/jdbc/PolyphenyStatement.java new file mode 100644 index 00000000..780515a5 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/PolyphenyStatement.java @@ -0,0 +1,615 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLWarning; +import java.sql.Statement; +import java.util.LinkedList; +import java.util.List; +import java.util.stream.Collectors; +import lombok.Getter; +import org.polypheny.jdbc.properties.PolyphenyStatementProperties; +import org.polypheny.jdbc.properties.PropertyUtils; +import org.polypheny.jdbc.utils.CallbackQueue; +import org.polypheny.prism.ExecuteUnparameterizedStatementRequest; +import org.polypheny.prism.Frame; +import org.polypheny.prism.Frame.ResultCase; +import org.polypheny.prism.Response; +import org.polypheny.prism.StatementBatchResponse; +import org.polypheny.prism.StatementResponse; + +public class PolyphenyStatement implements Statement { + + @Getter + private PolyConnection polyConnection; + protected ResultSet currentResult; + protected long currentUpdateCount; + @Getter + protected int statementId; + + private boolean isClosed; + protected PolyphenyStatementProperties properties; + + // Value used to represent that no value is set for the update count according to JDBC. 
+ protected static final int NO_UPDATE_COUNT = -1; + protected static final int NO_STATEMENT_ID = -1; + + protected List statementBatch; + + + public PolyphenyStatement( PolyConnection connection, PolyphenyStatementProperties properties ) throws SQLException { + this.polyConnection = connection; + this.properties = properties; + this.isClosed = false; + this.statementBatch = new LinkedList<>(); + this.properties.setPolyphenyStatement( this ); + this.statementId = NO_STATEMENT_ID; + this.currentResult = null; + } + + + public boolean hasStatementId() { + return statementId != NO_STATEMENT_ID; + } + + + protected PrismInterfaceClient getClient() { + return polyConnection.getPrismInterfaceClient(); + } + + + protected int longToInt( long longNumber ) { + return Math.toIntExact( longNumber ); + } + + + private void prepareForReExecution() throws SQLException { + if ( currentResult != null ) { + currentResult.close(); + } + currentUpdateCount = NO_UPDATE_COUNT; + if ( statementId != NO_STATEMENT_ID ) { + getClient().closeStatement( statementId, getTimeout() ); + statementId = NO_STATEMENT_ID; + } + } + + + public void notifyResultClosure() throws SQLException { + this.currentResult = null; + getClient().closeResult( statementId, getTimeout() ); + if ( isCloseOnCompletion() ) { + close(); + } + } + + + @Override + public void close() throws SQLException { + if ( isClosed ) { + return; + } + polyConnection.endTracking( this ); + prepareForReExecution(); + isClosed = true; + } + + + protected int getTimeout() throws SQLException { + return Math.min( getConnection().getNetworkTimeout(), properties.getQueryTimeoutSeconds() * 1000 ); + } + + + protected void throwIfClosed() throws SQLException { + if ( isClosed ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Illegal operation for a closed statement" ); + } + } + + + protected void throwIfNotRelational( Frame frame ) throws SQLException { + if ( frame.getResultCase() == 
ResultCase.RELATIONAL_FRAME ) { + return; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.RESULT_TYPE_INVALID, "Statement must produce a relational result" ); + } + + + @Override + public ResultSet executeQuery( String statement ) throws SQLException { + throwIfClosed(); + clearBatch(); + prepareForReExecution(); + CallbackQueue callback = new CallbackQueue<>( Response::getStatementResponse ); + String namespaceName = getConnection().getSchema(); + getClient().executeUnparameterizedStatement( namespaceName, PropertyUtils.getSQL_LANGUAGE_NAME(), statement, callback, getTimeout() ); + while ( true ) { + StatementResponse response = callback.takeNext(); + if ( !hasStatementId() ) { + statementId = response.getStatementId(); + } + if ( !response.hasResult() ) { + continue; + } + try { + callback.awaitCompletion(); + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Awaiting completion of api call failed.", e ); + } + if ( !response.getResult().hasFrame() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.RESULT_TYPE_INVALID, "Statement must produce a single ResultSet" ); + } + Frame frame = response.getResult().getFrame(); + throwIfNotRelational( frame ); + currentResult = new PolyphenyResultSet( this, frame, properties.toResultSetProperties() ); + return currentResult; + } + } + + + @Override + public int executeUpdate( String statement ) throws SQLException { + throwIfClosed(); + clearBatch(); + prepareForReExecution(); + CallbackQueue callback = new CallbackQueue<>( Response::getStatementResponse ); + String namespaceName = getConnection().getSchema(); + getClient().executeUnparameterizedStatement( namespaceName, PropertyUtils.getSQL_LANGUAGE_NAME(), statement, callback, getTimeout() ); + while ( true ) { + StatementResponse response = callback.takeNext(); + if ( !hasStatementId() ) { + statementId = response.getStatementId(); + } + if ( 
!response.hasResult() ) { + continue; + } + try { + callback.awaitCompletion(); + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Awaiting completion of api call failed.", e ); + } + if ( response.getResult().hasFrame() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.RESULT_TYPE_INVALID, "Statement must not produce a ResultSet" ); + } + currentUpdateCount = response.getResult().getScalar(); + return longToInt( currentUpdateCount ); + } + } + + + @Override + public int getMaxFieldSize() throws SQLException { + throwIfClosed(); + return properties.getMaxFieldSize(); + } + + + @Override + public void setMaxFieldSize( int max ) throws SQLException { + throwIfClosed(); + properties.setMaxFieldSize( max ); + } + + + @Override + public long getLargeMaxRows() throws SQLException { + throwIfClosed(); + return properties.getLargeMaxRows(); + } + + + @Override + public int getMaxRows() throws SQLException { + throwIfClosed(); + return longToInt( getLargeMaxRows() ); + } + + + @Override + public void setLargeMaxRows( long max ) throws SQLException { + throwIfClosed(); + properties.setLargeMaxRows( max ); + } + + + @Override + public void setMaxRows( int max ) throws SQLException { + setLargeMaxRows( max ); + } + + + @Override + public void setEscapeProcessing( boolean enable ) throws SQLException { + throwIfClosed(); + properties.setDoesEscapeProcessing( enable ); + } + + + @Override + public int getQueryTimeout() throws SQLException { + throwIfClosed(); + return properties.getQueryTimeoutSeconds(); + } + + + @Override + public void setQueryTimeout( int seconds ) throws SQLException { + throwIfClosed(); + if ( seconds < 0 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal argument for max" ); + } + properties.setQueryTimeoutSeconds( seconds ); + } + + + @Override + public void cancel() throws SQLException { + throwIfClosed(); + // TODO 
TH: implement cancelling + } + + + @Override + public SQLWarning getWarnings() throws SQLException { + return null; + } + + + @Override + public void clearWarnings() throws SQLException { + throwIfClosed(); + } + + + @Override + public void setCursorName( String s ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public boolean execute( String statement ) throws SQLException { + throwIfClosed(); + clearBatch(); + prepareForReExecution(); + CallbackQueue callback = new CallbackQueue<>( Response::getStatementResponse ); + String namespaceName = getConnection().getSchema(); + getClient().executeUnparameterizedStatement( namespaceName, PropertyUtils.getSQL_LANGUAGE_NAME(), statement, callback, getTimeout() ); + while ( true ) { + StatementResponse response = callback.takeNext(); + if ( !hasStatementId() ) { + statementId = response.getStatementId(); + } + if ( !response.hasResult() ) { + continue; + } + try { + callback.awaitCompletion(); + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Awaiting completion of api call failed.", e ); + } + if ( !response.getResult().hasFrame() ) { + currentUpdateCount = response.getResult().getScalar(); // keep the full 64-bit count as executeUpdate() does; getUpdateCount() narrows via longToInt on demand, so getLargeUpdateCount() stays exact + return false; + } + Frame frame = response.getResult().getFrame(); + throwIfNotRelational( frame ); + currentResult = new PolyphenyResultSet( this, frame, properties.toResultSetProperties() ); + return true; + } + } + + + @Override + public ResultSet getResultSet() throws SQLException { + throwIfClosed(); + return currentResult; + } + + + @Override + public long getLargeUpdateCount() throws SQLException { + throwIfClosed(); + return currentUpdateCount; + } + + + @Override + public int getUpdateCount() throws SQLException { + return longToInt( getLargeUpdateCount() ); + } + + + @Override + public boolean getMoreResults() throws SQLException { + throwIfClosed(); + prepareForReExecution(); + // statements
can not return multiple result sets + return false; + } + + + @Override + public void setFetchDirection( int direction ) throws SQLException { + throwIfClosed(); + properties.setFetchDirection( direction ); + } + + + @Override + public int getFetchDirection() throws SQLException { + throwIfClosed(); + return properties.getFetchDirection(); + } + + + @Override + public void setFetchSize( int rows ) throws SQLException { + throwIfClosed(); + properties.setFetchSize( rows ); + } + + + @Override + public int getFetchSize() throws SQLException { + throwIfClosed(); + return properties.getFetchSize(); + } + + + @Override + public int getResultSetConcurrency() throws SQLException { + throwIfClosed(); + return properties.getResultSetConcurrency(); + } + + + @Override + public int getResultSetType() throws SQLException { + throwIfClosed(); + return properties.getResultSetType(); + } + + + @Override + public void addBatch( String sql ) throws SQLException { + throwIfClosed(); + statementBatch.add( sql ); + } + + + @Override + public void clearBatch() throws SQLException { + if ( statementBatch.isEmpty() ) { + return; + } + statementBatch.clear(); + } + + + @Override + public long[] executeLargeBatch() throws SQLException { + List scalars = executeUnparameterizedBatch(); + long[] updateCounts = new long[scalars.size()]; + for ( int i = 0; i < scalars.size(); i++ ) { + updateCounts[i] = scalars.get( i ); + } + return updateCounts; + } + + + @Override + public int[] executeBatch() throws SQLException { + List scalars = executeUnparameterizedBatch(); + int[] updateCounts = new int[scalars.size()]; + for ( int i = 0; i < scalars.size(); i++ ) { + updateCounts[i] = longToInt( scalars.get( i ) ); + } + return updateCounts; + } + + + private List executeUnparameterizedBatch() throws SQLException { + try { + throwIfClosed(); + prepareForReExecution(); + CallbackQueue callback = new CallbackQueue<>( Response::getStatementBatchResponse ); + List requests = buildBatchRequest(); + 
clearBatch(); + getClient().executeUnparameterizedStatementBatch( requests, callback, getTimeout() ); + while ( true ) { + StatementBatchResponse status = callback.takeNext(); + if ( !hasStatementId() ) { + statementId = status.getBatchId(); + } + if ( status.getScalarsCount() == 0 ) { + continue; + } + try { + callback.awaitCompletion(); + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Awaiting completion of api call failed.", e ); + } + return status.getScalarsList(); + } + } finally { + clearBatch(); + } + } + + + List buildBatchRequest() throws SQLException { + String namespaceName = getConnection().getSchema(); + return statementBatch.stream() + .map( + s -> { + ExecuteUnparameterizedStatementRequest.Builder builder = ExecuteUnparameterizedStatementRequest.newBuilder() + .setStatement( s ) + .setFetchSize( properties.getFetchSize() ) + .setLanguageName( PropertyUtils.getSQL_LANGUAGE_NAME() ); + if ( namespaceName != null ) { + builder.setNamespaceName( namespaceName ); + } + return builder.build(); + } + ) + .collect( Collectors.toList() ); + } + + + @Override + public Connection getConnection() throws SQLException { + throwIfClosed(); + return polyConnection; + } + + + @Override + public boolean getMoreResults( int i ) throws SQLException { + if ( i == KEEP_CURRENT_RESULT || i == CLOSE_ALL_RESULTS ) { + throw new SQLFeatureNotSupportedException(); + } + if ( i != CLOSE_CURRENT_RESULT ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for closing behaviour: " + i ); + } + throwIfClosed(); + prepareForReExecution(); + // statements can not return multiple result sets + return false; + } + + + @Override + public ResultSet getGeneratedKeys() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public long executeLargeUpdate( String sql, int autogeneratedKeys ) throws SQLException { + throw new 
SQLFeatureNotSupportedException(); + } + + + @Override + public int executeUpdate( String sql, int autogeneratedKeys ) throws SQLException { + return longToInt( executeLargeUpdate( sql, autogeneratedKeys ) ); + } + + + @Override + public long executeLargeUpdate( String sql, int[] columnIndexes ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public int executeUpdate( String sql, int[] columnIndexes ) throws SQLException { + return longToInt( executeLargeUpdate( sql, columnIndexes ) ); + } + + + @Override + public long executeLargeUpdate( String sql, String[] columnNames ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public int executeUpdate( String sql, String[] columnNames ) throws SQLException { + return longToInt( executeLargeUpdate( sql, columnNames ) ); + } + + + @Override + public boolean execute( String s, int i ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public boolean execute( String s, int[] ints ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public boolean execute( String s, String[] strings ) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + + @Override + public int getResultSetHoldability() throws SQLException { + throwIfClosed(); + return properties.getResultSetHoldability(); + } + + + @Override + public boolean isClosed() throws SQLException { + return isClosed; + } + + + @Override + public void setPoolable( boolean poolable ) throws SQLException { + throwIfClosed(); + properties.setIsPoolable( poolable ); + } + + + @Override + public boolean isPoolable() throws SQLException { + throwIfClosed(); + return properties.isPoolable(); + } + + + @Override + public void closeOnCompletion() throws SQLException { + throwIfClosed(); + properties.setCloseOnCompletion( true ); + } + + + @Override + public boolean isCloseOnCompletion() throws SQLException 
{ + throwIfClosed(); + return properties.isCloseOnCompletion(); + } + + + @Override + public T unwrap( Class aClass ) throws SQLException { + if ( aClass.isInstance( this ) ) { + return aClass.cast( this ); + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.WRAPPER_INCORRECT_TYPE, "Not a wrapper for " + aClass ); + } + + + @Override + public boolean isWrapperFor( Class aClass ) { + return aClass.isInstance( this ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PrismInterfaceClient.java b/src/main/java/org/polypheny/jdbc/PrismInterfaceClient.java new file mode 100644 index 00000000..e0752294 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/PrismInterfaceClient.java @@ -0,0 +1,364 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Properties; +import java.util.stream.Collectors; +import org.polypheny.jdbc.properties.PolyphenyConnectionProperties; +import org.polypheny.jdbc.transport.PlainTransport; +import org.polypheny.jdbc.transport.Transport; +import org.polypheny.jdbc.types.TypedValue; +import org.polypheny.jdbc.utils.CallbackQueue; +import org.polypheny.jdbc.utils.ProtoUtils; +import org.polypheny.jdbc.utils.VersionUtil; +import org.polypheny.prism.ClientInfoProperties; +import org.polypheny.prism.ClientInfoPropertiesRequest; +import org.polypheny.prism.CloseResultRequest; +import org.polypheny.prism.CloseStatementRequest; +import org.polypheny.prism.CommitRequest; +import org.polypheny.prism.ConnectionCheckRequest; +import org.polypheny.prism.ConnectionProperties; +import org.polypheny.prism.ConnectionPropertiesUpdateRequest; +import org.polypheny.prism.ConnectionRequest; +import org.polypheny.prism.ConnectionResponse; +import org.polypheny.prism.DbmsVersionRequest; +import org.polypheny.prism.DbmsVersionResponse; +import org.polypheny.prism.DefaultNamespaceRequest; +import org.polypheny.prism.DisconnectRequest; +import org.polypheny.prism.EntitiesRequest; +import org.polypheny.prism.Entity; +import org.polypheny.prism.ExecuteIndexedStatementBatchRequest; +import org.polypheny.prism.ExecuteIndexedStatementRequest; +import org.polypheny.prism.ExecuteUnparameterizedStatementBatchRequest; +import org.polypheny.prism.ExecuteUnparameterizedStatementRequest; +import org.polypheny.prism.FetchRequest; +import org.polypheny.prism.Frame; +import org.polypheny.prism.Function; +import org.polypheny.prism.FunctionsRequest; +import org.polypheny.prism.IndexedParameters; +import org.polypheny.prism.Namespace; +import org.polypheny.prism.NamespacesRequest; +import org.polypheny.prism.PrepareStatementRequest; +import 
org.polypheny.prism.PreparedStatementSignature; +import org.polypheny.prism.Procedure; +import org.polypheny.prism.ProceduresRequest; +import org.polypheny.prism.RollbackRequest; +import org.polypheny.prism.SqlKeywordsRequest; +import org.polypheny.prism.SqlNumericFunctionsRequest; +import org.polypheny.prism.SqlStringFunctionsRequest; +import org.polypheny.prism.SqlSystemFunctionsRequest; +import org.polypheny.prism.SqlTimeDateFunctionsRequest; +import org.polypheny.prism.StatementBatchResponse; +import org.polypheny.prism.StatementResponse; +import org.polypheny.prism.StatementResult; +import org.polypheny.prism.TableType; +import org.polypheny.prism.TableTypesRequest; +import org.polypheny.prism.Type; +import org.polypheny.prism.TypesRequest; + +public class PrismInterfaceClient { + + private final Transport con; + private final RpcService rpc; + + + public PrismInterfaceClient( String host, int port, Map parameters ) throws PrismInterfaceServiceException { + try { + String transport = parameters.getOrDefault( "transport", "plain" ); + if ( transport.equals( "plain" ) ) { + con = new PlainTransport( host, port ); + } else { + throw new PrismInterfaceServiceException( "Unknown transport " + transport ); + } + rpc = new RpcService( con ); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( e ); + } + } + + + public boolean checkConnection( int timeout ) { + ConnectionCheckRequest request = ConnectionCheckRequest.newBuilder().build(); + try { + /* ConnectionCheckResponses are empty messages */ + rpc.checkConnection( request, timeout ); + return true; + } catch ( PrismInterfaceServiceException e ) { + return false; + } + } + + + public ConnectionResponse register( PolyphenyConnectionProperties connectionProperties, int timeout ) throws PrismInterfaceServiceException { + ConnectionRequest.Builder requestBuilder = ConnectionRequest.newBuilder(); + Optional.ofNullable( connectionProperties.getUsername() ).ifPresent( requestBuilder::setUsername ); 
+ Optional.ofNullable( connectionProperties.getPassword() ).ifPresent( requestBuilder::setPassword ); + requestBuilder + .setMajorApiVersion( VersionUtil.MAJOR_API_VERSION ) + .setMinorApiVersion( VersionUtil.MINOR_API_VERSION ) + //.setClientUuid( clientUUID ) + .setConnectionProperties( buildConnectionProperties( connectionProperties ) ); + ConnectionResponse connectionResponse = rpc.connect( requestBuilder.build(), timeout ); + if ( !connectionResponse.getIsCompatible() ) { + throw new PrismInterfaceServiceException( "client version " + getClientApiVersionString() + + " not compatible with server version " + getServerApiVersionString( connectionResponse ) + "." ); + } + return connectionResponse; + } + + + private ConnectionProperties buildConnectionProperties( PolyphenyConnectionProperties properties ) { + ConnectionProperties.Builder propertiesBuilder = ConnectionProperties.newBuilder(); + Optional.ofNullable( properties.getNamespaceName() ).ifPresent( propertiesBuilder::setNamespaceName ); + return propertiesBuilder + .setIsAutoCommit( properties.isAutoCommit() ) + .build(); + } + + + public void unregister( int timeout ) throws PrismInterfaceServiceException { + DisconnectRequest request = DisconnectRequest.newBuilder().build(); + try { + rpc.disconnect( request, timeout ); + } finally { + rpc.close(); + } + } + + + public void executeUnparameterizedStatement( String namespaceName, String languageName, String statement, CallbackQueue callback, int timeout ) throws PrismInterfaceServiceException { + ExecuteUnparameterizedStatementRequest.Builder requestBuilder = ExecuteUnparameterizedStatementRequest.newBuilder(); + if ( namespaceName != null ) { + requestBuilder.setNamespaceName( namespaceName ); + } + ExecuteUnparameterizedStatementRequest request = requestBuilder + .setLanguageName( languageName ) + .setStatement( statement ) + .build(); + rpc.executeUnparameterizedStatement( request, callback ); // TODO timeout + } + + + public void 
executeUnparameterizedStatementBatch( List requests, CallbackQueue updateCallback, int timeout ) throws PrismInterfaceServiceException { + ExecuteUnparameterizedStatementBatchRequest request = ExecuteUnparameterizedStatementBatchRequest.newBuilder() + .addAllStatements( requests ) + .build(); + rpc.executeUnparameterizedStatementBatch( request, updateCallback ); // TODO timeout + } + + + public PreparedStatementSignature prepareIndexedStatement( String namespaceName, String languageName, String statement, int timeout ) throws PrismInterfaceServiceException { + PrepareStatementRequest.Builder requestBuilder = PrepareStatementRequest.newBuilder(); + if ( namespaceName != null ) { + requestBuilder.setNamespaceName( namespaceName ); + } + PrepareStatementRequest request = requestBuilder + .setStatement( statement ) + .setLanguageName( languageName ) + .build(); + + return rpc.prepareIndexedStatement( request, timeout ); + } + + + public StatementResult executeIndexedStatement( int statementId, List values, int fetchSize, int timeout ) throws PrismInterfaceServiceException { + IndexedParameters parameters = IndexedParameters.newBuilder() + .addAllParameters( ProtoUtils.serializeParameterList( values ) ) + .build(); + ExecuteIndexedStatementRequest request = ExecuteIndexedStatementRequest.newBuilder() + .setStatementId( statementId ) + .setParameters( parameters ) + .setFetchSize( fetchSize ) + .build(); + + return rpc.executeIndexedStatement( request, timeout ); + } + + + public StatementBatchResponse executeIndexedStatementBatch( int statementId, List> parameterBatch, int timeout ) throws PrismInterfaceServiceException { + List parameters = parameterBatch.stream() + .map( ProtoUtils::serializeParameterList ) + .map( p -> IndexedParameters.newBuilder().addAllParameters( p ).build() ) + .collect( Collectors.toList() ); + ExecuteIndexedStatementBatchRequest request = ExecuteIndexedStatementBatchRequest.newBuilder() + .setStatementId( statementId ) + .addAllParameters( 
parameters ) + .build(); + + return rpc.executeIndexedStatementBatch( request, timeout ); + } + + + public void commitTransaction( int timeout ) throws PrismInterfaceServiceException { + CommitRequest commitRequest = CommitRequest.newBuilder().build(); + + rpc.commit( commitRequest, timeout ); + } + + + public void rollbackTransaction( int timeout ) throws PrismInterfaceServiceException { + RollbackRequest rollbackRequest = RollbackRequest.newBuilder().build(); + + rpc.rollback( rollbackRequest, timeout ); + } + + + public void closeStatement( int statementId, int timeout ) throws PrismInterfaceServiceException { + CloseStatementRequest request = CloseStatementRequest.newBuilder() + .setStatementId( statementId ) + .build(); + + rpc.closeStatement( request, timeout ); + } + + + public void closeResult( int statementId, int timeout ) throws PrismInterfaceServiceException { + CloseResultRequest resultCloseRequest = CloseResultRequest.newBuilder() + .setStatementId( statementId ) + .build(); + + rpc.closeResult( resultCloseRequest, timeout ); + } + + + public Frame fetchResult( int statementId, int fetchSize, int timeout ) throws PrismInterfaceServiceException { + FetchRequest fetchRequest = FetchRequest.newBuilder() + .setFetchSize( fetchSize ) + .setStatementId( statementId ) + .build(); + + return rpc.fetchResult( fetchRequest, timeout ); + } + + + private String getServerApiVersionString( ConnectionResponse response ) { + return response.getMajorApiVersion() + "." + response.getMinorApiVersion(); + } + + + private static String getClientApiVersionString() { + return VersionUtil.MAJOR + "." 
+ VersionUtil.MINOR; + } + + + public DbmsVersionResponse getDbmsVersion( int timeout ) throws PrismInterfaceServiceException { + DbmsVersionRequest dbmsVersionRequest = DbmsVersionRequest.newBuilder().build(); + + return rpc.getDbmsVersion( dbmsVersionRequest, timeout ); + } + + + public String getDefaultNamespace( int timeout ) throws PrismInterfaceServiceException { + return rpc.defaultNamespaceRequest( DefaultNamespaceRequest.newBuilder().build(), timeout ).getDefaultNamespace(); + } + + + public List getTypes( int timeout ) throws PrismInterfaceServiceException { + return rpc.getTypes( TypesRequest.newBuilder().build(), timeout ).getTypesList(); + } + + + public String getSqlStringFunctions( int timeout ) throws PrismInterfaceServiceException { + return rpc.getSqlStringFunctions( SqlStringFunctionsRequest.newBuilder().build(), timeout ).getString(); + } + + + public String getSqlSystemFunctions( int timeout ) throws PrismInterfaceServiceException { + return rpc.getSqlSystemFunctions( SqlSystemFunctionsRequest.newBuilder().build(), timeout ).getString(); + } + + + public String getSqlTimeDateFunctions( int timeout ) throws PrismInterfaceServiceException { + return rpc.getSqlTimeDateFunctions( SqlTimeDateFunctionsRequest.newBuilder().build(), timeout ).getString(); + } + + + public String getSqlNumericFunctions( int timeout ) throws PrismInterfaceServiceException { + return rpc.getSqlNumericFunctions( SqlNumericFunctionsRequest.newBuilder().build(), timeout ).getString(); + } + + + public String getSqlKeywords( int timeout ) throws PrismInterfaceServiceException { + return rpc.getSqlKeywords( SqlKeywordsRequest.newBuilder().build(), timeout ).getString(); + } + + + public void setConnectionProperties( PolyphenyConnectionProperties connectionProperties, int timeout ) throws PrismInterfaceServiceException { + ConnectionPropertiesUpdateRequest request = ConnectionPropertiesUpdateRequest.newBuilder() + .setConnectionProperties( buildConnectionProperties( 
connectionProperties ) ) + .build(); + rpc.updateConnectionProperties( request, timeout ); + } + + + public List searchProcedures( String languageName, String procedureNamePattern, int timeout ) throws PrismInterfaceServiceException { + ProceduresRequest.Builder requestBuilder = ProceduresRequest.newBuilder(); + requestBuilder.setLanguage( languageName ); + Optional.ofNullable( procedureNamePattern ).ifPresent( requestBuilder::setProcedureNamePattern ); + return rpc.searchProcedures( requestBuilder.build(), timeout ).getProceduresList(); + } + + + public Map getClientInfoProperties( int timeout ) throws PrismInterfaceServiceException { + return rpc.getClientInfoProperties( ClientInfoPropertiesRequest.newBuilder().build(), timeout ).getPropertiesMap(); + } + + + public List searchNamespaces( String schemaPattern, String protoNamespaceType, int timeout ) throws PrismInterfaceServiceException { + NamespacesRequest.Builder requestBuilder = NamespacesRequest.newBuilder(); + Optional.ofNullable( schemaPattern ).ifPresent( requestBuilder::setNamespacePattern ); + Optional.ofNullable( protoNamespaceType ).ifPresent( requestBuilder::setNamespaceType ); + + return rpc.searchNamespaces( requestBuilder.build(), timeout ).getNamespacesList(); + } + + + public List searchEntities( String namespace, String entityNamePattern, int timeout ) throws PrismInterfaceServiceException { + EntitiesRequest.Builder requestBuilder = EntitiesRequest.newBuilder(); + requestBuilder.setNamespaceName( namespace ); + Optional.ofNullable( entityNamePattern ).ifPresent( requestBuilder::setEntityPattern ); + + return rpc.searchEntities( requestBuilder.build(), timeout ).getEntitiesList(); + } + + + public List getTablesTypes( int timeout ) throws PrismInterfaceServiceException { + return rpc.getTableTypes( TableTypesRequest.newBuilder().build(), timeout ).getTableTypesList(); + } + + + public void setClientInfoProperties( Properties properties, int timeout ) throws PrismInterfaceServiceException { + 
ClientInfoProperties.Builder requestBuilder = ClientInfoProperties.newBuilder(); + properties.stringPropertyNames().forEach( s -> requestBuilder.putProperties( s, properties.getProperty( s ) ) ); + rpc.setClientInfoProperties( requestBuilder.build(), timeout ); + } + + + public List searchFunctions( String languageName, String functionCategory, int timeout ) throws PrismInterfaceServiceException { + FunctionsRequest functionsRequest = FunctionsRequest.newBuilder() + .setQueryLanguage( languageName ) + .setFunctionCategory( functionCategory ) + .build(); + + return rpc.searchFunctions( functionsRequest, timeout ).getFunctionsList(); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PrismInterfaceErrors.java b/src/main/java/org/polypheny/jdbc/PrismInterfaceErrors.java new file mode 100644 index 00000000..4fc20040 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/PrismInterfaceErrors.java @@ -0,0 +1,50 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +public enum PrismInterfaceErrors { + UNSPECIFIED( "UNSPECIFIED", -1 ), + DRIVER_THREADING_ERROR( "I1001", 1 ), + URL_PARSING_INVALID( "I2001", 2 ), + RESULT_TYPE_INVALID( "I3001", 3 ), + COLUMN_NOT_EXISTS( "42S22", 4 ), + COLUMN_ACCESS_ILLEGAL( "22003", 5 ), + OPERATION_ILLEGAL( "42000", 6 ), + MODIFICATION_NOT_PERMITTED( "2F002", 7 ), + VALUE_ILLEGAL( "22003", 8 ), + STREAM_ERROR( "I4001", 9 ), + WRAPPER_INCORRECT_TYPE( "I5001", 10 ), + CONNECTION_LOST( "08003", 11 ), + UDT_REACHED_END( "I4002", 12 ), + PARAMETER_NOT_EXISTS( "42000", 13 ), + OPTION_NOT_SUPPORTED( "0A000", 14 ), + DATA_TYPE_MISMATCH( "42S22", 17 ), + MISSING_INTERFACE( "I4003", 18 ), + UDT_CONSTRUCTION_FAILED( "I4003", 19 ), + ENTRY_NOT_EXISTS( "I5001", 20 ); + + + public final String state; + public final int errorCode; + + + PrismInterfaceErrors( String state, int errorCode ) { + this.state = state; + this.errorCode = errorCode; + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PrismInterfaceServiceException.java b/src/main/java/org/polypheny/jdbc/PrismInterfaceServiceException.java new file mode 100644 index 00000000..8f42b11c --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/PrismInterfaceServiceException.java @@ -0,0 +1,83 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.sql.SQLException; +import org.polypheny.prism.ErrorDetails; + +public class PrismInterfaceServiceException extends SQLException { + + public PrismInterfaceServiceException( PrismInterfaceErrors sqlError, String message ) { + this( message, sqlError.state, sqlError.errorCode ); + } + + + public PrismInterfaceServiceException( PrismInterfaceErrors sqlError, String message, Throwable cause ) { + this( message, sqlError.state, sqlError.errorCode, cause ); + } + + + public PrismInterfaceServiceException( String reason, String state, int errorCode ) { + super( reason, state, errorCode ); + } + + + public PrismInterfaceServiceException( String reason, String state ) { + super( reason, state ); + + } + + + public PrismInterfaceServiceException( String reason ) { + super( reason, PrismInterfaceErrors.UNSPECIFIED.state, PrismInterfaceErrors.UNSPECIFIED.errorCode ); + } + + + public PrismInterfaceServiceException() { + super(); + } + + + public PrismInterfaceServiceException( Throwable cause ) { + super( cause.getMessage(), PrismInterfaceErrors.UNSPECIFIED.state, PrismInterfaceErrors.UNSPECIFIED.errorCode, cause ); + } + + + public PrismInterfaceServiceException( String reason, Throwable cause ) { + super( reason, cause ); + } + + + public PrismInterfaceServiceException( String reason, String state, Throwable cause ) { + super( reason, state, cause ); + } + + + public PrismInterfaceServiceException( String reason, String state, int errorCode, Throwable cause ) { + super( reason, state, errorCode, cause ); + } + + + public PrismInterfaceServiceException( ErrorDetails errorDetails ) { + super( + errorDetails.hasMessage() ? errorDetails.getMessage() : "No message provided.", + errorDetails.hasState() ? errorDetails.getState() : PrismInterfaceErrors.UNSPECIFIED.state, + errorDetails.hasErrorCode() ? 
errorDetails.getErrorCode() : PrismInterfaceErrors.UNSPECIFIED.errorCode + ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/RemotePolyphenyMeta.java b/src/main/java/org/polypheny/jdbc/RemotePolyphenyMeta.java deleted file mode 100644 index b332b6ab..00000000 --- a/src/main/java/org/polypheny/jdbc/RemotePolyphenyMeta.java +++ /dev/null @@ -1,407 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.jdbc; - - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import org.apache.calcite.avatica.AvaticaConnection; -import org.apache.calcite.avatica.AvaticaUtils; -import org.apache.calcite.avatica.ColumnMetaData; -import org.apache.calcite.avatica.ConnectionPropertiesImpl; -import org.apache.calcite.avatica.Meta; -import org.apache.calcite.avatica.MetaImpl; -import org.apache.calcite.avatica.MissingResultsException; -import org.apache.calcite.avatica.NoSuchStatementException; -import org.apache.calcite.avatica.QueryState; -import org.apache.calcite.avatica.remote.Service; -import org.apache.calcite.avatica.remote.Service.CloseConnectionRequest; -import org.apache.calcite.avatica.remote.Service.CloseConnectionResponse; -import org.apache.calcite.avatica.remote.Service.CloseStatementRequest; -import org.apache.calcite.avatica.remote.Service.CommitRequest; -import 
org.apache.calcite.avatica.remote.Service.OpenConnectionRequest; -import org.apache.calcite.avatica.remote.Service.RollbackRequest; -import org.apache.calcite.avatica.remote.TypedValue; - - -/** - * see org.apache.calcite.avatica.remote.RemoteMeta - */ -class RemotePolyphenyMeta extends MetaImpl { - - final Service service; - final Map propsMap = new HashMap<>(); - private Map databaseProperties; - - - RemotePolyphenyMeta( final AvaticaConnection connection ) { - super( connection ); - this.service = connection.getService(); - } - - - RemotePolyphenyMeta( final AvaticaConnection connection, final Service service ) { - super( connection ); - this.service = service; - } - - - private MetaResultSet toResultSet( final Class clazz, final Service.ResultSetResponse response ) { - if ( response.updateCount != -1 ) { - return MetaResultSet.count( response.connectionId, response.statementId, response.updateCount ); - } - Signature signature0 = response.signature; - if ( signature0 == null ) { - final List columns = - clazz == null - ? 
Collections.emptyList() - : fieldMetaData( clazz ).columns; - - signature0 = Signature.create( columns, "?", Collections.emptyList(), response.signature.cursorFactory, Meta.StatementType.SELECT ); - } - return MetaResultSet.create( response.connectionId, response.statementId, response.ownStatement, signature0, response.firstFrame ); - } - - - @Override - public Map getDatabaseProperties( final ConnectionHandle connectionHandle ) { - synchronized ( this ) { - // Compute map on first use, and cache - if ( databaseProperties == null ) { - databaseProperties = service.apply( new Service.DatabasePropertyRequest( connectionHandle.id ) ).map; - } - return databaseProperties; - } - } - - - @Override - public StatementHandle createStatement( final ConnectionHandle connectionHandle ) { - return connection.invokeWithRetries( () -> { - // sync connection state if necessary - connectionSync( connectionHandle, new ConnectionPropertiesImpl() ); - final Service.CreateStatementResponse response = service.apply( new Service.CreateStatementRequest( connectionHandle.id ) ); - return new StatementHandle( response.connectionId, response.statementId, null ); - } ); - } - - - @Override - public void closeStatement( final StatementHandle statementHandle ) { - connection.invokeWithRetries( () -> service.apply( new CloseStatementRequest( statementHandle.connectionId, statementHandle.id ) ) ); - } - - - @Override - public void openConnection( final ConnectionHandle connectionHandle, final Map info ) { - connection.invokeWithRetries( () -> service.apply( new OpenConnectionRequest( connectionHandle.id, info ) ) ); - } - - - @Override - public void closeConnection( final ConnectionHandle connectionHandle ) { - connection.invokeWithRetries( () -> { - final CloseConnectionResponse response = service.apply( new CloseConnectionRequest( connectionHandle.id ) ); - propsMap.remove( connectionHandle.id ); - return response; - } ); - } - - - @Override - public ConnectionProperties connectionSync( final 
ConnectionHandle connectionHandle, final ConnectionProperties connProps ) { - return connection.invokeWithRetries( - () -> { - ConnectionPropertiesImpl localProps = propsMap.get( connectionHandle.id ); - if ( localProps == null ) { - localProps = new ConnectionPropertiesImpl(); - localProps.setDirty( true ); - propsMap.put( connectionHandle.id, localProps ); - } - - // Only make an RPC if necessary. RPC is necessary when we have local changes that need flushed to the server (be sure to introduce any new changes from connProps before - // checking AND when connProps.isEmpty() (meaning, this was a request for a value, not overriding a value). Otherwise, accumulate the change locally and return immediately. - if ( localProps.merge( connProps ).isDirty() && connProps.isEmpty() ) { - final Service.ConnectionSyncResponse response = service.apply( new Service.ConnectionSyncRequest( connectionHandle.id, localProps ) ); - propsMap.put( connectionHandle.id, (ConnectionPropertiesImpl) response.connProps ); - return response.connProps; - } else { - return localProps; - } - } ); - } - - - @Override - public MetaResultSet getCatalogs( final ConnectionHandle connectionHandle ) { - return connection.invokeWithRetries( () -> { - final Service.ResultSetResponse response = service.apply( new Service.CatalogsRequest( connectionHandle.id ) ); - return toResultSet( MetaCatalog.class, response ); - } ); - } - - - @Override - public MetaResultSet getSchemas( final ConnectionHandle connectionHandle, final String catalog, final Pat schemaPattern ) { - return connection.invokeWithRetries( () -> { - final Service.ResultSetResponse response = service.apply( new Service.SchemasRequest( connectionHandle.id, catalog, schemaPattern.s ) ); - return toResultSet( MetaSchema.class, response ); - } ); - } - - - @Override - public MetaResultSet getTables( final ConnectionHandle connectionHandle, final String catalog, final Pat schemaPattern, final Pat tableNamePattern, final List typeList ) { - return 
connection.invokeWithRetries( () -> { - final Service.ResultSetResponse response = service.apply( new Service.TablesRequest( connectionHandle.id, catalog, schemaPattern.s, tableNamePattern.s, typeList ) ); - return toResultSet( MetaTable.class, response ); - } ); - } - - - @Override - public MetaResultSet getTableTypes( final ConnectionHandle connectionHandle ) { - return connection.invokeWithRetries( () -> { - final Service.ResultSetResponse response = service.apply( new Service.TableTypesRequest( connectionHandle.id ) ); - return toResultSet( MetaTableType.class, response ); - } ); - } - - - @Override - public MetaResultSet getTypeInfo( final ConnectionHandle connectionHandle ) { - return connection.invokeWithRetries( () -> { - final Service.ResultSetResponse response = service.apply( new Service.TypeInfoRequest( connectionHandle.id ) ); - return toResultSet( MetaTypeInfo.class, response ); - } ); - } - - - @Override - public MetaResultSet getPrimaryKeys( final ConnectionHandle connectionHandle, final String catalog, final String schema, final String table ) { - return connection.invokeWithRetries( () -> { - final Service.ResultSetResponse response = service.apply( new Service.PrimaryKeysRequest( connectionHandle.id, catalog, schema, table ) ); - return toResultSet( MetaPrimaryKey.class, response ); - } ); - } - - - @Override - public MetaResultSet getImportedKeys( final ConnectionHandle connectionHandle, final String catalog, final String schema, final String table ) { - return connection.invokeWithRetries( () -> { - final Service.ResultSetResponse response = service.apply( new Service.ImportedKeysRequest( connectionHandle.id, catalog, schema, table ) ); - return toResultSet( MetaImportedKey.class, response ); - } ); - } - - - @Override - public MetaResultSet getExportedKeys( final ConnectionHandle connectionHandle, final String catalog, final String schema, final String table ) { - return connection.invokeWithRetries( () -> { - final Service.ResultSetResponse 
response = service.apply( new Service.ExportedKeysRequest( connectionHandle.id, catalog, schema, table ) ); - return toResultSet( MetaExportedKey.class, response ); - } ); - } - - - @Override - public MetaResultSet getIndexInfo( final ConnectionHandle connectionHandle, final String catalog, final String schema, final String table, final boolean unique, final boolean approximate ) { - return connection.invokeWithRetries( () -> { - final Service.ResultSetResponse response = service.apply( new Service.IndexInfoRequest( connectionHandle.id, catalog, schema, table, unique, approximate ) ); - return toResultSet( MetaIndexInfo.class, response ); - } ); - } - - - @Override - public MetaResultSet getColumns( final ConnectionHandle connectionHandle, final String catalog, final Pat schemaPattern, final Pat tableNamePattern, final Pat columnNamePattern ) { - return connection.invokeWithRetries( () -> { - final Service.ResultSetResponse response = service.apply( new Service.ColumnsRequest( connectionHandle.id, catalog, schemaPattern.s, tableNamePattern.s, columnNamePattern.s ) ); - return toResultSet( MetaColumn.class, response ); - } ); - } - - - @Override - public StatementHandle prepare( final ConnectionHandle connectionHandle, final String sql, final long maxRowCount ) { - return connection.invokeWithRetries( () -> { - connectionSync( connectionHandle, new ConnectionPropertiesImpl() ); // sync connection state if necessary - final Service.PrepareResponse response = service.apply( new Service.PrepareRequest( connectionHandle.id, sql, maxRowCount ) ); - return response.statement; - } ); - } - - - @Override - public ExecuteResult prepareAndExecute( final StatementHandle statementHandle, final String sql, final long maxRowCount, final PrepareCallback callback ) throws NoSuchStatementException { - // The old semantics were that maxRowCount was also treated as the maximum number of elements in the first Frame of results. 
A value of -1 would also preserve this, but an - // explicit (positive) number is easier to follow, IMO. - return prepareAndExecute( statementHandle, sql, maxRowCount, AvaticaUtils.toSaturatedInt( maxRowCount ), callback ); - } - - - @Override - public ExecuteResult prepareAndExecute( final StatementHandle statementHandle, final String sql, final long maxRowCount, int maxRowsInFirstFrame, final PrepareCallback callback ) throws NoSuchStatementException { - try { - return connection.invokeWithRetries( () -> { - // sync connection state if necessary - connectionSync( new ConnectionHandle( statementHandle.connectionId ), new ConnectionPropertiesImpl() ); - final Service.ExecuteResponse response; - try { - synchronized ( callback.getMonitor() ) { - callback.clear(); - response = service.apply( new Service.PrepareAndExecuteRequest( statementHandle.connectionId, statementHandle.id, sql, maxRowCount ) ); - if ( response.missingStatement ) { - throw new RuntimeException( new NoSuchStatementException( statementHandle ) ); //NOSONAR "squid:S00112" - Justification: The RuntimeException is an envelope for the NoSuchStatementException - } - if ( !response.results.isEmpty() ) { - final Service.ResultSetResponse result = response.results.get( 0 ); - callback.assign( result.signature, result.firstFrame, result.updateCount ); - } - } - callback.execute(); - List metaResultSets = new ArrayList<>(); - for ( Service.ResultSetResponse result : response.results ) { - metaResultSets.add( toResultSet( null, result ) ); - } - return new ExecuteResult( metaResultSets ); - } catch ( SQLException e ) { - throw new RuntimeException( e ); //NOSONAR "squid:S00112" - Justification: The RuntimeException is an envelope for the SQLException - } - } ); - } catch ( RuntimeException e ) { - Throwable cause = e.getCause(); - if ( cause instanceof NoSuchStatementException ) { - throw (NoSuchStatementException) cause; - } - throw e; - } - } - - - @Override - public Frame fetch( final StatementHandle 
statementHandle, final long offset, final int fetchMaxRowCount ) throws NoSuchStatementException, MissingResultsException { - try { - return connection.invokeWithRetries( () -> { - final Service.FetchResponse response = service.apply( new Service.FetchRequest( statementHandle.connectionId, statementHandle.id, offset, fetchMaxRowCount ) ); - if ( response.missingStatement ) { - throw new RuntimeException( new NoSuchStatementException( statementHandle ) ); //NOSONAR "squid:S00112" - Justification: The RuntimeException is an envelope for the NoSuchStatementException - } - if ( response.missingResults ) { - throw new RuntimeException( new MissingResultsException( statementHandle ) ); //NOSONAR "squid:S00112" - Justification: The RuntimeException is an envelope for the MissingResultsException - } - return response.frame; - } ); - } catch ( RuntimeException e ) { - Throwable cause = e.getCause(); - if ( cause instanceof NoSuchStatementException ) { - throw (NoSuchStatementException) cause; - } else if ( cause instanceof MissingResultsException ) { - throw (MissingResultsException) cause; - } - throw e; - } - } - - - @Override - public ExecuteResult execute( final StatementHandle statementHandle, final List parameterValues, final long maxRowCount ) throws NoSuchStatementException { - return execute( statementHandle, parameterValues, AvaticaUtils.toSaturatedInt( maxRowCount ) ); - } - - - @Override - public ExecuteResult execute( final StatementHandle statementHandle, final List parameterValues, final int maxRowsInFirstFrame ) throws NoSuchStatementException { - try { - return connection.invokeWithRetries( () -> { - final Service.ExecuteResponse response = service.apply( new Service.ExecuteRequest( statementHandle, parameterValues, maxRowsInFirstFrame ) ); - - if ( response.missingStatement ) { - throw new RuntimeException( new NoSuchStatementException( statementHandle ) ); //NOSONAR "squid:S00112" - Justification: The RuntimeException is an envelope for the 
NoSuchStatementException - } - - List metaResultSets = new ArrayList<>(); - for ( Service.ResultSetResponse result : response.results ) { - metaResultSets.add( toResultSet( null, result ) ); - } - - return new ExecuteResult( metaResultSets ); - } ); - } catch ( RuntimeException e ) { - Throwable cause = e.getCause(); - if ( cause instanceof NoSuchStatementException ) { - throw (NoSuchStatementException) cause; - } - throw e; - } - } - - - @Override - public boolean syncResults( final StatementHandle statementHandle, final QueryState state, final long offset ) throws NoSuchStatementException { - try { - return connection.invokeWithRetries( () -> { - final Service.SyncResultsResponse response = service.apply( new Service.SyncResultsRequest( statementHandle.connectionId, statementHandle.id, state, offset ) ); - if ( response.missingStatement ) { - throw new RuntimeException( new NoSuchStatementException( statementHandle ) ); //NOSONAR "squid:S00112" - Justification: The RuntimeException is an envelope for the NoSuchStatementException - } - return response.moreResults; - } ); - } catch ( RuntimeException e ) { - Throwable cause = e.getCause(); - if ( cause instanceof NoSuchStatementException ) { - throw (NoSuchStatementException) cause; - } - throw e; - } - } - - - @Override - public void commit( final ConnectionHandle connectionHandle ) { - connection.invokeWithRetries( () -> service.apply( new CommitRequest( connectionHandle.id ) ) ); - } - - - @Override - public void rollback( final ConnectionHandle connectionHandle ) { - connection.invokeWithRetries( () -> service.apply( new RollbackRequest( connectionHandle.id ) ) ); - } - - - @Override - public ExecuteBatchResult prepareAndExecuteBatch( final StatementHandle statementHandle, final List sqlCommands ) { - return connection.invokeWithRetries( () -> { - Service.ExecuteBatchResponse response = service.apply( new Service.PrepareAndExecuteBatchRequest( statementHandle.connectionId, statementHandle.id, sqlCommands ) ); - 
return new ExecuteBatchResult( response.updateCounts ); - } ); - } - - - @Override - public ExecuteBatchResult executeBatch( final StatementHandle statementHandle, final List> parameterValues ) { - return connection.invokeWithRetries( () -> { - Service.ExecuteBatchResponse response = service.apply( new Service.ExecuteBatchRequest( statementHandle.connectionId, statementHandle.id, parameterValues ) ); - return new ExecuteBatchResult( response.updateCounts ); - } ); - } - -} diff --git a/src/main/java/org/polypheny/jdbc/ResultFetcher.java b/src/main/java/org/polypheny/jdbc/ResultFetcher.java new file mode 100644 index 00000000..dcc01a50 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/ResultFetcher.java @@ -0,0 +1,80 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.util.List; +import lombok.Getter; +import lombok.Setter; +import org.polypheny.jdbc.properties.PolyphenyResultSetProperties; +import org.polypheny.jdbc.types.TypedValue; +import org.polypheny.jdbc.utils.TypedValueUtils; +import org.polypheny.prism.Frame; +import org.polypheny.prism.Frame.ResultCase; +import org.polypheny.prism.Row; + +public class ResultFetcher implements Runnable { + + private PrismInterfaceClient client; + private int statementId; + @Setter + @Getter + private PolyphenyResultSetProperties properties; + private int fetchTimeout; + private long totalFetched; + @Setter + @Getter + private boolean isLast; + @Getter + private List> fetchedValues; + + + public ResultFetcher( PrismInterfaceClient client, int statementId, PolyphenyResultSetProperties properties, long totalFetched, int fetchTimeout ) { + this.fetchTimeout = fetchTimeout; + this.client = client; + this.statementId = statementId; + this.properties = properties; + this.totalFetched = totalFetched; + this.isLast = false; + } + + + @Override + public void run() { + long fetchEnd = totalFetched + properties.getStatementFetchSize(); + Frame nextFrame; + try { + nextFrame = client.fetchResult( statementId, properties.getFetchSize(), fetchTimeout ); + } catch ( PrismInterfaceServiceException e ) { + throw new RuntimeException( e ); + } + if ( nextFrame.getResultCase() != ResultCase.RELATIONAL_FRAME ) { + throw new RuntimeException( new PrismInterfaceServiceException( "Illegal result type." 
) ); + } + List rows = nextFrame.getRelationalFrame().getRowsList(); + if ( properties.getLargeMaxRows() != 0 && fetchEnd > properties.getLargeMaxRows() ) { + long rowEndIndex = properties.getLargeMaxRows() - totalFetched; + if ( rowEndIndex > Integer.MAX_VALUE ) { + throw new RuntimeException( "Should never be thrown" ); + } + rows = rows.subList( 0, (int) rowEndIndex ); + } + fetchedValues = TypedValueUtils.buildRows( rows ); + totalFetched = totalFetched + rows.size(); + isLast = nextFrame.getIsLast(); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/RpcService.java b/src/main/java/org/polypheny/jdbc/RpcService.java new file mode 100644 index 00000000..2c60110e --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/RpcService.java @@ -0,0 +1,456 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import java.io.EOFException; +import java.io.IOException; +import java.nio.channels.ClosedChannelException; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicLong; +import lombok.extern.slf4j.Slf4j; +import org.polypheny.jdbc.transport.Transport; +import org.polypheny.jdbc.utils.CallbackQueue; +import org.polypheny.prism.ClientInfoProperties; +import org.polypheny.prism.ClientInfoPropertiesRequest; +import org.polypheny.prism.ClientInfoPropertiesResponse; +import org.polypheny.prism.CloseResultRequest; +import org.polypheny.prism.CloseResultResponse; +import org.polypheny.prism.CloseStatementRequest; +import org.polypheny.prism.CloseStatementResponse; +import org.polypheny.prism.CommitRequest; +import org.polypheny.prism.CommitResponse; +import org.polypheny.prism.ConnectionCheckRequest; +import org.polypheny.prism.ConnectionCheckResponse; +import org.polypheny.prism.ConnectionPropertiesUpdateRequest; +import org.polypheny.prism.ConnectionPropertiesUpdateResponse; +import org.polypheny.prism.ConnectionRequest; +import org.polypheny.prism.ConnectionResponse; +import org.polypheny.prism.DbmsVersionRequest; +import org.polypheny.prism.DbmsVersionResponse; +import org.polypheny.prism.DefaultNamespaceRequest; +import org.polypheny.prism.DefaultNamespaceResponse; +import org.polypheny.prism.DisconnectRequest; +import org.polypheny.prism.DisconnectResponse; +import org.polypheny.prism.EntitiesRequest; +import org.polypheny.prism.EntitiesResponse; +import org.polypheny.prism.ExecuteIndexedStatementBatchRequest; +import org.polypheny.prism.ExecuteIndexedStatementRequest; +import org.polypheny.prism.ExecuteUnparameterizedStatementBatchRequest; +import 
org.polypheny.prism.ExecuteUnparameterizedStatementRequest; +import org.polypheny.prism.FetchRequest; +import org.polypheny.prism.Frame; +import org.polypheny.prism.FunctionsRequest; +import org.polypheny.prism.FunctionsResponse; +import org.polypheny.prism.MetaStringResponse; +import org.polypheny.prism.NamespacesRequest; +import org.polypheny.prism.NamespacesResponse; +import org.polypheny.prism.PrepareStatementRequest; +import org.polypheny.prism.PreparedStatementSignature; +import org.polypheny.prism.ProceduresRequest; +import org.polypheny.prism.ProceduresResponse; +import org.polypheny.prism.Request; +import org.polypheny.prism.Request.TypeCase; +import org.polypheny.prism.Response; +import org.polypheny.prism.RollbackRequest; +import org.polypheny.prism.RollbackResponse; +import org.polypheny.prism.SqlKeywordsRequest; +import org.polypheny.prism.SqlNumericFunctionsRequest; +import org.polypheny.prism.SqlStringFunctionsRequest; +import org.polypheny.prism.SqlSystemFunctionsRequest; +import org.polypheny.prism.SqlTimeDateFunctionsRequest; +import org.polypheny.prism.StatementBatchResponse; +import org.polypheny.prism.StatementResponse; +import org.polypheny.prism.StatementResult; +import org.polypheny.prism.TableTypesRequest; +import org.polypheny.prism.TableTypesResponse; +import org.polypheny.prism.TypesRequest; +import org.polypheny.prism.TypesResponse; + +@Slf4j +public class RpcService { + + private final AtomicLong idCounter = new AtomicLong( 1 ); + private final Transport con; + private final Thread service; + private boolean closed = false; + private boolean disconnectSent = false; + private IOException error = null; + private final Map> callbacks = new ConcurrentHashMap<>(); + private final Map> callbackQueues = new ConcurrentHashMap<>(); + + + RpcService( Transport con ) { + this.con = con; + this.service = new Thread( this::readResponses, "PrismInterfaceResponseHandler" ); + this.service.start(); + } + + + void close() { + closed = true; + 
con.close(); + try { + service.join(); + } catch ( InterruptedException e ) { + log.warn( "Could not join response handler", e ); + } + } + + + private Request.Builder newMessage() { + long id = idCounter.getAndIncrement(); + return Request.newBuilder().setId( id ); + } + + + private void sendMessage( Request req ) throws IOException { + if ( this.error != null ) { + synchronized ( this ) { + IOException e = this.error; + this.error = null; + throw e; + } + } + if ( this.closed ) { + throw new IOException( "Connection is closed" ); + } + con.sendMessage( req.toByteArray() ); + } + + + private Response receiveMessage() throws IOException { + return Response.parseFrom( con.receiveMessage() ); + } + + + private void readResponses() { + try { + while ( true ) { + Response resp = receiveMessage(); + if ( resp.getId() == 0 ) { + throw new RuntimeException( "Invalid message id" ); + } + CompletableFuture c = callbacks.get( resp.getId() ); + if ( c == null ) { + CallbackQueue cq = callbackQueues.get( resp.getId() ); + if ( cq != null ) { + if ( resp.hasErrorResponse() ) { + callbackQueues.remove( resp.getId() ); + cq.onError( new PrismInterfaceServiceException( resp.getErrorResponse().getMessage() ) ); + } else { + cq.onNext( resp ); + if ( resp.getLast() ) { + callbackQueues.remove( resp.getId() ); + cq.onCompleted(); + } + } + } else { + if ( log.isDebugEnabled() ) { + log.info( "No callback for response of type {}", resp.getTypeCase() ); + } + } + continue; + } + if ( resp.getLast() ) { + callbacks.remove( resp.getId() ); + } + c.complete( resp ); + } + } catch ( EOFException | ClosedChannelException e ) { + this.closed = true; + callbacks.forEach( ( id, c ) -> c.completeExceptionally( e ) ); + callbackQueues.forEach( ( id, cq ) -> cq.onError( e ) ); + } catch ( IOException e ) { // Communicate this to ProtoInterfaceClient + this.closed = true; + callbacks.forEach( ( id, c ) -> c.completeExceptionally( e ) ); + callbackQueues.forEach( ( id, cq ) -> cq.onError( e ) ); + 
/* For Windows */ + if ( e.getMessage().contains( "An existing connection was forcibly closed by the remote host" ) && disconnectSent ) { + return; + } + // This will cause the exception to be thrown when the next call is made + // TODO: Is this good enough, or should the program be alerted sooner? + this.error = e; + throw new RuntimeException( e ); + } catch ( Throwable t ) { + this.closed = true; + callbacks.forEach( ( id, c ) -> c.completeExceptionally( t ) ); + callbackQueues.forEach( ( id, cq ) -> cq.onError( t ) ); + log.error( "Unhandled exception", t ); + throw t; + } + } + + + private Response waitForCompletion( CompletableFuture f, int timeout ) throws PrismInterfaceServiceException { + try { + if ( timeout == 0 ) { + return f.get(); + } else { + return f.get( timeout, TimeUnit.MILLISECONDS ); + } + } catch ( ExecutionException | InterruptedException | TimeoutException e ) { + throw new PrismInterfaceServiceException( e ); + } + } + + + private Response completeSynchronously( Request.Builder req, int timeout ) throws PrismInterfaceServiceException { + try { + CompletableFuture f = new CompletableFuture<>(); + callbacks.put( req.getId(), f ); + if ( req.getTypeCase() == TypeCase.DISCONNECT_REQUEST ) { + disconnectSent = true; + } + sendMessage( req.build() ); + Response resp = waitForCompletion( f, timeout ); + if ( resp.hasErrorResponse() ) { + throw new PrismInterfaceServiceException( resp.getErrorResponse().getMessage() ); + } + return resp; + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( e ); + } + } + + + ConnectionResponse connect( ConnectionRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setConnectionRequest( msg ); + return completeSynchronously( req, timeout ).getConnectionResponse(); + } + + + ConnectionCheckResponse checkConnection( ConnectionCheckRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + 
req.setConnectionCheckRequest( msg ); + return completeSynchronously( req, timeout ).getConnectionCheckResponse(); + } + + + ConnectionPropertiesUpdateResponse updateConnectionProperties( ConnectionPropertiesUpdateRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setConnectionPropertiesUpdateRequest( msg ); + return completeSynchronously( req, timeout ).getConnectionPropertiesUpdateResponse(); + } + + + DbmsVersionResponse getDbmsVersion( DbmsVersionRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setDbmsVersionRequest( msg ); + return completeSynchronously( req, timeout ).getDbmsVersionResponse(); + } + + + DefaultNamespaceResponse defaultNamespaceRequest( DefaultNamespaceRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setDefaultNamespaceRequest( msg ); + return completeSynchronously( req, timeout ).getDefaultNamespaceResponse(); + } + + + TableTypesResponse getTableTypes( TableTypesRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setTableTypesRequest( msg ); + return completeSynchronously( req, timeout ).getTableTypesResponse(); + } + + + TypesResponse getTypes( TypesRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setTypesRequest( msg ); + return completeSynchronously( req, timeout ).getTypesResponse(); + } + + + ProceduresResponse searchProcedures( ProceduresRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setProceduresRequest( msg ); + return completeSynchronously( req, timeout ).getProceduresResponse(); + } + + + FunctionsResponse searchFunctions( FunctionsRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setFunctionsRequest( msg ); + 
return completeSynchronously( req, timeout ).getFunctionsResponse(); + } + + + NamespacesResponse searchNamespaces( NamespacesRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setNamespacesRequest( msg ); + return completeSynchronously( req, timeout ).getNamespacesResponse(); + } + + + EntitiesResponse searchEntities( EntitiesRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setEntitiesRequest( msg ); + return completeSynchronously( req, timeout ).getEntitiesResponse(); + } + + + ClientInfoPropertiesResponse setClientInfoProperties( ClientInfoProperties msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setSetClientInfoPropertiesRequest( msg ); + return completeSynchronously( req, timeout ).getSetClientInfoPropertiesResponse(); + } + + + ClientInfoProperties getClientInfoProperties( ClientInfoPropertiesRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setClientInfoPropertiesRequest( msg ); + return completeSynchronously( req, timeout ).getClientInfoPropertiesResponse(); + } + + + MetaStringResponse getSqlStringFunctions( SqlStringFunctionsRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setSqlStringFunctionsRequest( msg ); + return completeSynchronously( req, timeout ).getSqlStringFunctionsResponse(); + } + + + MetaStringResponse getSqlSystemFunctions( SqlSystemFunctionsRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setSqlSystemFunctionsRequest( msg ); + return completeSynchronously( req, timeout ).getSqlSystemFunctionsResponse(); + } + + + MetaStringResponse getSqlTimeDateFunctions( SqlTimeDateFunctionsRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + 
req.setSqlTimeDateFunctionsRequest( msg ); + return completeSynchronously( req, timeout ).getSqlTimeDateFunctionsResponse(); + } + + + MetaStringResponse getSqlNumericFunctions( SqlNumericFunctionsRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setSqlNumericFunctionsRequest( msg ); + return completeSynchronously( req, timeout ).getSqlNumericFunctionsResponse(); + } + + + MetaStringResponse getSqlKeywords( SqlKeywordsRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setSqlKeywordsRequest( msg ); + return completeSynchronously( req, timeout ).getSqlKeywordsResponse(); + } + + + DisconnectResponse disconnect( DisconnectRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setDisconnectRequest( msg ); + try { + return completeSynchronously( req, timeout ).getDisconnectResponse(); + } catch ( PrismInterfaceServiceException e ) { + /* For Windows */ + if ( e.getMessage().contains( "An existing connection was forcibly closed by the remote host" ) ) { + return DisconnectResponse.newBuilder().build(); + } + throw e; + } + } + + + CommitResponse commit( CommitRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setCommitRequest( msg ); + return completeSynchronously( req, timeout ).getCommitResponse(); + } + + + RollbackResponse rollback( RollbackRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setRollbackRequest( msg ); + return completeSynchronously( req, timeout ).getRollbackResponse(); + } + + + void executeUnparameterizedStatement( ExecuteUnparameterizedStatementRequest msg, CallbackQueue callback ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setExecuteUnparameterizedStatementRequest( msg ); + try { + callbackQueues.put( req.getId(), 
callback ); + sendMessage( req.build() ); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( e ); + } + } + + + void executeUnparameterizedStatementBatch( ExecuteUnparameterizedStatementBatchRequest msg, CallbackQueue callback ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setExecuteUnparameterizedStatementBatchRequest( msg ); + try { + callbackQueues.put( req.getId(), callback ); + sendMessage( req.build() ); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( e ); + } + } + + + PreparedStatementSignature prepareIndexedStatement( PrepareStatementRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setPrepareIndexedStatementRequest( msg ); + return completeSynchronously( req, timeout ).getPreparedStatementSignature(); + } + + + StatementResult executeIndexedStatement( ExecuteIndexedStatementRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setExecuteIndexedStatementRequest( msg ); + return completeSynchronously( req, timeout ).getStatementResult(); + } + + + StatementBatchResponse executeIndexedStatementBatch( ExecuteIndexedStatementBatchRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setExecuteIndexedStatementBatchRequest( msg ); + return completeSynchronously( req, timeout ).getStatementBatchResponse(); + } + + + Frame fetchResult( FetchRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setFetchRequest( msg ); + return completeSynchronously( req, timeout ).getFrame(); + } + + + CloseStatementResponse closeStatement( CloseStatementRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setCloseStatementRequest( msg ); + return completeSynchronously( req, timeout 
).getCloseStatementResponse(); + } + + + CloseResultResponse closeResult( CloseResultRequest msg, int timeout ) throws PrismInterfaceServiceException { + Request.Builder req = newMessage(); + req.setCloseResultRequest( msg ); + return completeSynchronously( req, timeout ).getCloseResultResponse(); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/Scrollable.java b/src/main/java/org/polypheny/jdbc/Scrollable.java new file mode 100644 index 00000000..97e2cba8 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/Scrollable.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +public interface Scrollable { + + void fetchAllAndSync() throws InterruptedException; + + boolean next() throws PrismInterfaceServiceException; + + T current(); + + void close(); + + boolean isBeforeFirst(); + + boolean isAfterLast(); + + boolean isFirst(); + + boolean isLast(); + + int getRow(); + + boolean hasCurrent(); + +} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcPreparedStatement.java b/src/main/java/org/polypheny/jdbc/meta/GenericMetaContainer.java similarity index 64% rename from src/main/java/org/polypheny/jdbc/PolyphenyJdbcPreparedStatement.java rename to src/main/java/org/polypheny/jdbc/meta/GenericMetaContainer.java index 087a2744..3c16c0c4 100644 --- a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcPreparedStatement.java +++ b/src/main/java/org/polypheny/jdbc/meta/GenericMetaContainer.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2020 The Polypheny Project + * Copyright 2019-2024 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,9 +14,20 @@ * limitations under the License. */ -package org.polypheny.jdbc; +package org.polypheny.jdbc.meta; +public class GenericMetaContainer { -public interface PolyphenyJdbcPreparedStatement extends java.sql.PreparedStatement { + Object[] values; + + + public GenericMetaContainer( Object... 
values ) { + this.values = values; + } + + + public Object getValue( int valueIndex ) { + return values[valueIndex]; + } } diff --git a/src/main/java/org/polypheny/jdbc/meta/MetaResultSetBuilder.java b/src/main/java/org/polypheny/jdbc/meta/MetaResultSetBuilder.java new file mode 100644 index 00000000..07e53ecb --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/MetaResultSetBuilder.java @@ -0,0 +1,426 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.meta; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; +import org.polypheny.jdbc.PolyphenyResultSet; +import org.polypheny.jdbc.types.TypedValue; +import org.polypheny.prism.ClientInfoPropertyMeta; +import org.polypheny.prism.Column; +import org.polypheny.prism.ForeignKey; +import org.polypheny.prism.Function; +import org.polypheny.prism.Index; +import org.polypheny.prism.Namespace; +import org.polypheny.prism.PrimaryKey; +import org.polypheny.prism.Procedure; +import org.polypheny.prism.Table; +import org.polypheny.prism.TableType; +import org.polypheny.prism.Type; +import org.polypheny.prism.UserDefinedType; + +public class MetaResultSetBuilder { + + private static PolyphenyResultSet buildEmptyResultSet( String entityName, List> metaResultSetParameters ) throws SQLException { + List columnMetas = buildMetas( entityName, metaResultSetParameters ); + List> rows = new ArrayList<>(); + return new PolyphenyResultSet( columnMetas, rows ); + } + + + private static PolyphenyResultSet buildResultSet( String entityName, List messages, List> metaResultSetParameters ) throws SQLException { + List columnMetas = buildMetas( entityName, metaResultSetParameters ); + List> rows = buildRows( messages, metaResultSetParameters ); + return new PolyphenyResultSet( columnMetas, rows ); + } + + + private static List buildMetas( String entityName, List> metaResultSetParameters ) { + AtomicInteger ordinal = new AtomicInteger(); + return metaResultSetParameters.stream() + .map( p -> PolyphenyColumnMeta.fromSpecification( ordinal.getAndIncrement(), p.getName(), entityName, p.getJdbcType() ) ) + .collect( Collectors.toList() ); + } + + + private static List> buildRows( List messages, List> metaResultSetParameters ) 
throws SQLException { + List> arrayLists = new ArrayList<>(); + for ( T p : messages ) { + arrayLists.add( buildRow( p, metaResultSetParameters ) ); + } + return arrayLists; + } + + + private static List buildRow( T message, List> metaResultSetParameters ) throws SQLException { + List typedValues = new ArrayList<>(); + for ( MetaResultSetParameter p : metaResultSetParameters ) { + TypedValue typedValue = p.retrieveFrom( message ); + typedValues.add( typedValue ); + } + return typedValues; + } + + + public static ResultSet buildFromTables( List tables ) throws SQLException { + // jdbc standard about tables: Rows are ordered by TABLE_TYPE, TABLE_CAT, TABLE_SCHEM and TABLE_NAME ascending + tables = tables.stream().sorted( MetaResultSetComparators.TABLE_COMPARATOR ).collect( Collectors.toList() ); + return buildResultSet( + "TABLES", + tables, + MetaResultSetSignatures.TABLE_SIGNATURE + ); + } + + + public static ResultSet buildFromTableTypes( List tableTypes ) throws SQLException { + return buildResultSet( + "TABLE_TYPES", + tableTypes, + MetaResultSetSignatures.TABLE_TYPE_SIGNATURE + ); + } + + + public static ResultSet buildFromNamespaces( List namespaces ) throws SQLException { + // jdbc standard about schemas: Rows are ordered by TABLE_CATALOG and TABLE_SCHEM ascending + namespaces = namespaces.stream().sorted( MetaResultSetComparators.NAMESPACE_COMPARATOR ).collect( Collectors.toList() ); + return buildResultSet( + "NAMESPACES", + namespaces, + MetaResultSetSignatures.NAMESPACE_SIGNATURE + ); + } + + + public static ResultSet buildFromColumns( List columns ) throws SQLException { + // jdbc standard about columns: Rows are ordered by TABLE_CAT, TABLE_SCHEM, TABLE_NAME, and ORDINAL_POSITION + columns = columns.stream().sorted( MetaResultSetComparators.COLUMN_COMPARATOR ).collect( Collectors.toList() ); + return buildResultSet( + "COLUMNS", + columns, + MetaResultSetSignatures.COLUMN_SIGNATURE + ); + } + + + public static ResultSet buildFromPrimaryKeys( List 
primaryKeys ) throws SQLException { + List metaColumns = primaryKeys.stream() + .map( MetaResultSetBuilder::expandPrimaryKey ) + .flatMap( List::stream ) + .sorted( MetaResultSetComparators.PRIMARY_KEY_COMPARATOR ) // jdbc standard about primary keys: Rows are ordered by COLUMN_NAME ascending + .collect( Collectors.toList() ); + + return buildResultSet( + "PRIMARY_KEYS", + metaColumns, + MetaResultSetSignatures.PRIMARY_KEY_GMC_SIGNATURE + ); + } + + + private static List expandPrimaryKey( PrimaryKey primaryKey ) { + // sequenceIndexes start with 1 in jdbc + AtomicInteger sequenceIndex = new AtomicInteger( 1 ); + return primaryKey.getColumnsList().stream().map( c -> new GenericMetaContainer( + c.getNamespaceName(), + c.getTableName(), + c.getColumnName(), + sequenceIndex.getAndIncrement(), + null + ) ).collect( Collectors.toList() ); + } + + + public static ResultSet buildFromDatabases( String defaultNamespace ) throws SQLException { + // jdbc standard about catalogs: Rows are ordered by TABLE_CAT ascending + return buildResultSet( + "CATALOGS", + Collections.singletonList( defaultNamespace ), + MetaResultSetSignatures.CATALOG_SIGNATURE + ); + } + + + public static ResultSet buildFromImportedKeys( List foreignKeys ) throws SQLException { + // jdbc standard about imported keys: Rows are ordered by PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME, and KEY_SEQ ascending + return buildFromForeignKeys( foreignKeys, "IMPORTED_KEYS", MetaResultSetComparators.IMPORTED_KEYS_COMPARATOR ); + } + + + public static ResultSet buildFromExportedKeys( List foreignKeys ) throws SQLException { + // jdbc standard about exported keys: Rows are ordered by PKTABLE_NAME, PKCOLUMN_NAME, FKTABLE_NAME, and FKEY_SEQ ascending + return buildFromForeignKeys( foreignKeys, "EXPORTED_KEYS", MetaResultSetComparators.EXPORTED_KEYS_COMPARATOR ); + } + + + public static ResultSet buildFromCrossReference( List foreignKeys ) throws SQLException { + // jdbc standard about primary keys: Rows are ordered by 
COLUMN_NAME ascending + return buildFromForeignKeys( foreignKeys, "CROSS_REFERENCE", MetaResultSetComparators.CROSS_REFERENCE_COMPARATOR ); + } + + + private static ResultSet buildFromForeignKeys( List foreignKeys, String entityName, Comparator comparator ) throws SQLException { + List metaColumns = foreignKeys.stream() + .map( MetaResultSetBuilder::expandForeignKey ) + .flatMap( List::stream ) + .sorted( comparator ) + .collect( Collectors.toList() ); + + return buildResultSet( + entityName, + metaColumns, + MetaResultSetSignatures.FOREIGN_KEY_GMC_SIGNATURE + ); + } + + + private static List expandForeignKey( ForeignKey foreignKey ) { + // key sequences start with 1 in jdbc + AtomicInteger sequenceIndex = new AtomicInteger( 1 ); + return foreignKey.getForeignColumnsList().stream().map( c -> new GenericMetaContainer( + foreignKey.getReferencedNamespaceName(), + foreignKey.getReferencedTableName(), + c.getNamespaceName(), + c.getTableName(), + c.getColumnName(), + sequenceIndex.getAndIncrement(), + foreignKey.getUpdateRule(), + foreignKey.getDeleteRule(), + foreignKey.getKeyName() + ) ).collect( Collectors.toList() ); + } + + + public static ResultSet buildFromTypes( List types ) throws SQLException { + // jdbc standard about type info: Rows are ordered by DATA_TYPE ascending + types = types.stream().sorted( MetaResultSetComparators.TYPE_INFO_COMPARATOR ).collect( Collectors.toList() ); + return buildResultSet( + "TYPE_INFO", + types, + MetaResultSetSignatures.TYPE_SIGNATURE + ); + } + + + public static ResultSet buildFromIndexes( List indexes ) throws SQLException { + List metaColumns = indexes.stream() + .map( MetaResultSetBuilder::expandIndex ) + .flatMap( List::stream ) + .sorted( MetaResultSetComparators.INDEX_COMPARATOR ) // jdbc standard about indexes: Rows are ordered by NON_UNIQUE, INDEX_NAME, and ORDINAL_POSITION ascending + .collect( Collectors.toList() ); + + return buildResultSet( + "INDEX_INFO", + metaColumns, + 
MetaResultSetSignatures.INDEX_GMC_SIGNATURE + ); + } + + + private static List expandIndex( Index index ) { + AtomicInteger ordinalPosition = new AtomicInteger( 1 ); + return index.getColumnsList().stream().map( c -> new GenericMetaContainer( + index.getNamespaceName(), + index.getTableName(), + !index.getUnique(), // jdbc lists non uniqueness + index.getIndexName(), + ordinalPosition.getAndIncrement(), + c.getColumnName(), + index.getLocation(), + index.getIndexType() + ) ).collect( Collectors.toList() ); + } + + + public static ResultSet buildFromProcedures( List procedures ) throws SQLException { + // This creates an empty dummy result set because the requested information does not exist on the server side. + return buildEmptyResultSet( + "PROCEDURES", + MetaResultSetSignatures.PROCEDURE_SIGNATURE + ); + } + + + public static ResultSet buildFromProcedureColumns() throws SQLException { + return buildEmptyResultSet( + "PROCEDURE_COLUMNS", + MetaResultSetSignatures.PROCEDURE_COLUMN_EMPTY_SIGNATURE + ); + } + + + public static ResultSet buildFromColumnPrivileges( List columns, String userName ) throws SQLException { + List columnPrivileges = columns.stream() + .map( c -> createDummyColumnPrivileges( c, userName ) ) + .flatMap( List::stream ) + .sorted( MetaResultSetComparators.COLUMN_PRIVILEGE_COMPARATOR ) //jdbc standard on column privileges: Rows are ordered by COLUMN_NAME and PRIVILEGE + .collect( Collectors.toList() ); + + return buildResultSet( + "COLUMN_PRIVILEGES", + columnPrivileges, + MetaResultSetSignatures.COLUMN_PRIVILEGES_GMC_SIGNATURE + ); + } + + + private static List createDummyColumnPrivileges( Column colum, String userName ) { + // This method is used to create a dummy full rights result set for a column because the requested information does not exist on the server side. 
+ List accessRights = Arrays.asList( "INSERT", "REFERENCE", "SELECT", "UPDATE" ); + return accessRights.stream().map( a -> new GenericMetaContainer( + colum.getNamespaceName(), + colum.getTableName(), + colum.getColumnName(), + null, + userName, + a, + "NO" + ) ).collect( Collectors.toList() ); + } + + + public static ResultSet buildFromTablePrivileges( List
tables, String userName ) throws SQLException { + List tablePrivileges = tables.stream() + .map( t -> createDummyTablePrivileges( t, userName ) ) + .flatMap( List::stream ) + .sorted( MetaResultSetComparators.TABLE_PRIVILEGE_COMPARATOR ) //jdbc standard on column privileges: Rows are ordered by TABLE_CAT, TABLE_SCHEM, TABLE_NAME, and PRIVILEGE + .collect( Collectors.toList() ); + + return buildResultSet( + "TABLE_PRIVILEGES", + tablePrivileges, + MetaResultSetSignatures.TABLE_PRIVILEGES_GMC_SIGNATURE + ); + } + + + private static List createDummyTablePrivileges( Table table, String userName ) { + // This method is used to create a dummy full rights result set for a table because the requested information does not exist on the server side. + List accessRights = Arrays.asList( "SELECT", "INSERT", "UPDATE", "DELETE", "REFERENCE" ); + return accessRights.stream().map( a -> new GenericMetaContainer( + table.getNamespaceName(), + table.getTableName(), + null, + userName, + a, + "NO" + ) ).collect( Collectors.toList() ); + } + + + public static ResultSet buildFromVersionColumns( List columns ) throws SQLException { + return buildResultSet( + "VERSION_COLUMNS", + columns, + MetaResultSetSignatures.VERSION_COLUMN_SIGNATURE + ); + } + + + public static ResultSet buildFromSuperTypes() throws SQLException { + return buildEmptyResultSet( + "SUPER_TYPES", + MetaResultSetSignatures.SUPER_TYPES_EMPTY_SIGNATURE + ); + } + + + public static ResultSet buildFromSuperTables() throws SQLException { + return buildEmptyResultSet( + "SUPER_TABLES", + MetaResultSetSignatures.SUPER_TABLES_EMPTY_SIGNATURE + ); + } + + + public static ResultSet buildFromAttributes() throws SQLException { + return buildEmptyResultSet( + "ATTRIBUTES", + MetaResultSetSignatures.ATTRIBUTES_EMPTY_SIGNATURE + ); + } + + + public static ResultSet buildFromClientInfoPropertyMetas( List metas ) throws SQLException { + // jdbc standard about client info properties: Rows are ordered by NAME ascending + // metas.sort() 
cant be used here as protobuf messages return unmodifiable lists + metas = metas.stream().sorted( MetaResultSetComparators.CLIENT_INFO_PROPERTY_COMPARATOR ).collect( Collectors.toList() ); + return buildResultSet( + "CLIENT_INFO_PROPERTIES", + metas, + MetaResultSetSignatures.CLIENT_INFO_PROPERTY_SIGNATURE + ); + } + + + public static ResultSet buildFromPseudoColumns( List columns ) throws SQLException { + // jdbc standard about pseudo columns: Rows are ordered by TABLE_CAT,TABLE_SCHEM, TABLE_NAME and COLUMN_NAME. + columns = columns.stream().sorted( MetaResultSetComparators.PSEUDO_COLUMN_COMPARATOR ).collect( Collectors.toList() ); + return buildResultSet( + "PSEUDO_COLUMNS", + columns, + MetaResultSetSignatures.PSEUDO_COLUMN_SIGNATURE + ); + } + + + public static ResultSet fromBestRowIdentifiers( List columns ) throws SQLException { + // sorting can be ignored as the key is not supported by polypheny and thus set to a constant manually in the client + return buildResultSet( + "BEST_ROW_IDENTIFIERS", + columns, + MetaResultSetSignatures.BEST_ROW_IDENTIFIER_SIGNATURE + ); + } + + + public static ResultSet buildFromUserDefinedTypes( List userDefinedTypes ) throws SQLException { + return buildEmptyResultSet( + "USER_DEFINED_TYPES", + MetaResultSetSignatures.USER_DEFINED_TYPE_EMPTY_SIGNATURE + ); + } + + + public static ResultSet fromFunctions( List functions ) throws SQLException { + // jdbc standard about functions: Rows are ordered by FUNCTION_CAT, FUNCTION_SCHEM, FUNCTION_NAME and SPECIFIC_NAME ascending + functions = functions.stream().sorted( MetaResultSetComparators.FUNCTION_COMPARATOR ).collect( Collectors.toList() ); + return buildResultSet( + "FUNCTIONS", + functions, + MetaResultSetSignatures.FUNCTION_SIGNATURE + ); + + } + + + public static ResultSet buildFromFunctionColumns() throws SQLException { + return buildEmptyResultSet( + "FUNCTION_COLUMNS", + MetaResultSetSignatures.FUNCTION_COLUMN_EMPTY_SIGNATURE + ); + } + +} diff --git 
a/src/main/java/org/polypheny/jdbc/meta/MetaResultSetComparators.java b/src/main/java/org/polypheny/jdbc/meta/MetaResultSetComparators.java new file mode 100644 index 00000000..2f39aab4 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/MetaResultSetComparators.java @@ -0,0 +1,76 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.meta; + +import java.util.Comparator; +import org.polypheny.jdbc.utils.TypedValueUtils; +import org.polypheny.prism.ClientInfoPropertyMeta; +import org.polypheny.prism.Column; +import org.polypheny.prism.Function; +import org.polypheny.prism.Namespace; +import org.polypheny.prism.Table; +import org.polypheny.prism.Type; + +public class MetaResultSetComparators { + + public static final Comparator TYPE_INFO_COMPARATOR = Comparator + .comparing( t -> TypedValueUtils.getJdbcTypeFromPolyTypeName( t.getTypeName() ) ); + public static final Comparator NAMESPACE_COMPARATOR = Comparator + .comparing( Namespace::getNamespaceName ); + public static final Comparator PRIMARY_KEY_COMPARATOR = Comparator + .comparing( g -> (String) (g.getValue( 2 )) ); + public static final Comparator INDEX_COMPARATOR = Comparator + .comparing( ( GenericMetaContainer g ) -> (Boolean) (g.getValue( 2 )) ) + .thenComparing( ( GenericMetaContainer g ) -> (int) (g.getValue( 7 )) ) + .thenComparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 3 )) ) + .thenComparing( ( 
GenericMetaContainer g ) -> (Integer) (g.getValue( 4 )) ); + public static final Comparator IMPORTED_KEYS_COMPARATOR = Comparator + .comparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 0 )) ) + .thenComparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 1 )) ) + .thenComparing( ( GenericMetaContainer g ) -> (Integer) (g.getValue( 5 )) ); + public static final Comparator EXPORTED_KEYS_COMPARATOR = Comparator + .comparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 4 )) ) + .thenComparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 5 )) ) + .thenComparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 6 )) ) + .thenComparing( ( GenericMetaContainer g ) -> (Integer) (g.getValue( 8 )) ); + // Both use the same ordering according to JDBC standard + public static final Comparator CROSS_REFERENCE_COMPARATOR = EXPORTED_KEYS_COMPARATOR; + public static final Comparator FUNCTION_COMPARATOR = Comparator + .comparing( Function::getName ); + public static final Comparator COLUMN_COMPARATOR = Comparator + .comparing( Column::getNamespaceName ) + .thenComparing( Column::getTableName ) + .thenComparing( Column::getColumnIndex ); + public static final Comparator
TABLE_COMPARATOR = Comparator + .comparing( Table::getTableType ) + .thenComparing( Table::getNamespaceName ) + .thenComparing( Table::getTableName ); + public static final Comparator PSEUDO_COLUMN_COMPARATOR = Comparator + .comparing( Column::getNamespaceName ) + .thenComparing( Column::getTableName ) + .thenComparing( Column::getColumnName ); + public static final Comparator CLIENT_INFO_PROPERTY_COMPARATOR = Comparator + .comparing( ClientInfoPropertyMeta::getKey ); + public static final Comparator TABLE_PRIVILEGE_COMPARATOR = Comparator + .comparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 0 )) ) + .thenComparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 1 )) ) + .thenComparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 2 )) ) + .thenComparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 5 )) ); + public static final Comparator COLUMN_PRIVILEGE_COMPARATOR = Comparator + .comparing( ( GenericMetaContainer g ) -> (String) (g.getValue( 6 )) ); + +} diff --git a/src/main/java/org/polypheny/jdbc/meta/MetaResultSetParameter.java b/src/main/java/org/polypheny/jdbc/meta/MetaResultSetParameter.java new file mode 100644 index 00000000..b4b52d5c --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/MetaResultSetParameter.java @@ -0,0 +1,45 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.meta; + +import java.sql.SQLException; +import java.util.function.Function; +import lombok.Getter; +import org.polypheny.jdbc.types.TypedValue; + +class MetaResultSetParameter { + + @Getter + private final String name; + @Getter + private final int jdbcType; + @Getter + private final Function accessFunction; + + + MetaResultSetParameter( String name, int jdbcType, Function accessor ) { + this.name = name; + this.jdbcType = jdbcType; + this.accessFunction = accessor; + } + + + TypedValue retrieveFrom( T message ) throws SQLException { + return TypedValue.fromObject( accessFunction.apply( message ), jdbcType ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/meta/MetaResultSetSignatures.java b/src/main/java/org/polypheny/jdbc/meta/MetaResultSetSignatures.java new file mode 100644 index 00000000..3253ee3f --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/MetaResultSetSignatures.java @@ -0,0 +1,392 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.meta; + +import java.sql.DatabaseMetaData; +import java.sql.PseudoColumnUsage; +import java.sql.Types; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Function; +import org.apache.commons.lang3.ObjectUtils; +import org.polypheny.jdbc.utils.TypedValueUtils; +import org.polypheny.prism.ClientInfoPropertyMeta; +import org.polypheny.prism.Column; +import org.polypheny.prism.Namespace; +import org.polypheny.prism.Procedure; +import org.polypheny.prism.Table; +import org.polypheny.prism.TableType; +import org.polypheny.prism.Type; + +public class MetaResultSetSignatures { + + // Used as a placeholder for accessors in empty result sets + private static final Function DUMMY_ACCESSOR = a -> "Dummy value: Accessor not implemented"; + + + public static final List> TABLE_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "TABLE_SCHEM", Types.VARCHAR, Table::getNamespaceName ), + new MetaResultSetParameter<>( "TABLE_NAME", Types.VARCHAR, Table::getTableName ), + new MetaResultSetParameter<>( "TABLE_TYPE", Types.VARCHAR, Table::getTableType ), + new MetaResultSetParameter<>( "REMARKS", Types.VARCHAR, p -> "" ), + new MetaResultSetParameter<>( "TYPE_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "TYPE_SCHEM", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "TYPE_NAME", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "SELF_REFERENCING_COL_NAME", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "REF_GENERATION", Types.VARCHAR, p -> null ) + ); + + public static final List> TABLE_TYPE_SIGNATURE = Collections.singletonList( + new MetaResultSetParameter<>( "TABLE_TYPE", Types.VARCHAR, TableType::getTableType ) + ); + + public static final List> NAMESPACE_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_SCHEM", Types.VARCHAR, 
Namespace::getNamespaceName ), + new MetaResultSetParameter<>( "TABLE_CATALOG", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "SCHEMA_TYPE", Types.VARCHAR, nullIfFalse( Namespace::getNamespaceType, Namespace::hasNamespaceType ) ) + ); + + public static final List> COLUMN_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "TABLE_SCHEM", Types.VARCHAR, Column::getNamespaceName ), + new MetaResultSetParameter<>( "TABLE_NAME", Types.VARCHAR, Column::getTableName ), + new MetaResultSetParameter<>( "COLUMN_NAME", Types.VARCHAR, Column::getColumnName ), + new MetaResultSetParameter<>( "DATA_TYPE", Types.INTEGER, p -> TypedValueUtils.getJdbcTypeFromPolyTypeName( p.getTypeName() ) ), + new MetaResultSetParameter<>( "TYPE_NAME", Types.VARCHAR, Column::getTypeName ), + new MetaResultSetParameter<>( "COLUMN_SIZE", Types.INTEGER, nullIfFalse( Column::getTypeLength, Column::hasTypeLength ) ), + new MetaResultSetParameter<>( "BUFFER_LENGTH", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "DECIMAL_DIGITS", Types.INTEGER, nullIfFalse( convertScale( Column::getTypeScale ), Column::hasTypeScale ) ), + new MetaResultSetParameter<>( "NUM_PREC_RADIX", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "NULLABLE", Types.INTEGER, p -> p.getIsNullable() ? 
1 : 0 ), + new MetaResultSetParameter<>( "REMARKS", Types.VARCHAR, p -> "" ), + new MetaResultSetParameter<>( "COLUMN_DEF", Types.VARCHAR, nullIfFalse( Column::getDefaultValueAsString, Column::hasDefaultValueAsString ) ), + new MetaResultSetParameter<>( "SQL_DATA_TYPE", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "SQL_DATETIME_SUB", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "CHAR_OCTET_LENGTH", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "ORDINAL_POSITION", Types.INTEGER, Column::getColumnIndex ), + new MetaResultSetParameter<>( "IS_NULLABLE", Types.VARCHAR, p -> p.getIsNullable() ? "YES" : "NO" ), + new MetaResultSetParameter<>( "SCOPE_CATALOG", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "SCOPE_SCHEMA", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "SCOPE_TABLE", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "SOURCE_DATA_TYPE", Types.SMALLINT, p -> null ), + new MetaResultSetParameter<>( "IS_AUTOINCREMENT", Types.VARCHAR, p -> "NO" ), + new MetaResultSetParameter<>( "IS_GENERATEDCOLUMN", Types.VARCHAR, p -> "NO" ), + new MetaResultSetParameter<>( "COLLATION", Types.VARCHAR, nullIfFalse( Column::getCollation, Column::hasCollation ) ) + ); + + public static final List> PRIMARY_KEY_GMC_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "TABLE_SCHEM", Types.VARCHAR, p -> p.getValue( 0 ) ), + new MetaResultSetParameter<>( "TABLE_NAME", Types.VARCHAR, p -> p.getValue( 1 ) ), + new MetaResultSetParameter<>( "COLUMN_NAME", Types.VARCHAR, p -> p.getValue( 2 ) ), + new MetaResultSetParameter<>( "KEY_SEQ", Types.SMALLINT, p -> p.getValue( 3 ) ), + new MetaResultSetParameter<>( "PK_NAME", Types.VARCHAR, p -> p.getValue( 4 ) ) + ); + + // This signature uses the term catalog as this is what jdbc calls the results in the result set generated. 
+ public static final List> CATALOG_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_CAT", Types.VARCHAR, s -> "APP" ), + new MetaResultSetParameter<>( "DEFAULT_SCHEMA", Types.VARCHAR, s -> s ) + ); + + public static final List> FOREIGN_KEY_GMC_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "PKTABLE_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "PKTABLE_SCHEM", Types.VARCHAR, p -> p.getValue( 0 ) ), + new MetaResultSetParameter<>( "PKTABLE_NAME", Types.VARCHAR, p -> p.getValue( 1 ) ), + new MetaResultSetParameter<>( "PKCOLUMN_NAME", Types.VARCHAR, p -> null ), // TODO: This is not standard compliant! + new MetaResultSetParameter<>( "FKTABLE_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "FKTABLE_SCHEM", Types.VARCHAR, p -> p.getValue( 2 ) ), + new MetaResultSetParameter<>( "FKTABLE_NAME", Types.VARCHAR, p -> p.getValue( 3 ) ), + new MetaResultSetParameter<>( "FKCOLUMN_NAME", Types.VARCHAR, p -> p.getValue( 4 ) ), + new MetaResultSetParameter<>( "KEY_SEQ", Types.SMALLINT, p -> p.getValue( 5 ) ), + new MetaResultSetParameter<>( "UPDATE_RULE", Types.SMALLINT, p -> p.getValue( 6 ) ), + new MetaResultSetParameter<>( "DELETE_RULE", Types.SMALLINT, p -> p.getValue( 7 ) ), + new MetaResultSetParameter<>( "FK_NAME", Types.VARCHAR, p -> p.getValue( 8 ) ), + new MetaResultSetParameter<>( "PK_NAME", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "DEFERRABILITY", Types.SMALLINT, p -> null ) + ); + + public static final List> TYPE_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TYPE_NAME", Types.VARCHAR, Type::getTypeName ), + new MetaResultSetParameter<>( "DATA_TYPE", Types.INTEGER, t -> TypedValueUtils.getJdbcTypeFromPolyTypeName( t.getTypeName() ) ), + new MetaResultSetParameter<>( "PRECISION", Types.INTEGER, Type::getPrecision ), + new MetaResultSetParameter<>( "LITERAL_PREFIX", Types.VARCHAR, nullIfFalse( Type::getLiteralPrefix, Type::hasLiteralPrefix ) ), + new 
MetaResultSetParameter<>( "LITERAL_SUFFIX", Types.VARCHAR, nullIfFalse( Type::getLiteralSuffix, Type::hasLiteralSuffix ) ), + new MetaResultSetParameter<>( "CREATE_PARAMS", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "NULLABLE", Types.SMALLINT, p -> DatabaseMetaData.typeNullable ), + new MetaResultSetParameter<>( "CASE_SENSITIVE", Types.BOOLEAN, Type::getIsCaseSensitive ), + new MetaResultSetParameter<>( "SEARCHABLE", Types.SMALLINT, integerAsShort( Type::getIsSearchable ) ), + new MetaResultSetParameter<>( "UNSIGNED_ATTRIBUTE", Types.BOOLEAN, p -> false ), + new MetaResultSetParameter<>( "FIXED_PREC_SCALE", Types.BOOLEAN, p -> false ), + new MetaResultSetParameter<>( "AUTO_INCREMENT", Types.BOOLEAN, Type::getIsAutoIncrement ), + new MetaResultSetParameter<>( "LOCAL_TYPE_NAME", Types.VARCHAR, Type::getTypeName ), + new MetaResultSetParameter<>( "MINIMUM_SCALE", Types.SMALLINT, convertScale( Type::getMinScale ) ), + new MetaResultSetParameter<>( "MAXIMUM_SCALE", Types.SMALLINT, convertScale( Type::getMaxScale ) ), + new MetaResultSetParameter<>( "SQL_DATA_TYPE", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "SQL_DATETIME_SUB", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "NUM_PREC_RADIX", Types.INTEGER, Type::getRadix ) + ); + + public static final List> INDEX_GMC_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "TABLE_SCHEM", Types.VARCHAR, p -> p.getValue( 0 ) ), + new MetaResultSetParameter<>( "TABLE_NAME", Types.VARCHAR, p -> p.getValue( 1 ) ), + new MetaResultSetParameter<>( "NON_UNIQUE", Types.BOOLEAN, p -> p.getValue( 2 ) ), + new MetaResultSetParameter<>( "INDEX_QUALIFIER", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "INDEX_NAME", Types.VARCHAR, p -> p.getValue( 3 ) ), + new MetaResultSetParameter<>( "TYPE", Types.TINYINT, p -> 0 ), + new MetaResultSetParameter<>( "ORDINAL_POSITION", Types.TINYINT, integerAsShort( p 
-> p.getValue( 4 ) ) ), + new MetaResultSetParameter<>( "COLUMN_NAME", Types.VARCHAR, p -> p.getValue( 5 ) ), + new MetaResultSetParameter<>( "ASC_OR_DESC", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "CARDINALITY", Types.BIGINT, p -> (long) -1 ), + new MetaResultSetParameter<>( "PAGES", Types.BIGINT, p -> null ), + new MetaResultSetParameter<>( "FILTER_CONDITION", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "LOCATION", Types.INTEGER, p -> p.getValue( 6 ) ), + new MetaResultSetParameter<>( "INDEX_TYPE", Types.INTEGER, p -> p.getValue( 7 ) ) + ); + + public static final List> PROCEDURE_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "PROCEDURE_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "PROCEDURE_SCHEM", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "PROCEDURE_NAME", Types.VARCHAR, Procedure::getTrivialName ), + new MetaResultSetParameter<>( "reserved for future use", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "reserved for future use", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "reserved for future use", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "REMARKS", Types.VARCHAR, Procedure::getDescription ), + new MetaResultSetParameter<>( "PROCEDURE_TYPE", Types.TINYINT, Procedure::getReturnTypeValue ), + new MetaResultSetParameter<>( "SPECIFIC_NAME", Types.VARCHAR, Procedure::getUniqueName ) + ); + + + // Used to build an EMPTY result set thus no types and accessors are specified. 
+ public static final List> PROCEDURE_COLUMN_EMPTY_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "PROCEDURE_CAT", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "PROCEDURE_SCHEM", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "PROCEDURE_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "COLUMN_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "COLUMN_TYPE", Types.TINYINT, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "DATA_TYPE", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "TYPE_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "PRECISION", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "LENGTH", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SCALE", Types.TINYINT, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "RADIX", Types.TINYINT, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "NULLABLE", Types.TINYINT, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "REMARKS", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "COLUMN_DEF", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SQL_DATA_TYPE", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SQL_DATETIME_SUB", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "CHAR_OCTET_LENGTH", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "ORDINAL_POSITION", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "IS_NULLABLE", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SPECIFIC_NAME", Types.VARCHAR, DUMMY_ACCESSOR ) + ); + + public static final List> COLUMN_PRIVILEGES_GMC_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "TABLE_SCHEM", Types.VARCHAR, p -> p.getValue( 0 ) ), + new MetaResultSetParameter<>( "TABLE_NAME", Types.VARCHAR, p -> p.getValue( 1 ) 
), + new MetaResultSetParameter<>( "COLUMN_NAME", Types.VARCHAR, p -> p.getValue( 2 ) ), + new MetaResultSetParameter<>( "GRANTOR", Types.VARCHAR, p -> p.getValue( 3 ) ), + new MetaResultSetParameter<>( "GRANTEE", Types.VARCHAR, p -> p.getValue( 4 ) ), + new MetaResultSetParameter<>( "PRIVILEGE", Types.VARCHAR, p -> p.getValue( 5 ) ), + new MetaResultSetParameter<>( "IS_GRANTABLE", Types.VARCHAR, p -> p.getValue( 6 ) ) + ); + + public static final List> TABLE_PRIVILEGES_GMC_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_CAT", Types.VARCHAR, p -> p.getValue( 0 ) ), + new MetaResultSetParameter<>( "TABLE_SCHEM", Types.VARCHAR, p -> p.getValue( 1 ) ), + new MetaResultSetParameter<>( "TABLE_NAME", Types.VARCHAR, p -> p.getValue( 2 ) ), + new MetaResultSetParameter<>( "GRANTOR", Types.VARCHAR, p -> p.getValue( 3 ) ), + new MetaResultSetParameter<>( "GRANTEE ", Types.VARCHAR, p -> p.getValue( 4 ) ), + new MetaResultSetParameter<>( "PRIVILEGE", Types.VARCHAR, p -> p.getValue( 5 ) ), + new MetaResultSetParameter<>( "IS_GRANTABLE", Types.VARCHAR, p -> p.getValue( 6 ) ) + ); + + public static final List> VERSION_COLUMN_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "SCOPE", Types.TINYINT, p -> null ), + new MetaResultSetParameter<>( "COLUMN_NAME", Types.VARCHAR, Column::getColumnName ), + new MetaResultSetParameter<>( "DATA_TYPE", Types.INTEGER, p -> TypedValueUtils.getJdbcTypeFromPolyTypeName( p.getTypeName() ) ), + new MetaResultSetParameter<>( "TYPE_NAME", Types.VARCHAR, Column::getTypeName ), + new MetaResultSetParameter<>( "COLUMN_SIZE", Types.INTEGER, Column::getTypeLength ), + new MetaResultSetParameter<>( "BUFFER_LENGTH", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "DECIMAL_DIGITS", Types.TINYINT, nullIfFalse( convertScale( Column::getTypeScale ), Column::hasTypeScale ) ), + new MetaResultSetParameter<>( "PSEUDO_COLUMN", Types.TINYINT, p -> p.getIsHidden() + ? 
DatabaseMetaData.versionColumnPseudo + : DatabaseMetaData.versionColumnNotPseudo ) + ); + + // Used to build an EMPTY result set thus no types and accessors are specified. + public static final List> SUPER_TYPES_EMPTY_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TYPE_CAT", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "TYPE_SCHEM", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "TYPE_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SUPERTYPE_CAT", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SUPERTYPE_SCHEM", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SUPERTYPE_NAME", Types.VARCHAR, DUMMY_ACCESSOR ) + ); + + public static final List> SUPER_TABLES_EMPTY_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_CAT", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "TABLE_SCHEM", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "TABLE_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SUPERTABLE_NAME", Types.VARCHAR, DUMMY_ACCESSOR ) + ); + + public static final List> ATTRIBUTES_EMPTY_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_CAT", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "TABLE_SCHEM", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "TABLE_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "ATTR_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "DATA_TYPE", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "ATTR_TYPE_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "ATTR_SIZE", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "DECIMAL_DIGITS", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "NUM_PREC_RADIX", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "NULLABLE", Types.VARCHAR, 
DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "REMARKS", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "ATTR_DEF", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SQL_DATA_TYPE", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SQL_DATETIME_SUB", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "CHAR_OCTET_LENGTH", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "ORDINAL_POSITION", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "IS_NULLABLE", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SCOPE_CATALOG", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SCOPE_SCHEMA", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SCOPE_TABLE", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SOURCE_DATA_TYPE", Types.VARCHAR, DUMMY_ACCESSOR ) + ); + + public static final List> CLIENT_INFO_PROPERTY_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "NAME", Types.VARCHAR, ClientInfoPropertyMeta::getKey ), + new MetaResultSetParameter<>( "MAX_LEN", Types.VARCHAR, ClientInfoPropertyMeta::getMaxlength ), + new MetaResultSetParameter<>( "DEFAULT_VALUE", Types.VARCHAR, ClientInfoPropertyMeta::getDefaultValue ), + new MetaResultSetParameter<>( "DESCRIPTION", Types.VARCHAR, ClientInfoPropertyMeta::getDescription ) + ); + + public static final List> PSEUDO_COLUMN_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TABLE_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "TABLE_SCHEM", Types.VARCHAR, Column::getNamespaceName ), + new MetaResultSetParameter<>( "TABLE_NAME", Types.VARCHAR, Column::getTableName ), + new MetaResultSetParameter<>( "COLUMN_NAME", Types.VARCHAR, Column::getColumnName ), + new MetaResultSetParameter<>( "DATA_TYPE", Types.VARCHAR, p -> TypedValueUtils.getJdbcTypeFromPolyTypeName( p.getTypeName() ) ), + new MetaResultSetParameter<>( "COLUMN_SIZE", Types.INTEGER, 
nullIfFalse( Column::getTypeLength, Column::hasTypeLength ) ), + new MetaResultSetParameter<>( "DECIMAL_DIGITS", Types.INTEGER, nullIfFalse( convertScale( Column::getTypeScale ), Column::hasTypeScale ) ), + new MetaResultSetParameter<>( "NUM_PREC_RADIX", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "COLUMN_USAGE", Types.VARCHAR, p -> PseudoColumnUsage.USAGE_UNKNOWN ), + new MetaResultSetParameter<>( "REMARKS", Types.VARCHAR, p -> "" ), + new MetaResultSetParameter<>( "CHAR_OCTET_LENGTH", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "IS_NULLABLE", Types.VARCHAR, p -> p.getIsNullable() ? "YES" : "NO" ) + ); + + public static final List> BEST_ROW_IDENTIFIER_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "SCOPE", Types.SMALLINT, integerAsShort( p -> DatabaseMetaData.bestRowSession ) ), + new MetaResultSetParameter<>( "COLUMN_NAME", Types.VARCHAR, Column::getColumnName ), + new MetaResultSetParameter<>( "DATA_TYPE", Types.INTEGER, p -> TypedValueUtils.getJdbcTypeFromPolyTypeName( p.getTypeName() ) ), + new MetaResultSetParameter<>( "TYPE_NAME", Types.VARCHAR, Column::getTypeName ), + new MetaResultSetParameter<>( "COLUMN_SIZE", Types.INTEGER, nullIfFalse( Column::getTypeLength, Column::hasTypeLength ) ), + new MetaResultSetParameter<>( "BUFFER_LENGTH", Types.INTEGER, p -> null ), + new MetaResultSetParameter<>( "DECIMAL_DIGITS", Types.SMALLINT, nullIfFalse( convertScale( Column::getTypeScale ), Column::hasTypeScale ) ), + new MetaResultSetParameter<>( "PSEUDO_COLUMN", Types.SMALLINT, p -> p.getIsHidden() + ? DatabaseMetaData.bestRowPseudo + : DatabaseMetaData.bestRowNotPseudo + ) + ); + + // Used to build an EMPTY result set thus no types and accessors are specified. 
+ public static final List> USER_DEFINED_TYPE_EMPTY_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "TYPE_CAT", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "TYPE_SCHEM", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "TYPE_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "CLASS_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "DATA_TYPE", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "REMARKS", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "BASE_TYPE", Types.SMALLINT, DUMMY_ACCESSOR ) + ); + + // Used to build an EMPTY result set thus no types and accessors are specified. + public static final List> FUNCTION_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "FUNCTION_CAT", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "FUNCTION_SCHEM", Types.VARCHAR, p -> null ), + new MetaResultSetParameter<>( "FUNCTION_NAME", Types.VARCHAR, org.polypheny.prism.Function::getName ), + new MetaResultSetParameter<>( "REMARKS", Types.VARCHAR, org.polypheny.prism.Function::getSyntax ), + new MetaResultSetParameter<>( "FUNCTION_TYPE", Types.SMALLINT, p -> p.getIsTableFunction() + ? DatabaseMetaData.functionReturnsTable + : DatabaseMetaData.functionNoTable ), + new MetaResultSetParameter<>( "REMARKS", Types.VARCHAR, org.polypheny.prism.Function::getName ) + ); + + // Used to build an EMPTY result set thus no types and accessors are specified. 
+ public static final List> FUNCTION_COLUMN_EMPTY_SIGNATURE = Arrays.asList( + new MetaResultSetParameter<>( "FUNCTION_CAT", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "FUNCTION_SCHEM", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "FUNCTION_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "COLUMN_NAME", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "COLUMN_TYPE", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "DATA_TYPE", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "TYPE_NAME", Types.SMALLINT, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "PRECISION", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "LENGTH", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SCALE", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "RADIX", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "NULLABLE", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "REMARKS", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "CHAR_OCTET_LENGTH", Types.SMALLINT, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "ORDINAL_POSITION", Types.INTEGER, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "IS_NULLABLE", Types.VARCHAR, DUMMY_ACCESSOR ), + new MetaResultSetParameter<>( "SPECIFIC_NAME", Types.SMALLINT, DUMMY_ACCESSOR ) + ); + + + private static Function nullIfFalse( Function accessor, Function booleanFunction ) { + return message -> { + if ( booleanFunction.apply( message ) ) { + return accessor.apply( message ); + } + return null; + }; + } + + + private static Function integerAsShort( Function accessor ) { + return message -> { + Object value = accessor.apply( message ); + if ( value instanceof Integer ) { + return ((Integer) value).shortValue(); + } + throw new IllegalArgumentException( "Can't convert this value to a short" ); + }; + } + + + private static Function 
convertScale( Function accessor ) { + return message -> { + Object value = accessor.apply( message ); + if ( !(value instanceof Integer) ) { + throw new IllegalArgumentException( "Can't convert this value to a short" ); + } + Integer integer = (Integer) value; + if ( integer == -1 ) { + return 0; + } + return integer.shortValue(); + }; + } + +} diff --git a/src/main/java/org/polypheny/jdbc/meta/MetaScroller.java b/src/main/java/org/polypheny/jdbc/meta/MetaScroller.java new file mode 100644 index 00000000..94aab549 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/MetaScroller.java @@ -0,0 +1,223 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.meta; + +import java.util.List; +import org.polypheny.jdbc.BidirectionalScrollable; +import org.polypheny.jdbc.PrismInterfaceServiceException; + +public class MetaScroller implements BidirectionalScrollable { + + private static final int CURSOR_BEFORE_DATA = -1; + + private final List data; + private T current; + private int currentIndex; + + + public MetaScroller( List rows ) { + this.data = rows; + this.current = null; + this.currentIndex = CURSOR_BEFORE_DATA; + } + + + private int rowToIndex( int row ) { + return row - 1; + } + + + private int indexToRow( int index ) { + return index + 1; + } + + + @Override + public void fetchAllAndSync() { + } + + + @Override + public boolean absolute( int rowIndex ) { + if ( rowIndex == 0 ) { + current = null; + currentIndex = CURSOR_BEFORE_DATA; + return false; + } + if ( rowIndex > 0 ) { + current = null; + currentIndex = rowToIndex( rowIndex ); + if ( currentIndex >= data.size() ) { + currentIndex = data.size(); + return false; + } + current = data.get( currentIndex ); + return true; + } + return accessFromBack( rowIndex ); + } + + + private boolean accessFromBack( int rowIndex ) { + current = null; + currentIndex = data.size() + rowIndex; + if ( currentIndex > CURSOR_BEFORE_DATA ) { + current = data.get( currentIndex ); + return true; + } + currentIndex = CURSOR_BEFORE_DATA; + return false; + } + + + @Override + public boolean relative( int offset ) { + current = null; + currentIndex += offset; + if ( currentIndex < 0 ) { + currentIndex = CURSOR_BEFORE_DATA; + return false; + } + if ( currentIndex >= data.size() ) { + currentIndex = data.size(); + return false; + } + current = data.get( currentIndex ); + return true; + } + + + @Override + public boolean previous() { + current = null; + currentIndex--; + if ( currentIndex > CURSOR_BEFORE_DATA && currentIndex < data.size() ) { + current = data.get( currentIndex ); + return true; + } + currentIndex = CURSOR_BEFORE_DATA; + return false; + } 
+ + + @Override + public void beforeFirst() { + current = null; + currentIndex = CURSOR_BEFORE_DATA; + } + + + @Override + public void afterLast() { + current = null; + currentIndex = data.size(); + } + + + @Override + public boolean first() { + current = null; + currentIndex = CURSOR_BEFORE_DATA; + if ( data.isEmpty() ) { + return false; + } + currentIndex = 0; + current = data.get( currentIndex ); + return true; + } + + + @Override + public boolean last() { + current = null; + currentIndex = CURSOR_BEFORE_DATA; + if ( data.isEmpty() ) { + return false; + } + currentIndex = data.size() - 1; + current = data.get( currentIndex ); + return true; + } + + + @Override + public boolean next() throws PrismInterfaceServiceException { + current = null; + currentIndex++; + if ( currentIndex >= data.size() ) { + currentIndex = data.size(); + return false; + } + current = data.get( currentIndex ); + return true; + } + + + @Override + public T current() { + return current; + } + + + @Override + public void close() { + // Used to close any open streams to or from this scrollable. There are none. 
+ } + + + @Override + public boolean isBeforeFirst() { + return currentIndex == CURSOR_BEFORE_DATA; + } + + + @Override + public boolean isAfterLast() { + return data.isEmpty() || currentIndex >= data.size(); + } + + + @Override + public boolean isFirst() { + return currentIndex == 0; + } + + + @Override + public boolean isLast() { + return currentIndex == data.size() - 1; + } + + + @Override + public int getRow() { + if ( currentIndex < 0 ) { + return 0; + } + if ( currentIndex >= data.size() ) { + return 0; + } + return indexToRow( currentIndex ); + } + + + @Override + public boolean hasCurrent() { + return current != null; + } + +} diff --git a/src/main/java/org/polypheny/jdbc/meta/MetaUtils.java b/src/main/java/org/polypheny/jdbc/meta/MetaUtils.java new file mode 100644 index 00000000..d81809ef --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/MetaUtils.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.meta; + +import java.util.List; +import java.util.stream.Collectors; +import org.polypheny.prism.ColumnMeta; +import org.polypheny.prism.ParameterMeta; + +public class MetaUtils { + + public static List buildColumnMetas( List protoColumnMetas ) { + return protoColumnMetas.stream().map( PolyphenyColumnMeta::new ).collect( Collectors.toList() ); + } + + + public static List buildParameterMetas( List protoParameterMetas ) { + return protoParameterMetas.stream().map( PolyphenyParameterMeta::new ).collect( Collectors.toList() ); + } + + + public enum NamespaceTypes { + RELATIONAL, + GRAPH, + DOCUMENT + } + + + public static String convertToRegex( String jdbcPattern ) { + return jdbcPattern.replace( "_", "(.)" ).replace( "%", "(.*)" ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/meta/PolyphenyColumnMeta.java b/src/main/java/org/polypheny/jdbc/meta/PolyphenyColumnMeta.java new file mode 100644 index 00000000..c029549d --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/PolyphenyColumnMeta.java @@ -0,0 +1,144 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.meta; + +import java.sql.JDBCType; +import java.sql.ResultSetMetaData; +import lombok.Getter; +import org.apache.commons.lang3.NotImplementedException; +import org.polypheny.jdbc.types.ProtoToJdbcTypeMap; +import org.polypheny.prism.ColumnMeta; +import org.polypheny.prism.ProtoPolyType; + +public class PolyphenyColumnMeta { + + @Getter + private final int ordinal; + @Getter + private final boolean autoIncrement; + @Getter + private final boolean caseSensitive; + @Getter + private final boolean searchable; + @Getter + private final boolean currency; + @Getter + private final int nullable; + @Getter + private final boolean signed; + @Getter + private final int displaySize; + @Getter + private final String columnLabel; + @Getter + private final String columnName; + @Getter + private final String namespace; + @Getter + private final int precision; + @Getter + private final int scale; + @Getter + private final String tableName; + @Getter + private final String catalogName; + @Getter + private final boolean readOnly; + @Getter + private final boolean writable; + @Getter + private final boolean definitelyWritable; + @Getter + private final String columnClassName; + @Getter + private final int sqlType; + @Getter + private final String polyphenyFieldTypeName; + + + //column = field + public PolyphenyColumnMeta( ColumnMeta protoColumnMeta ) { + this.ordinal = protoColumnMeta.getColumnIndex(); + this.autoIncrement = false; + this.caseSensitive = true; + this.searchable = false; + this.currency = false; + this.nullable = protoColumnMeta.getIsNullable() ? 
ResultSetMetaData.columnNullable : ResultSetMetaData.columnNoNulls; + this.signed = false; + this.displaySize = protoColumnMeta.getLength(); + this.columnLabel = protoColumnMeta.getColumnLabel(); + this.columnName = protoColumnMeta.getColumnName(); + this.namespace = protoColumnMeta.getNamespace(); + this.precision = protoColumnMeta.getPrecision(); + this.scale = 1; + // table = entity + this.tableName = protoColumnMeta.getEntityName(); + this.catalogName = ""; + this.readOnly = false; + this.writable = false; + this.definitelyWritable = false; + this.columnClassName = ""; + if ( protoColumnMeta.getTypeMeta().getProtoValueType() == ProtoPolyType.USER_DEFINED_TYPE ) { + //TODO: This is required once user defined types are introduced. Depending on their implementation this might even become obsolete. + throw new NotImplementedException( "Struct types not implemented yet" ); + } + if ( protoColumnMeta.getTypeMeta().getProtoValueType() == ProtoPolyType.ARRAY ) { + ProtoPolyType type = protoColumnMeta.getTypeMeta().getArrayMeta().getElementType().getProtoValueType(); + this.sqlType = JDBCType.ARRAY.getVendorTypeNumber(); + this.polyphenyFieldTypeName = type.name(); + return; + } + ProtoPolyType type = protoColumnMeta.getTypeMeta().getProtoValueType(); + this.sqlType = ProtoToJdbcTypeMap.getJdbcTypeFromProto( type ); + this.polyphenyFieldTypeName = type.name(); + } + + + // Only there so constructor remains hidden to indicate that it shouldn't be used for anything else + public static PolyphenyColumnMeta fromSpecification( int ordinal, String columnLabel, String entityName, int jdcType ) { + return new PolyphenyColumnMeta( ordinal, columnLabel, entityName, jdcType ); + } + + + /* This constructor is used exclusively to create metadata for the responses of the meta endpoint since these must be + * represented as ResultSets. 
+ */ + private PolyphenyColumnMeta( int ordinal, String columnLabel, String entityName, int jdbcType ) { + this.ordinal = ordinal; + this.autoIncrement = false; + this.caseSensitive = true; + this.searchable = false; + this.currency = false; + this.nullable = ResultSetMetaData.columnNullable; + this.signed = false; + this.displaySize = -1; + this.columnLabel = columnLabel; + this.columnName = columnLabel; + this.namespace = null; + this.precision = -1; + this.scale = 1; + this.tableName = entityName; + this.catalogName = ""; + this.readOnly = false; + this.writable = false; + this.definitelyWritable = false; + this.columnClassName = ""; + this.sqlType = jdbcType; + this.polyphenyFieldTypeName = ""; + } + +} diff --git a/src/main/java/org/polypheny/jdbc/meta/PolyphenyDatabaseMetadata.java b/src/main/java/org/polypheny/jdbc/meta/PolyphenyDatabaseMetadata.java new file mode 100644 index 00000000..66912ab1 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/PolyphenyDatabaseMetadata.java @@ -0,0 +1,1365 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.meta; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.RowIdLifetime; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.polypheny.jdbc.ConnectionString; +import org.polypheny.jdbc.PolyConnection; +import org.polypheny.jdbc.PrismInterfaceClient; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; +import org.polypheny.jdbc.properties.DriverProperties; +import org.polypheny.jdbc.properties.PropertyUtils; +import org.polypheny.prism.Column; +import org.polypheny.prism.DbmsVersionResponse; +import org.polypheny.prism.Entity; +import org.polypheny.prism.ForeignKey; +import org.polypheny.prism.Function; +import org.polypheny.prism.Index; +import org.polypheny.prism.Namespace; +import org.polypheny.prism.PrimaryKey; +import org.polypheny.prism.Procedure; +import org.polypheny.prism.Table; +import org.polypheny.prism.TableType; +import org.polypheny.prism.Type; + +public class PolyphenyDatabaseMetadata implements DatabaseMetaData { + + private static final int NO_VERSION = -1; + private ConnectionString connectionString; + + private NullSorting nullSorting; + + private PrismInterfaceClient prismInterfaceClient; + + private PolyConnection polyConnection; + + private String productName; + private String productVersion; + private int databaseMinorVersion = NO_VERSION; + private int databaseMajorVersion = NO_VERSION; + + + private enum NullSorting { + START, + END, + HIGH, + LOW + } + + + public PolyphenyDatabaseMetadata( PrismInterfaceClient prismInterfaceClient, ConnectionString target ) { + this.prismInterfaceClient = prismInterfaceClient; + this.connectionString = target; + 
this.nullSorting = NullSorting.END; + } + + + private void throwNotSupportedIfStrict() throws SQLFeatureNotSupportedException { + if ( !polyConnection.isStrict() ) { + return; + } + throw new SQLFeatureNotSupportedException(); + } + + + public void setConnection( PolyConnection connection ) { + this.polyConnection = connection; + } + + + private void fetchDbmsVersionInfo() throws SQLException { + DbmsVersionResponse response = prismInterfaceClient.getDbmsVersion( getConnection().getNetworkTimeout() ); + productName = response.getDbmsName(); + productVersion = response.getVersionName(); + databaseMinorVersion = response.getMinorVersion(); + databaseMajorVersion = response.getMajorVersion(); + } + + + @Override + public boolean allProceduresAreCallable() throws SQLException { + return true; + } + + + @Override + public boolean allTablesAreSelectable() throws SQLException { + return true; + } + + + @Override + public String getURL() throws SQLException { + if ( connectionString == null ) { + return null; + } + return DriverProperties.getDRIVER_URL_SCHEMA() + "//" + connectionString.getTarget(); + } + + + @Override + public String getUserName() throws SQLException { + if ( connectionString == null ) { + return null; + } + return connectionString.getUser(); + } + + + @Override + public boolean isReadOnly() throws SQLException { + return PropertyUtils.isDEFAULT_READ_ONLY(); + } + + + @Override + public boolean nullsAreSortedHigh() throws SQLException { + return nullSorting == NullSorting.HIGH; + } + + + @Override + public boolean nullsAreSortedLow() throws SQLException { + return nullSorting == NullSorting.LOW; + } + + + @Override + public boolean nullsAreSortedAtStart() throws SQLException { + return nullSorting == NullSorting.START; + } + + + @Override + public boolean nullsAreSortedAtEnd() throws SQLException { + return nullSorting == NullSorting.END; + } + + + @Override + public String getDatabaseProductName() throws SQLException { + if ( productName == null ) { + 
fetchDbmsVersionInfo(); + } + return productName; + } + + + @Override + public String getDatabaseProductVersion() throws SQLException { + if ( productVersion == null ) { + fetchDbmsVersionInfo(); + } + return productVersion; + } + + + @Override + public String getDriverName() throws SQLException { + return DriverProperties.getDRIVER_NAME(); + } + + + @Override + public String getDriverVersion() throws SQLException { + return DriverProperties.getDRIVER_VERSION(); + } + + + @Override + public int getDriverMajorVersion() { + return DriverProperties.getDRIVER_MAJOR_VERSION(); + } + + + @Override + public int getDriverMinorVersion() { + return DriverProperties.getDRIVER_MINOR_VERSION(); + } + + + @Override + public boolean usesLocalFiles() throws SQLException { + return false; + } + + + @Override + public boolean usesLocalFilePerTable() throws SQLException { + return false; + } + + + @Override + public boolean supportsMixedCaseIdentifiers() throws SQLException { + return true; + } + + + @Override + public boolean storesUpperCaseIdentifiers() throws SQLException { + return false; + } + + + @Override + public boolean storesLowerCaseIdentifiers() throws SQLException { + return false; + } + + + @Override + public boolean storesMixedCaseIdentifiers() throws SQLException { + return true; + } + + + @Override + public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException { + return false; + } + + + @Override + public boolean storesUpperCaseQuotedIdentifiers() throws SQLException { + return false; + } + + + @Override + public boolean storesLowerCaseQuotedIdentifiers() throws SQLException { + return false; + } + + + @Override + public boolean storesMixedCaseQuotedIdentifiers() throws SQLException { + return false; + } + + + @Override + public String getIdentifierQuoteString() throws SQLException { + return " "; + } + + + @Override + public String getSQLKeywords() throws SQLException { + return prismInterfaceClient.getSqlKeywords( getConnection().getNetworkTimeout() ); 
+ } + + + @Override + public String getNumericFunctions() throws SQLException { + return prismInterfaceClient.getSqlNumericFunctions( getConnection().getNetworkTimeout() ); + } + + + @Override + public String getStringFunctions() throws SQLException { + return prismInterfaceClient.getSqlStringFunctions( getConnection().getNetworkTimeout() ); + } + + + @Override + public String getSystemFunctions() throws SQLException { + return prismInterfaceClient.getSqlSystemFunctions( getConnection().getNetworkTimeout() ); + } + + + @Override + public String getTimeDateFunctions() throws SQLException { + return prismInterfaceClient.getSqlTimeDateFunctions( getConnection().getNetworkTimeout() ); + } + + + @Override + public String getSearchStringEscape() throws SQLException { + return "\\"; + } + + + @Override + public String getExtraNameCharacters() throws SQLException { + return ""; + } + + + @Override + public boolean supportsAlterTableWithAddColumn() throws SQLException { + return false; + } + + + @Override + public boolean supportsAlterTableWithDropColumn() throws SQLException { + return false; + } + + + @Override + public boolean supportsColumnAliasing() throws SQLException { + return true; + } + + + @Override + public boolean nullPlusNonNullIsNull() throws SQLException { + return true; + } + + + @Override + public boolean supportsConvert() throws SQLException { + // This is independent of the conversion used for set or get Object. 
This is related to the CONVERT keyword in JDBC + return false; + } + + + @Override + public boolean supportsConvert( int fromType, int toType ) throws SQLException { + return false; + } + + + @Override + public boolean supportsTableCorrelationNames() throws SQLException { + return false; + } + + + @Override + public boolean supportsDifferentTableCorrelationNames() throws SQLException { + return false; + } + + + @Override + public boolean supportsExpressionsInOrderBy() throws SQLException { + return true; + } + + + @Override + public boolean supportsOrderByUnrelated() throws SQLException { + return true; + } + + + @Override + public boolean supportsGroupBy() throws SQLException { + return true; + } + + + @Override + public boolean supportsGroupByUnrelated() throws SQLException { + return true; + } + + + @Override + public boolean supportsGroupByBeyondSelect() throws SQLException { + return true; + } + + + @Override + public boolean supportsLikeEscapeClause() throws SQLException { + return true; + } + + + @Override + public boolean supportsMultipleResultSets() throws SQLException { + return false; + } + + + @Override + public boolean supportsMultipleTransactions() throws SQLException { + return false; + } + + + @Override + public boolean supportsNonNullableColumns() throws SQLException { + return true; + } + + + @Override + public boolean supportsMinimumSQLGrammar() throws SQLException { + return true; + } + + + @Override + public boolean supportsCoreSQLGrammar() throws SQLException { + return true; + } + + + @Override + public boolean supportsExtendedSQLGrammar() throws SQLException { + return true; + } + + + @Override + public boolean supportsANSI92EntryLevelSQL() throws SQLException { + return true; + } + + + @Override + public boolean supportsANSI92IntermediateSQL() throws SQLException { + return true; + } + + + @Override + public boolean supportsANSI92FullSQL() throws SQLException { + return true; + } + + + @Override + public boolean 
supportsIntegrityEnhancementFacility() throws SQLException { + return false; + } + + + @Override + public boolean supportsOuterJoins() throws SQLException { + return true; + } + + + @Override + public boolean supportsFullOuterJoins() throws SQLException { + return true; + } + + + @Override + public boolean supportsLimitedOuterJoins() throws SQLException { + return true; + } + + + @Override + public String getSchemaTerm() throws SQLException { + return "namespace"; + } + + + @Override + public String getProcedureTerm() throws SQLException { + // Stored procedures not supported... + return "procedure"; + } + + + @Override + public String getCatalogTerm() throws SQLException { + return ""; + } + + + @Override + public boolean isCatalogAtStart() throws SQLException { + return false; + } + + + @Override + public String getCatalogSeparator() throws SQLException { + return ""; + } + + + @Override + public boolean supportsSchemasInDataManipulation() throws SQLException { + return true; + } + + + @Override + public boolean supportsSchemasInProcedureCalls() throws SQLException { + return true; + } + + + @Override + public boolean supportsSchemasInTableDefinitions() throws SQLException { + return true; + } + + + @Override + public boolean supportsSchemasInIndexDefinitions() throws SQLException { + // Index definition not supported by polypheny! + return true; + } + + + @Override + public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException { + // Privilege definition not supported by polypheny! 
+ return true; + } + + + @Override + public boolean supportsCatalogsInDataManipulation() throws SQLException { + return false; + } + + + @Override + public boolean supportsCatalogsInProcedureCalls() throws SQLException { + return false; + } + + + @Override + public boolean supportsCatalogsInTableDefinitions() throws SQLException { + return false; + } + + + @Override + public boolean supportsCatalogsInIndexDefinitions() throws SQLException { + // Index definition not supported by polypheny! + return false; + } + + + @Override + public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException { + // Privilege Definition not supported by polypheny! + return false; + } + + + @Override + public boolean supportsPositionedDelete() throws SQLException { + return false; + } + + + @Override + public boolean supportsPositionedUpdate() throws SQLException { + return false; + } + + + @Override + public boolean supportsSelectForUpdate() throws SQLException { + return false; + } + + + @Override + public boolean supportsStoredProcedures() throws SQLException { + return false; + } + + + @Override + public boolean supportsSubqueriesInComparisons() throws SQLException { + return true; + } + + + @Override + public boolean supportsSubqueriesInExists() throws SQLException { + return true; + } + + + @Override + public boolean supportsSubqueriesInIns() throws SQLException { + return true; + } + + + @Override + public boolean supportsSubqueriesInQuantifieds() throws SQLException { + return false; + } + + + @Override + public boolean supportsCorrelatedSubqueries() throws SQLException { + return true; + } + + + @Override + public boolean supportsUnion() throws SQLException { + return true; + } + + + @Override + public boolean supportsUnionAll() throws SQLException { + return true; + } + + + @Override + public boolean supportsOpenCursorsAcrossCommit() throws SQLException { + return false; + } + + + @Override + public boolean supportsOpenCursorsAcrossRollback() throws SQLException { 
+ return false; + } + + + @Override + public boolean supportsOpenStatementsAcrossCommit() throws SQLException { + return false; + } + + + @Override + public boolean supportsOpenStatementsAcrossRollback() throws SQLException { + return false; + } + + + @Override + public int getMaxBinaryLiteralLength() throws SQLException { + return 0; + } + + + @Override + public int getMaxCharLiteralLength() throws SQLException { + return 0; + } + + + @Override + public int getMaxColumnNameLength() throws SQLException { + return 0; + } + + + @Override + public int getMaxColumnsInGroupBy() throws SQLException { + return 0; + } + + + @Override + public int getMaxColumnsInIndex() throws SQLException { + return 0; + } + + + @Override + public int getMaxColumnsInOrderBy() throws SQLException { + return 0; + } + + + @Override + public int getMaxColumnsInSelect() throws SQLException { + return 0; + } + + + @Override + public int getMaxColumnsInTable() throws SQLException { + return 0; + } + + + @Override + public int getMaxConnections() throws SQLException { + // GRPC supports 100 concurrent streams by default. Beyond this queuing occurs. 
+ return 0; + } + + + @Override + public int getMaxCursorNameLength() throws SQLException { + return 0; + } + + + @Override + public int getMaxIndexLength() throws SQLException { + return 0; + } + + + @Override + public int getMaxSchemaNameLength() throws SQLException { + return 0; + } + + + @Override + public int getMaxProcedureNameLength() throws SQLException { + return 0; + } + + + @Override + public int getMaxCatalogNameLength() throws SQLException { + return 0; + } + + + @Override + public int getMaxRowSize() throws SQLException { + return 0; + } + + + @Override + public boolean doesMaxRowSizeIncludeBlobs() throws SQLException { + return false; + } + + + @Override + public int getMaxStatementLength() throws SQLException { + return 0; + } + + + @Override + public int getMaxStatements() throws SQLException { + return 0; + } + + + @Override + public int getMaxTableNameLength() throws SQLException { + return 0; + } + + + @Override + public int getMaxTablesInSelect() throws SQLException { + return 0; + } + + + @Override + public int getMaxUserNameLength() throws SQLException { + return 0; + } + + + @Override + public int getDefaultTransactionIsolation() throws SQLException { + return PropertyUtils.getDEFAULT_TRANSACTION_ISOLATION(); + } + + + @Override + public boolean supportsTransactions() throws SQLException { + return true; + } + + + @Override + public boolean supportsTransactionIsolationLevel( int level ) throws SQLException { + return PropertyUtils.isValidIsolationLevel( level ); + } + + + @Override + public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException { + return true; + } + + + @Override + public boolean supportsDataManipulationTransactionsOnly() throws SQLException { + return true; + } + + + @Override + public boolean dataDefinitionCausesTransactionCommit() throws SQLException { + return true; + } + + + @Override + public boolean dataDefinitionIgnoredInTransactions() throws SQLException { + return false; + } + + + 
@Override + public ResultSet getProcedures( String catalog, String schemaPattern, String procedureNamePattern ) throws SQLException { + throwNotSupportedIfStrict(); + List procedures = prismInterfaceClient.searchProcedures( "sql", procedureNamePattern, getConnection().getNetworkTimeout() ); + return MetaResultSetBuilder.buildFromProcedures( procedures ); + } + + + @Override + public ResultSet getProcedureColumns( String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern ) throws SQLException { + // For now an empty result set is returned + // For the production version the getProcedures api call should be used to retrieve the procedures meta + // which will contain all info required to build this result set. + throwNotSupportedIfStrict(); + return MetaResultSetBuilder.buildFromProcedureColumns(); + } + + + @Override + public ResultSet getTables( String catalog, String schemaPattern, String tableNamePattern, String[] types ) throws SQLException { + // catalog ignored because polypheny doesn't have those + + List
tables = getTableStream( schemaPattern, tableNamePattern ).collect( Collectors.toList() ); + if ( types == null ) { + return MetaResultSetBuilder.buildFromTables( tables ); + } + HashSet tableTypes = new HashSet<>( Arrays.asList( types ) ); + tables = tables.stream().filter( t -> tableTypes.contains( t.getTableType() ) ).collect( Collectors.toList() ); + return MetaResultSetBuilder.buildFromTables( tables ); + + } + + + @Override + public ResultSet getSchemas() throws SQLException { + return getSchemas( null, null ); + } + + + @Override + public ResultSet getCatalogs() throws SQLException { + String defaultNamespace = prismInterfaceClient.getDefaultNamespace( getConnection().getNetworkTimeout() ); + return MetaResultSetBuilder.buildFromDatabases( defaultNamespace ); + } + + + @Override + public ResultSet getTableTypes() throws SQLException { + List tableTypes = prismInterfaceClient.getTablesTypes( getConnection().getNetworkTimeout() ); + return MetaResultSetBuilder.buildFromTableTypes( tableTypes ); + } + + + @Override + public ResultSet getColumns( String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern ) throws SQLException { + List columns = prismInterfaceClient.searchNamespaces( schemaPattern, MetaUtils.NamespaceTypes.RELATIONAL.name(), getConnection().getNetworkTimeout() ) + .stream() + .map( n -> { + try { + return getMatchingColumns( n, tableNamePattern, columnNamePattern ); + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } + } ) + .flatMap( List::stream ) + .collect( Collectors.toList() ); + return MetaResultSetBuilder.buildFromColumns( columns ); + } + + + private List getMatchingColumns( Namespace namespace, String tableNamePattern, String columnNamePattern ) throws SQLException { + Stream columnStream = prismInterfaceClient.searchEntities( namespace.getNamespaceName(), tableNamePattern, getConnection().getNetworkTimeout() ).stream() + .filter( Entity::hasTable ) + .map( Entity::getTable ) + .map( 
Table::getColumnsList ) + .flatMap( List::stream ); + if ( columnNamePattern == null ) { + return columnStream.collect( Collectors.toList() ); + } + return columnStream + .filter( c -> columnMatchesPattern( namespace, c, columnNamePattern ) ) + .collect( Collectors.toList() ); + } + + + private boolean columnMatchesPattern( Namespace namespace, Column column, String columnNamePattern ) { + if ( namespace.getIsCaseSensitive() ) { + return column.getColumnName().matches( MetaUtils.convertToRegex( columnNamePattern ) ); + } + return Pattern.compile( MetaUtils.convertToRegex( columnNamePattern ), Pattern.CASE_INSENSITIVE ) + .matcher( column.getColumnName().toLowerCase() ).find(); + } + + + @Override + public ResultSet getColumnPrivileges( String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern ) throws SQLException { + /* This feature is currently not supported by Polypheny thus the following workaround is used: + * 1) get all columns using getColumns() + * 2) the MetaResultSetBuilder constructs a full rights result set from the response of the getColumns() api call. + * + * For proper implementation a dedicated api call should be used, the result of which should be passed to the MetaResultSet builder. 
+ */ + throwNotSupportedIfStrict(); + List columns = prismInterfaceClient.searchNamespaces( schemaPattern, MetaUtils.NamespaceTypes.RELATIONAL.name(), getConnection().getNetworkTimeout() ) + .stream() + .map( n -> { + try { + return getMatchingColumns( n, tableNamePattern, columnNamePattern ); + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } + } ) + .flatMap( List::stream ) + .collect( Collectors.toList() ); + return MetaResultSetBuilder.buildFromColumnPrivileges( columns, getUserName() ); + } + + + @Override + public ResultSet getTablePrivileges( String catalog, String schemaPattern, String tableNamePattern ) throws SQLException { + /* This feature is currently not supported by Polypheny thus the following workaround is used: + * 1) get all tables using the searchNamespaces() and searchEntities() api calls + * 2) the MetaResultSetBuilder constructs a full rights result set from the response of the searchNamespaces() and searchEntities() api calls. + */ + throwNotSupportedIfStrict(); + List
tables = getTableStream( schemaPattern, tableNamePattern ).collect( Collectors.toList() ); + return MetaResultSetBuilder.buildFromTablePrivileges( tables, getUserName() ); + } + + + @Override + public ResultSet getBestRowIdentifier( String catalog, String schema, String table, int scope, boolean nullable ) throws SQLException { + List columns = getTableStream( schema, table ) + .filter( Table::hasPrimaryKey ) + .map( Table::getPrimaryKey ) + .map( PrimaryKey::getColumnsList ) + .flatMap( List::stream ) + .filter( c -> !c.getIsNullable() || nullable ) + .collect( Collectors.toList() ); + return MetaResultSetBuilder.fromBestRowIdentifiers( columns ); + } + + + @Override + public ResultSet getVersionColumns( String catalog, String schema, String table ) throws SQLException { + List columns = getTableStream( schema, table ) + .map( Table::getColumnsList ) + .flatMap( List::stream ) + .filter( c -> c.getColumnType() == Column.ColumnType.VERSION ) + .collect( Collectors.toList() ); + return MetaResultSetBuilder.buildFromVersionColumns( columns ); + } + + + @Override + public ResultSet getPrimaryKeys( String catalog, String schema, String table ) throws SQLException { + List primaryKeys = getTableStream( schema, table ) + .filter( Table::hasPrimaryKey ) + .map( Table::getPrimaryKey ) + .collect( Collectors.toList() ); + return MetaResultSetBuilder.buildFromPrimaryKeys( primaryKeys ); + } + + + @Override + public ResultSet getImportedKeys( String catalog, String schema, String table ) throws SQLException { + List foreignKeys = getTableStream( schema, table ) + .map( Table::getForeignKeysList ) + .flatMap( List::stream ) + .collect( Collectors.toList() ); + return MetaResultSetBuilder.buildFromImportedKeys( foreignKeys ); + } + + + @Override + public ResultSet getExportedKeys( String catalog, String schema, String table ) throws SQLException { + List exportedKeys = getTableStream( schema, table ) + .map( Table::getExportedKeysList ) + .flatMap( List::stream ) + .collect( 
Collectors.toList() ); + return MetaResultSetBuilder.buildFromExportedKeys( exportedKeys ); + } + + + private Stream
getTableStream( String namespace, String table ) throws SQLException { + return prismInterfaceClient.searchNamespaces( namespace, MetaUtils.NamespaceTypes.RELATIONAL.name(), getConnection().getNetworkTimeout() ) + .stream() + .map( Namespace::getNamespaceName ) + .map( name -> { + try { + return prismInterfaceClient.searchEntities( name, table, getConnection().getNetworkTimeout() ); + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } + } ) + .flatMap( List::stream ) + .filter( Entity::hasTable ) + .map( Entity::getTable ); + } + + + @Override + public ResultSet getCrossReference( + String parentCatalog, String parentSchema, String parentTable, + String foreignCatalog, String foreignSchema, String foreignTable ) throws SQLException { + HashMap parentTables = getTableStream( parentSchema, parentTable ) + .collect( Collectors.toMap( Table::getTableName, t -> t, ( prev, next ) -> next, HashMap::new ) ); + List foreignKeys = getTableStream( foreignSchema, foreignTable ) + .map( Table::getForeignKeysList ) + .flatMap( List::stream ) + .filter( f -> referencesTable( f, parentTables.get( f.getReferencedTableName() ) ) ) + .collect( Collectors.toList() ); + return MetaResultSetBuilder.buildFromCrossReference( foreignKeys ); + } + + + private boolean referencesTable( ForeignKey foreignKey, Table table ) { + if ( table == null ) { + return false; + } + if ( !foreignKey.getReferencedTableName().equals( table.getTableName() ) ) { + return false; + } + if ( !foreignKey.getReferencedNamespaceName().equals( table.getNamespaceName() ) ) { + return false; + } + return true; + } + + + @Override + public ResultSet getTypeInfo() throws SQLException { + List types = prismInterfaceClient.getTypes( getConnection().getNetworkTimeout() ); + return MetaResultSetBuilder.buildFromTypes( types ); + } + + + @Override + public ResultSet getIndexInfo( String catalog, String schema, String table, boolean unique, boolean approximate ) throws SQLException { + List indexes = 
getTableStream( schema, table ) + .map( Table::getIndexesList ) + .flatMap( List::stream ) + .filter( i -> i.getUnique() || !unique ) + .collect( Collectors.toList() ); + return MetaResultSetBuilder.buildFromIndexes( indexes ); + } + + + @Override + public boolean supportsResultSetType( int type ) throws SQLException { + return PropertyUtils.isValidResultSetType( type ); + } + + + @Override + public boolean supportsResultSetConcurrency( int type, int concurrency ) throws SQLException { + return PropertyUtils.isValidResultSetConcurrency( type, concurrency ); + } + + + @Override + public boolean ownUpdatesAreVisible( int type ) throws SQLException { + return false; + } + + + @Override + public boolean ownDeletesAreVisible( int type ) throws SQLException { + return false; + } + + + @Override + public boolean ownInsertsAreVisible( int type ) throws SQLException { + return false; + } + + + @Override + public boolean othersUpdatesAreVisible( int type ) throws SQLException { + return false; + } + + + @Override + public boolean othersDeletesAreVisible( int type ) throws SQLException { + return false; + } + + + @Override + public boolean othersInsertsAreVisible( int type ) throws SQLException { + return false; + } + + + @Override + public boolean updatesAreDetected( int i ) throws SQLException { + return false; + } + + + @Override + public boolean deletesAreDetected( int i ) throws SQLException { + return false; + } + + + @Override + public boolean insertsAreDetected( int i ) throws SQLException { + return false; + } + + + @Override + public boolean supportsBatchUpdates() throws SQLException { + return true; + } + + + @Override + public ResultSet getUDTs( String catalog, String schemaPattern, String typeNamePattern, int[] types ) throws SQLException { + throwNotSupportedIfStrict(); + throw new SQLFeatureNotSupportedException( "This feature is not yet supported." 
); + //List userDefinedTypes = prismInterfaceClient.getUserDefinedTypes( getConnection().getNetworkTimeout() ); + //return MetaResultSetBuilder.buildFromUserDefinedTypes( userDefinedTypes ); + } + + + @Override + public Connection getConnection() throws SQLException { + return polyConnection; + } + + + @Override + public boolean supportsSavepoints() throws SQLException { + return false; + } + + + @Override + public boolean supportsNamedParameters() throws SQLException { + return true; + } + + + @Override + public boolean supportsMultipleOpenResults() throws SQLException { + return false; + } + + + @Override + public boolean supportsGetGeneratedKeys() throws SQLException { + return false; + } + + + @Override + public ResultSet getSuperTypes( String catalog, String schemaPattern, String typeNamePattern ) throws SQLException { + throwNotSupportedIfStrict(); + return MetaResultSetBuilder.buildFromSuperTypes(); + } + + + @Override + public ResultSet getSuperTables( String catalog, String schemaPattern, String tableNamePattern ) throws SQLException { + throwNotSupportedIfStrict(); + return MetaResultSetBuilder.buildFromSuperTables(); + } + + + @Override + public ResultSet getAttributes( String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern ) throws SQLException { + throwNotSupportedIfStrict(); + // Now creates an empty result set. In the future the getUserDefinedTypes api call should be used to retrieve + // the UDT meta which will contain all data necessary to build this result set. 
+ return MetaResultSetBuilder.buildFromAttributes(); + } + + + @Override + public boolean supportsResultSetHoldability( int resultSetHoldability ) throws SQLException { + return PropertyUtils.isValidResultSetHoldability( resultSetHoldability ); + } + + + @Override + public int getResultSetHoldability() throws SQLException { + return PropertyUtils.getDEFAULT_RESULTSET_HOLDABILITY(); + } + + + @Override + public int getDatabaseMajorVersion() throws SQLException { + if ( databaseMajorVersion == NO_VERSION ) { + fetchDbmsVersionInfo(); + } + return databaseMinorVersion; + } + + + @Override + public int getDatabaseMinorVersion() throws SQLException { + if ( databaseMajorVersion == NO_VERSION ) { + fetchDbmsVersionInfo(); + } + return databaseMinorVersion; + } + + + @Override + public int getJDBCMajorVersion() throws SQLException { + return DriverProperties.getDRIVER_MAJOR_VERSION(); + } + + + @Override + public int getJDBCMinorVersion() throws SQLException { + return DriverProperties.getDRIVER_MINOR_VERSION(); + } + + + @Override + public int getSQLStateType() throws SQLException { + return sqlStateSQL; + } + + + @Override + public boolean locatorsUpdateCopy() throws SQLException { + return true; + } + + + @Override + public boolean supportsStatementPooling() throws SQLException { + return false; + } + + + @Override + public RowIdLifetime getRowIdLifetime() throws SQLException { + return RowIdLifetime.ROWID_UNSUPPORTED; + } + + + @Override + public ResultSet getSchemas( String catalog, String schemaPattern ) throws SQLException { + // TODO: Always search all namespace types? 
+ List namespaces = prismInterfaceClient.searchNamespaces( schemaPattern, null, getConnection().getNetworkTimeout() ); + return MetaResultSetBuilder.buildFromNamespaces( namespaces ); + } + + + @Override + public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException { + return false; + } + + + @Override + public boolean autoCommitFailureClosesAllResultSets() throws SQLException { + return false; + } + + + @Override + public ResultSet getClientInfoProperties() throws SQLException { + throw new SQLFeatureNotSupportedException( "This feature is not yet supported." ); + //List metas = prismInterfaceClient.getClientInfoPropertyMetas( getConnection().getNetworkTimeout() ); + //return MetaResultSetBuilder.buildFromClientInfoPropertyMetas( metas ); + } + + + @Override + public ResultSet getFunctions( String catalog, String schemaPattern, String functionNamePattern ) throws SQLException { + List functions = prismInterfaceClient.searchFunctions( "sql", "SYSTEM", getConnection().getNetworkTimeout() ) + .stream() + .filter( f -> f.getName().matches( MetaUtils.convertToRegex( functionNamePattern ) ) ) + .collect( Collectors.toList() ); + return MetaResultSetBuilder.fromFunctions( functions ); + } + + + @Override + public ResultSet getFunctionColumns( String s, String s1, String s2, String s3 ) throws SQLException { + throwNotSupportedIfStrict(); + return MetaResultSetBuilder.buildFromFunctionColumns(); + } + + + @Override + public ResultSet getPseudoColumns( String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern ) throws SQLException { + List columns = prismInterfaceClient.searchNamespaces( schemaPattern, MetaUtils.NamespaceTypes.RELATIONAL.name(), getConnection().getNetworkTimeout() ) + .stream() + .map( n -> { + try { + return getMatchingColumns( n, tableNamePattern, columnNamePattern ); + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } + } ) + .flatMap( List::stream ) + .filter( Column::getIsHidden ) + 
.collect( Collectors.toList() ); + return MetaResultSetBuilder.buildFromPseudoColumns( columns ); + } + + + @Override + public boolean generatedKeyAlwaysReturned() throws SQLException { + return false; + } + + + @Override + public T unwrap( Class aClass ) throws SQLException { + if ( aClass.isInstance( this ) ) { + return aClass.cast( this ); + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.WRAPPER_INCORRECT_TYPE, "Not a wrapper for " + aClass ); + } + + + @Override + public boolean isWrapperFor( Class aClass ) { + return aClass.isInstance( this ); + + } + +} diff --git a/src/main/java/org/polypheny/jdbc/meta/PolyphenyParameterMeta.java b/src/main/java/org/polypheny/jdbc/meta/PolyphenyParameterMeta.java new file mode 100644 index 00000000..4d829529 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/PolyphenyParameterMeta.java @@ -0,0 +1,60 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.meta; + +import java.sql.ParameterMetaData; +import lombok.Getter; +import org.polypheny.jdbc.utils.TypedValueUtils; +import org.polypheny.prism.ParameterMeta; + +public class PolyphenyParameterMeta { + + /* As all values are unsigned in polypheny we hardcoded this. 
*/ + private static final boolean SIGNEDNESS = false; + private static final int PARAMETER_MODE = ParameterMetaData.parameterModeIn; + private static final int NULLABILITY = ParameterMetaData.parameterNullableUnknown; + + @Getter + private String parameterClassName; + @Getter + private int parameterMode; + @Getter + private int parameterType; + @Getter + private String parameterTypeName; + @Getter + private int precision; + @Getter + private int scale; + @Getter + private int isNullable; + @Getter + private boolean isSigned; + + + public PolyphenyParameterMeta( ParameterMeta parameterMeta ) { + this.parameterClassName = null; + this.parameterMode = PARAMETER_MODE; + this.parameterType = TypedValueUtils.getJdbcTypeFromPolyTypeName( parameterMeta.getTypeName() ); + this.parameterTypeName = parameterMeta.getTypeName(); + this.precision = parameterMeta.getPrecision(); + this.scale = parameterMeta.getScale(); + this.isNullable = NULLABILITY; + this.isSigned = SIGNEDNESS; + } + +} diff --git a/src/main/java/org/polypheny/jdbc/meta/PolyphenyParameterMetaData.java b/src/main/java/org/polypheny/jdbc/meta/PolyphenyParameterMetaData.java new file mode 100644 index 00000000..2c26f61b --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/PolyphenyParameterMetaData.java @@ -0,0 +1,125 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.meta; + +import java.sql.ParameterMetaData; +import java.sql.SQLException; +import java.util.List; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; +import org.polypheny.prism.PreparedStatementSignature; + +public class PolyphenyParameterMetaData implements ParameterMetaData { + + private int parameterCount; + private List parameterMetas; + + + public PolyphenyParameterMetaData( PreparedStatementSignature statementSignature ) { + this.parameterCount = statementSignature.getParameterMetasCount(); + this.parameterMetas = MetaUtils.buildParameterMetas( statementSignature.getParameterMetasList() ); + } + + + private void throwIfOutOfBounds( int param ) throws SQLException { + /* jdbc indexes start with 1 */ + param--; + if ( param < 0 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.PARAMETER_NOT_EXISTS, "Index out of Bounds." ); + } + if ( param >= parameterCount ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.PARAMETER_NOT_EXISTS, "Index out of Bounds." 
); + } + } + + + private PolyphenyParameterMeta getMeta( int param ) throws SQLException { + throwIfOutOfBounds( param ); + return parameterMetas.get( param ); + } + + + @Override + public int getParameterCount() throws SQLException { + return parameterCount; + } + + + @Override + public int isNullable( int param ) throws SQLException { + return getMeta( param ).getIsNullable(); + } + + + @Override + public boolean isSigned( int param ) throws SQLException { + return getMeta( param ).isSigned(); + } + + + @Override + public int getPrecision( int param ) throws SQLException { + return getMeta( param ).getPrecision(); + } + + + @Override + public int getScale( int param ) throws SQLException { + return getMeta( param ).getScale(); + } + + + @Override + public int getParameterType( int param ) throws SQLException { + return getMeta( param ).getParameterType(); + } + + + @Override + public String getParameterTypeName( int param ) throws SQLException { + return getMeta( param ).getParameterTypeName(); + } + + + @Override + public String getParameterClassName( int param ) throws SQLException { + return getMeta( param ).getParameterClassName(); + } + + + @Override + public int getParameterMode( int param ) throws SQLException { + return getMeta( param ).getParameterMode(); + } + + + @Override + public T unwrap( Class aClass ) throws SQLException { + if ( aClass.isInstance( this ) ) { + return aClass.cast( this ); + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.WRAPPER_INCORRECT_TYPE, "Not a wrapper for " + aClass ); + } + + + @Override + public boolean isWrapperFor( Class aClass ) { + return aClass.isInstance( this ); + + } + +} diff --git a/src/main/java/org/polypheny/jdbc/meta/PolyphenyResultSetMetadata.java b/src/main/java/org/polypheny/jdbc/meta/PolyphenyResultSetMetadata.java new file mode 100644 index 00000000..3021f23c --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/meta/PolyphenyResultSetMetadata.java @@ -0,0 +1,198 @@ +/* + * Copyright 
2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.meta; + +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; + +public class PolyphenyResultSetMetadata implements ResultSetMetaData { + + private List columnMetas; + private Map columnIndexes; + + + public PolyphenyResultSetMetadata( List columnMetas ) { + this.columnMetas = columnMetas; + this.columnIndexes = this.columnMetas.stream().collect( Collectors.toMap( PolyphenyColumnMeta::getColumnLabel, c -> c.getOrdinal() + 1, ( m, n ) -> n ) ); + + } + + + private PolyphenyColumnMeta getMeta( int columnIndex ) throws SQLException { + try { + return columnMetas.get( columnIndex - 1 ); + } catch ( IndexOutOfBoundsException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Column index out of bounds", e ); + } + } + + + public int getColumnIndexFromLabel( String columnLabel ) throws SQLException { + Integer columnIndex = columnIndexes.get( columnLabel ); + if ( columnIndex == null ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.COLUMN_NOT_EXISTS, "Invalid column label: " + columnLabel ); + } + return columnIndex; + } + + + @Override + public int getColumnCount() throws SQLException { + 
return columnMetas.size(); + } + + + @Override + public boolean isAutoIncrement( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).isAutoIncrement(); + } + + + @Override + public boolean isCaseSensitive( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).isCaseSensitive(); + } + + + @Override + public boolean isSearchable( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).isSearchable(); + } + + + @Override + public boolean isCurrency( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).isCurrency(); + } + + + @Override + public int isNullable( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getNullable(); + } + + + @Override + public boolean isSigned( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).isSigned(); + } + + + @Override + public int getColumnDisplaySize( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getDisplaySize(); + } + + + @Override + public String getColumnLabel( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getColumnLabel(); + } + + + @Override + public String getColumnName( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getColumnName(); + } + + + @Override + public String getSchemaName( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getNamespace(); + } + + + @Override + public int getPrecision( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getPrecision(); + } + + + @Override + public int getScale( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getScale(); + } + + + @Override + public String getTableName( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getTableName(); + } + + + @Override + public String getCatalogName( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getCatalogName(); + } + + + @Override + public int 
getColumnType( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getSqlType(); + } + + + @Override + public String getColumnTypeName( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getPolyphenyFieldTypeName(); + } + + + @Override + public boolean isReadOnly( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).isReadOnly(); + } + + + @Override + public boolean isWritable( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).isWritable(); + } + + + @Override + public boolean isDefinitelyWritable( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).isDefinitelyWritable(); + } + + + @Override + public String getColumnClassName( int columnIndex ) throws SQLException { + return getMeta( columnIndex ).getColumnClassName(); + } + + + @Override + public T unwrap( Class aClass ) throws SQLException { + if ( aClass.isInstance( this ) ) { + return aClass.cast( this ); + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.WRAPPER_INCORRECT_TYPE, "Not a wrapper for " + aClass ); + } + + + @Override + public boolean isWrapperFor( Class aClass ) { + return aClass.isInstance( this ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/multimodel/DocumentResult.java b/src/main/java/org/polypheny/jdbc/multimodel/DocumentResult.java new file mode 100644 index 00000000..f6f9d03e --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/multimodel/DocumentResult.java @@ -0,0 +1,116 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.multimodel; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import org.polypheny.jdbc.PolyConnection; +import org.polypheny.jdbc.PrismInterfaceClient; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; +import org.polypheny.jdbc.properties.PropertyUtils; +import org.polypheny.jdbc.types.PolyDocument; +import org.polypheny.prism.DocumentFrame; +import org.polypheny.prism.Frame; +import org.polypheny.prism.Frame.ResultCase; + +public class DocumentResult extends Result implements Iterable { + + private final PolyStatement polyStatement; + private final List documents; + private boolean isFullyFetched; + + + public DocumentResult( Frame frame, PolyStatement polyStatement ) { + super( ResultType.DOCUMENT ); + this.polyStatement = polyStatement; + this.isFullyFetched = frame.getIsLast(); + this.documents = new ArrayList<>(); + addDocuments( frame.getDocumentFrame() ); + } + + + private void addDocuments( DocumentFrame documentFrame ) { + documentFrame.getDocumentsList().forEach( d -> documents.add( new PolyDocument( d ) ) ); + } + + + private void fetchMore() throws PrismInterfaceServiceException { + int id = polyStatement.getStatementId(); + int timeout = getPolyphenyConnection().getTimeout(); + Frame frame = getPrismInterfaceClient().fetchResult( id, timeout, PropertyUtils.getDEFAULT_FETCH_SIZE() ); + if ( frame.getResultCase() != ResultCase.DOCUMENT_FRAME ) { + throw new 
PrismInterfaceServiceException( + PrismInterfaceErrors.RESULT_TYPE_INVALID, + "Statement returned a result of illegal type " + frame.getResultCase() + ); + } + isFullyFetched = frame.getIsLast(); + addDocuments( frame.getDocumentFrame() ); + } + + + private PolyConnection getPolyphenyConnection() { + return polyStatement.getConnection(); + } + + + private PrismInterfaceClient getPrismInterfaceClient() { + return getPolyphenyConnection().getPrismInterfaceClient(); + } + + + @Override + public Iterator iterator() { + return new DocumentIterator(); + } + + + class DocumentIterator implements Iterator { + + int index = -1; + + + @Override + public boolean hasNext() { + if ( index + 1 >= documents.size() ) { + if ( isFullyFetched ) { + return false; + } + try { + fetchMore(); + } catch ( PrismInterfaceServiceException e ) { + throw new RuntimeException( e ); + } + } + return index + 1 < documents.size(); + } + + + @Override + public PolyDocument next() { + if ( !hasNext() ) { + throw new NoSuchElementException( "There are no more documents" ); + } + return documents.get( ++index ); + } + + } + +} diff --git a/src/main/java/org/polypheny/jdbc/multimodel/PolyRow.java b/src/main/java/org/polypheny/jdbc/multimodel/PolyRow.java new file mode 100644 index 00000000..5fda1edd --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/multimodel/PolyRow.java @@ -0,0 +1,60 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.multimodel; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import org.polypheny.jdbc.types.TypedValue; +import org.polypheny.prism.Row; + +public class PolyRow { + + List values; + + + public PolyRow( List value ) { + this.values = new ArrayList<>( value ); + } + + + public PolyRow( TypedValue... value ) { + this( Arrays.asList( value ) ); + } + + + public int getColumnCount() { + return values.size(); + } + + + public TypedValue getValue( int columnIndex ) { + return values.get( columnIndex ); + } + + + public static PolyRow of( E... values ) { + return new PolyRow( values ); + } + + + public static PolyRow fromProto( Row protoRow ) { + return new PolyRow( protoRow.getValuesList().stream().map( TypedValue::new ).collect( Collectors.toList() ) ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/multimodel/PolyStatement.java b/src/main/java/org/polypheny/jdbc/multimodel/PolyStatement.java new file mode 100644 index 00000000..debf74a3 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/multimodel/PolyStatement.java @@ -0,0 +1,97 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.multimodel; + +import lombok.Getter; +import org.polypheny.jdbc.PolyConnection; +import org.polypheny.jdbc.PrismInterfaceClient; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; +import org.polypheny.jdbc.utils.CallbackQueue; +import org.polypheny.prism.Frame; +import org.polypheny.prism.Response; +import org.polypheny.prism.StatementResponse; + +public class PolyStatement { + + private static final long SCALAR_NOT_SET = -1; + private static final int NO_STATEMENT_ID = -1; + + @Getter + private PolyConnection connection; + @Getter + private int statementId; + + + private void resetStatement() { + statementId = NO_STATEMENT_ID; + } + + + private PrismInterfaceClient getPrismInterfaceClient() { + return connection.getPrismInterfaceClient(); + } + + + private Result getResultFromFrame( Frame frame ) throws PrismInterfaceServiceException { + switch ( frame.getResultCase() ) { + case RELATIONAL_FRAME: + return new RelationalResult( frame, this ); + case DOCUMENT_FRAME: + return new DocumentResult( frame, this ); + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.RESULT_TYPE_INVALID, "Statement produced unknown result type" ); + } + + + public PolyStatement( PolyConnection polyConnection ) { + this.connection = polyConnection; + } + + + public Result execute( String namespaceName, String languageName, String statement ) throws PrismInterfaceServiceException { + resetStatement(); + CallbackQueue callback = new CallbackQueue<>( Response::getStatementResponse ); + int timeout = connection.getTimeout(); + getPrismInterfaceClient().executeUnparameterizedStatement( + namespaceName, + languageName, + statement, + callback, + timeout + ); + while ( true ) { + StatementResponse response = callback.takeNext(); + if ( statementId == NO_STATEMENT_ID ) { + statementId = response.getStatementId(); + } + if ( !response.hasResult() ) { + continue; + } + try { + 
callback.awaitCompletion(); + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Awaiting completion of api call failed.", e ); + } + if ( !response.getResult().hasFrame() ) { + return new ScalarResult( response.getResult().getScalar() ); + } + return getResultFromFrame( response.getResult().getFrame() ); + } + } + +} diff --git a/src/main/java/org/polypheny/jdbc/multimodel/RelationalResult.java b/src/main/java/org/polypheny/jdbc/multimodel/RelationalResult.java new file mode 100644 index 00000000..4198591e --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/multimodel/RelationalResult.java @@ -0,0 +1,115 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.multimodel; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import org.polypheny.jdbc.PolyConnection; +import org.polypheny.jdbc.PrismInterfaceClient; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; +import org.polypheny.jdbc.properties.PropertyUtils; +import org.polypheny.prism.Frame; +import org.polypheny.prism.Frame.ResultCase; +import org.polypheny.prism.RelationalFrame; + +public class RelationalResult extends Result implements Iterable { + + private final PolyStatement polyStatement; + private final List rows; + private boolean isFullyFetched; + + + public RelationalResult( Frame frame, PolyStatement polyStatement ) throws PrismInterfaceServiceException { + super( ResultType.RELATIONAL ); + this.polyStatement = polyStatement; + this.isFullyFetched = frame.getIsLast(); + this.rows = new ArrayList<>(); + addRows( frame.getRelationalFrame() ); + } + + + private void addRows( RelationalFrame relationalFrame ) { + relationalFrame.getRowsList().forEach( d -> rows.add( PolyRow.fromProto( d ) ) ); + } + + + private void fetchMore() throws PrismInterfaceServiceException { + int id = polyStatement.getStatementId(); + int timeout = getPolyphenyConnection().getTimeout(); + Frame frame = getPrismInterfaceClient().fetchResult( id, timeout, PropertyUtils.getDEFAULT_FETCH_SIZE() ); + if ( frame.getResultCase() != ResultCase.DOCUMENT_FRAME ) { + throw new PrismInterfaceServiceException( + PrismInterfaceErrors.RESULT_TYPE_INVALID, + "Statement returned a result of illegal type " + frame.getResultCase() + ); + } + isFullyFetched = frame.getIsLast(); + addRows( frame.getRelationalFrame() ); + } + + + private PolyConnection getPolyphenyConnection() { + return polyStatement.getConnection(); + } + + + private PrismInterfaceClient getPrismInterfaceClient() { + return 
getPolyphenyConnection().getPrismInterfaceClient(); + } + + + @Override + public Iterator iterator() { + return new RelationalResult.RowIterator(); + } + + + class RowIterator implements Iterator { + + int index = -1; + + + @Override + public boolean hasNext() { + if ( index + 1 >= rows.size() ) { + if ( isFullyFetched ) { + return false; + } + try { + fetchMore(); + } catch ( PrismInterfaceServiceException e ) { + throw new RuntimeException( e ); + } + } + return index + 1 < rows.size(); + } + + + @Override + public PolyRow next() { + if ( !hasNext() ) { + throw new NoSuchElementException( "There are no more documents" ); + } + return rows.get( ++index ); + } + + } + +} diff --git a/src/main/java/org/polypheny/jdbc/multimodel/Result.java b/src/main/java/org/polypheny/jdbc/multimodel/Result.java new file mode 100644 index 00000000..eb9db433 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/multimodel/Result.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.multimodel; + +import lombok.Getter; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; + +@Getter +public abstract class Result { + + private final ResultType resultType; + + + public Result( ResultType resultType ) { + this.resultType = resultType; + } + + + public T unwrap( Class aClass ) throws PrismInterfaceServiceException { + if ( aClass.isInstance( this ) ) { + return aClass.cast( this ); + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.WRAPPER_INCORRECT_TYPE, "Not a wrapper for " + aClass ); + } + + + public enum ResultType { + RELATIONAL, + DOCUMENT, + SCALAR + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcResultSet.java b/src/main/java/org/polypheny/jdbc/multimodel/ScalarResult.java similarity index 65% rename from src/main/java/org/polypheny/jdbc/PolyphenyJdbcResultSet.java rename to src/main/java/org/polypheny/jdbc/multimodel/ScalarResult.java index e0bbf369..9ab92d25 100644 --- a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcResultSet.java +++ b/src/main/java/org/polypheny/jdbc/multimodel/ScalarResult.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2020 The Polypheny Project + * Copyright 2019-2024 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,9 +14,19 @@ * limitations under the License. 
*/ -package org.polypheny.jdbc; +package org.polypheny.jdbc.multimodel; +import lombok.Getter; -public interface PolyphenyJdbcResultSet extends java.sql.ResultSet { +public class ScalarResult extends Result { + + @Getter + long scalar; + + + public ScalarResult( long scalar ) { + super( ResultType.SCALAR ); + this.scalar = scalar; + } } diff --git a/src/main/java/org/polypheny/jdbc/properties/DriverProperties.java b/src/main/java/org/polypheny/jdbc/properties/DriverProperties.java new file mode 100644 index 00000000..ba92716b --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/properties/DriverProperties.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.properties; + +import java.util.TimeZone; +import lombok.Getter; +import org.polypheny.jdbc.utils.VersionUtil; + +public class DriverProperties { + + @Getter + private static final String DRIVER_NAME = "JDBC driver for Polypheny"; + @Getter + private static final int DRIVER_MAJOR_VERSION = VersionUtil.MAJOR; + @Getter + private static final int DRIVER_MINOR_VERSION = VersionUtil.MINOR; + @Getter + private static final String DRIVER_VERSION_QUALIFIER = VersionUtil.QUALIFIER; + @Getter + private static final String DRIVER_VERSION = VersionUtil.VERSION_STRING; + @Getter + private static final boolean JDBC_COMPLIANT = true; + @Getter + private static final String DRIVER_URL_SCHEMA = "jdbc:polypheny:"; + @Getter + private static final TimeZone DEFAULT_TIMEZONE = TimeZone.getDefault(); + @Getter + // This feature is for testing purposes only! Always set to false before release! + private static final boolean BACKDOOR_ENABLED = false; + @Getter + private static final String BACKDOR_STRING = "dasKannKeinEmptyString"; + +} diff --git a/src/main/java/org/polypheny/jdbc/properties/PolyphenyConnectionProperties.java b/src/main/java/org/polypheny/jdbc/properties/PolyphenyConnectionProperties.java new file mode 100644 index 00000000..fa17e561 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/properties/PolyphenyConnectionProperties.java @@ -0,0 +1,210 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
package org.polypheny.jdbc.properties;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.TimeZone;
import lombok.Getter;
import org.polypheny.jdbc.ConnectionString;
import org.polypheny.jdbc.PrismInterfaceClient;
import org.polypheny.jdbc.PrismInterfaceErrors;
import org.polypheny.jdbc.PrismInterfaceServiceException;

/**
 * Mutable bag of connection-level properties. Values start at the driver defaults, are
 * overridden by parameters parsed from the connection URL, and are pushed to the server
 * via {@link #sync()} whenever a server-relevant property changes.
 */
public class PolyphenyConnectionProperties {

    /**
     * Builds connection properties from driver defaults overridden by URL parameters.
     *
     * @param connectionString parsed connection URL supplying the parameter map
     * @param prismInterfaceClient client used to transmit property changes to the server
     * @throws SQLException if a holdability or isolation parameter has an unsupported value
     */
    public PolyphenyConnectionProperties( ConnectionString connectionString, PrismInterfaceClient prismInterfaceClient ) throws SQLException {
        this.prismInterfaceClient = prismInterfaceClient;
        this.isAutoCommit = PropertyUtils.isDEFAULT_AUTOCOMMIT();
        this.isReadOnly = PropertyUtils.isDEFAULT_READ_ONLY();
        this.resultSetHoldability = PropertyUtils.getDEFAULT_RESULTSET_HOLDABILITY();
        this.networkTimeout = PropertyUtils.getDEFAULT_NETWORK_TIMEOUT();
        this.transactionIsolation = PropertyUtils.getDEFAULT_TRANSACTION_ISOLATION();
        this.calendar = Calendar.getInstance( DriverProperties.getDEFAULT_TIMEZONE(), Locale.ROOT );
        this.catalogName = null;
        this.isStrict = true;

        // FIX: restored the stripped generic type; the parameter map is keyed and valued by strings.
        Map<String, String> parameters = connectionString.getParameters();
        Optional.ofNullable( parameters.get( PropertyUtils.getUSERNAME_KEY() ) ).ifPresent( p -> this.username = p );
        Optional.ofNullable( parameters.get( PropertyUtils.getPASSWORD_KEY() ) ).ifPresent( p -> this.password = p );
        Optional.ofNullable( parameters.get( PropertyUtils.getAUTOCOMMIT_KEY() ) ).ifPresent( p -> this.isAutoCommit = Boolean.parseBoolean( p ) );
        Optional.ofNullable( parameters.get( PropertyUtils.getREAD_ONLY_KEY() ) ).ifPresent( p -> this.isReadOnly = Boolean.parseBoolean( p ) );
        Optional.ofNullable( parameters.get( PropertyUtils.getNETWORK_TIMEOUT_KEY() ) ).ifPresent( p -> this.networkTimeout = Integer.parseInt( p ) );
        Optional.ofNullable( parameters.get( PropertyUtils.getNAMESPACE_KEY() ) ).ifPresent( p -> this.namespaceName = p );
        Optional.ofNullable( parameters.get( PropertyUtils.getTIMEZONE_KEY() ) ).ifPresent( p -> this.calendar = Calendar.getInstance( TimeZone.getTimeZone( p ), Locale.ROOT ) );
        Optional.ofNullable( parameters.get( PropertyUtils.getSTRICT_MODE_KEY() ) ).ifPresent( p -> this.isStrict = Boolean.parseBoolean( p ) );

        if ( parameters.containsKey( PropertyUtils.getRESULT_SET_HOLDABILITY_KEY() ) ) {
            int resultSetHoldability = parseResultSetHoldability( parameters.get( PropertyUtils.getRESULT_SET_HOLDABILITY_KEY() ) );
            if ( !PropertyUtils.isValidResultSetHoldability( resultSetHoldability ) ) {
                throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Result set holdability not supported:" + resultSetHoldability );
            }
            this.resultSetHoldability = resultSetHoldability;
        }
        if ( parameters.containsKey( PropertyUtils.getTRANSACTION_ISOLATION_KEY() ) ) {
            int transactionIsolation = parseTransactionIsolation( parameters.get( PropertyUtils.getTRANSACTION_ISOLATION_KEY() ) );
            if ( !PropertyUtils.isValidIsolationLevel( transactionIsolation ) ) {
                throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Transaction isolation level not supported: " + transactionIsolation );
            }
            this.transactionIsolation = transactionIsolation;
        }
    }


    // Maps the textual isolation parameter value to the java.sql.Connection constant.
    private static int parseTransactionIsolation( String string ) throws SQLException {
        switch ( string ) {
            case "COMMITTED":
                return Connection.TRANSACTION_READ_COMMITTED;
            case "DIRTY":
                return Connection.TRANSACTION_READ_UNCOMMITTED;
            case "SERIALIZABLE":
                return Connection.TRANSACTION_SERIALIZABLE;
            case "REPEATABLE_READ":
                return Connection.TRANSACTION_REPEATABLE_READ;
        }
        throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Invalid value for transaction isolation: " + string );
    }


    // Maps the textual holdability parameter value to the java.sql.ResultSet constant.
    // FIX: made static for consistency with parseTransactionIsolation — it uses no instance state.
    private static int parseResultSetHoldability( String string ) throws SQLException {
        switch ( string ) {
            case "HOLD":
                return ResultSet.HOLD_CURSORS_OVER_COMMIT;
            case "CLOSE":
                return ResultSet.CLOSE_CURSORS_AT_COMMIT;
        }
        throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Invalid value for result set holdability: " + string );
    }


    @Getter
    private PrismInterfaceClient prismInterfaceClient;
    @Getter
    private String username;
    @Getter
    private String password;
    @Getter
    private boolean isAutoCommit;
    @Getter
    private boolean isReadOnly;
    @Getter
    private int resultSetHoldability;
    @Getter
    private int networkTimeout;
    @Getter
    private int transactionIsolation;
    @Getter
    // not transmitted to server
    private String catalogName;
    @Getter
    private String namespaceName;
    @Getter
    private Calendar calendar;
    @Getter
    private boolean isStrict;


    public void setAutoCommit( boolean isAutoCommit ) throws PrismInterfaceServiceException {
        this.isAutoCommit = isAutoCommit;
        sync();
    }


    public void setReadOnly( boolean isReadOnly ) throws PrismInterfaceServiceException {
        this.isReadOnly = isReadOnly;
        sync();
    }


    public void setResultSetHoldability( int resultSetHoldability ) throws SQLException {
        if ( !PropertyUtils.isValidResultSetHoldability( resultSetHoldability ) ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Invalid value for result set holdability" );
        }
        this.resultSetHoldability = resultSetHoldability;
        // not transmitted to server -> no sync()
    }


    public void setNetworkTimeout( int networkTimeout ) throws PrismInterfaceServiceException {
        this.networkTimeout = networkTimeout;
        sync();
    }


    public void setTransactionIsolation( int transactionIsolation ) throws SQLException {
        if ( !PropertyUtils.isValidIsolationLevel( transactionIsolation ) ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Invalid value for transaction isolation" );
        }
        this.transactionIsolation = transactionIsolation;
        sync();
    }


    public void setCatalogName( String catalogName ) {
        this.catalogName = catalogName;
        // not transmitted to server -> no sync()
    }


    public void setNamespaceName( String namespaceName ) throws PrismInterfaceServiceException {
        this.namespaceName = namespaceName;
        sync();
    }


    // Pushes the current property values to the server.
    private void sync() throws PrismInterfaceServiceException {
        prismInterfaceClient.setConnectionProperties( this, getNetworkTimeout() );
    }


    /**
     * Derives statement properties using the driver defaults for type and concurrency.
     */
    public PolyphenyStatementProperties toStatementProperties() throws SQLException {
        return toStatementProperties(
                PropertyUtils.getDEFAULT_RESULTSET_TYPE(),
                PropertyUtils.getDEFAULT_RESULTSET_CONCURRENCY()
        );
    }


    /**
     * Derives statement properties with the connection's current holdability.
     */
    public PolyphenyStatementProperties toStatementProperties( int resultSetType, int resultSetConcurrency ) throws SQLException {
        return toStatementProperties( resultSetType, resultSetConcurrency, resultSetHoldability );
    }


    /**
     * Derives a fully populated set of statement properties from this connection's state
     * and the driver defaults.
     */
    public PolyphenyStatementProperties toStatementProperties( int resultSetType, int resultSetConcurrency, int resultSetHoldability ) throws SQLException {
        PolyphenyStatementProperties properties = new PolyphenyStatementProperties();
        properties.setCalendar( calendar );
        properties.setPrismInterfaceClient( prismInterfaceClient );
        properties.setQueryTimeoutSeconds( PropertyUtils.getDEFAULT_QUERY_TIMEOUT_SECONDS() );
        properties.setResultSetType( resultSetType );
        properties.setResultSetConcurrency( resultSetConcurrency );
        properties.setResultSetHoldability( resultSetHoldability );
        properties.setFetchSize( PropertyUtils.getDEFAULT_FETCH_SIZE() );
        properties.setFetchDirection( PropertyUtils.getDEFAULT_FETCH_DIRECTION() );
        properties.setMaxFieldSize( PropertyUtils.getDEFAULT_MAX_FIELD_SIZE() );
        properties.setLargeMaxRows( PropertyUtils.getDEFAULT_LARGE_MAX_ROWS() );
        properties.setDoesEscapeProcessing( PropertyUtils.isDEFAULT_DOING_ESCAPE_PROCESSING() );
        properties.setIsPoolable( PropertyUtils.isDEFAULT_STATEMENT_POOLABLE() );
        properties.setCloseOnCompletion( false );
        return properties;
    }

}
+ */ + +package org.polypheny.jdbc.properties; + +import java.sql.ResultSet; +import java.util.Calendar; +import java.util.Locale; +import java.util.TimeZone; +import lombok.Getter; +import lombok.Setter; + +public class PolyphenyResultSetProperties { + + @Getter + @Setter + private int resultSetType; + @Getter + @Setter + private int resultSetConcurrency; + @Getter + @Setter + private int resultSetHoldability; + @Getter + @Setter + private int fetchDirection; + @Getter + private int statementFetchSize; + private int resultSetFetchSize; + @Getter + @Setter + private int maxFieldSize; + @Getter + @Setter + private long largeMaxRows; + + @Getter + @Setter + private Calendar calendar; + + + public boolean isReadOnly() { + return resultSetConcurrency == ResultSet.CONCUR_READ_ONLY; + } + + + public static PolyphenyResultSetProperties forMetaResultSet() { + PolyphenyResultSetProperties properties = new PolyphenyResultSetProperties(); + properties.setResultSetType( ResultSet.TYPE_SCROLL_INSENSITIVE ); + properties.setResultSetConcurrency( ResultSet.CONCUR_READ_ONLY ); + properties.setResultSetHoldability( ResultSet.CLOSE_CURSORS_AT_COMMIT ); + properties.setFetchDirection( ResultSet.FETCH_FORWARD ); + properties.setStatementFetchSize( 0 ); + properties.setMaxFieldSize( 0 ); + properties.setLargeMaxRows( 0 ); + properties.setCalendar( Calendar.getInstance( TimeZone.getDefault(), Locale.ROOT ) ); + return properties; + } + + + public void setStatementFetchSize( int fetchSize ) { + statementFetchSize = fetchSize; + resultSetFetchSize = fetchSize; + } + + + public void setFetchSize( int fetchSize ) { + resultSetFetchSize = fetchSize; + } + + + public int getFetchSize() { + return resultSetFetchSize; + } + +} diff --git a/src/main/java/org/polypheny/jdbc/properties/PolyphenyStatementProperties.java b/src/main/java/org/polypheny/jdbc/properties/PolyphenyStatementProperties.java new file mode 100644 index 00000000..d9bcc92f --- /dev/null +++ 
package org.polypheny.jdbc.properties;

import java.sql.SQLException;
import java.util.Calendar;
import lombok.Getter;
import lombok.Setter;
import org.polypheny.jdbc.PolyphenyStatement;
import org.polypheny.jdbc.PrismInterfaceClient;
import org.polypheny.jdbc.PrismInterfaceErrors;
import org.polypheny.jdbc.PrismInterfaceServiceException;

/**
 * Properties attached to a single statement. Result set type, concurrency, and holdability
 * are write-once (guarded against modification after first assignment); the remaining
 * properties are freely mutable with range validation.
 */
public class PolyphenyStatementProperties {

    // Sentinel marking the write-once properties as not yet assigned.
    private static final int UNSET_INT = -1;

    @Setter
    PrismInterfaceClient prismInterfaceClient;
    PolyphenyStatement polyphenyStatement;
    @Getter
    private int queryTimeoutSeconds;
    @Getter
    private int resultSetType = UNSET_INT;
    @Getter
    private int resultSetConcurrency = UNSET_INT;
    @Getter
    private int resultSetHoldability = UNSET_INT;
    @Getter
    private int fetchSize;
    @Getter
    private int fetchDirection;
    @Getter
    private int maxFieldSize;
    @Getter
    private long largeMaxRows;
    @Getter
    private boolean doesEscapeProcessing;
    @Getter
    private boolean isPoolable;
    @Getter
    @Setter
    private Calendar calendar;
    @Getter
    @Setter
    private boolean isCloseOnCompletion;


    /** Associates this property set with its statement; may only be done once. */
    public void setPolyphenyStatement( PolyphenyStatement polyphenyStatement ) throws SQLException {
        if ( this.polyphenyStatement != null ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Can't change polyphenyStatement" + polyphenyStatement );
        }
        this.polyphenyStatement = polyphenyStatement;
    }


    public void setQueryTimeoutSeconds( int queryTimeoutSeconds ) throws SQLException {
        if ( queryTimeoutSeconds < 0 ) {
            // BUG FIX: message said "Illegal value for max" (copy-paste); name the actual property.
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for query timeout" );
        }
        this.queryTimeoutSeconds = queryTimeoutSeconds;
    }


    public void setResultSetType( int resultSetType ) throws SQLException {
        if ( this.resultSetType != UNSET_INT ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Can't change result set type" );
        }
        if ( !PropertyUtils.isValidResultSetType( resultSetType ) ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for result set type" );
        }
        this.resultSetType = resultSetType;
    }


    public void setResultSetConcurrency( int resultSetConcurrency ) throws SQLException {
        if ( this.resultSetConcurrency != UNSET_INT ) {
            // BUG FIX: message said "result set type" (copy-paste from setResultSetType).
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Can't change result set concurrency" );
        }
        if ( !PropertyUtils.isValidResultSetConcurrency( resultSetConcurrency ) ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for result set concurrency" );
        }
        this.resultSetConcurrency = resultSetConcurrency;
    }


    public void setResultSetHoldability( int resultSetHoldability ) throws SQLException {
        if ( this.resultSetHoldability != UNSET_INT ) {
            // BUG FIX: the write-once guard used VALUE_ILLEGAL and the "result set type"
            // message; this is an illegal operation on holdability, matching the siblings.
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Can't change result set holdability" );
        }
        if ( !PropertyUtils.isValidResultSetHoldability( resultSetHoldability ) ) {
            // BUG FIX: message said "result set concurrency" (copy-paste).
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for result set holdability" );
        }
        this.resultSetHoldability = resultSetHoldability;
        // not transmitted to server -> no sync()
    }


    public void setFetchSize( int fetchSize ) throws SQLException {
        if ( fetchSize < 0 ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for fetch size" );
        }
        this.fetchSize = fetchSize;
    }


    public void setFetchDirection( int fetchDirection ) throws SQLException {
        if ( PropertyUtils.isInvalidFetchDirection( fetchDirection ) ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for fetch direction" );
        }
        this.fetchDirection = fetchDirection;
    }


    public void setMaxFieldSize( int maxFieldSize ) throws SQLException {
        if ( maxFieldSize < 0 ) {
            // BUG FIX: used STREAM_ERROR for a plain value-range violation; VALUE_ILLEGAL
            // matches every other range check in this class.
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for max field size" );
        }
        this.maxFieldSize = maxFieldSize;
    }


    public void setLargeMaxRows( long largeMaxRows ) throws SQLException {
        if ( largeMaxRows < 0 ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for large max rows" );
        }
        this.largeMaxRows = largeMaxRows;
    }


    public void setDoesEscapeProcessing( boolean doesEscapeProcessing ) throws SQLException {
        this.doesEscapeProcessing = doesEscapeProcessing;
    }


    public void setIsPoolable( boolean isPoolable ) throws SQLException {
        this.isPoolable = isPoolable;
    }


    /** Derives the result set properties a result set created by this statement should use. */
    public PolyphenyResultSetProperties toResultSetProperties() {
        PolyphenyResultSetProperties properties = new PolyphenyResultSetProperties();
        properties.setResultSetType( resultSetType );
        properties.setResultSetConcurrency( resultSetConcurrency );
        properties.setResultSetHoldability( resultSetHoldability );
        properties.setFetchDirection( fetchDirection );
        properties.setStatementFetchSize( fetchSize );
        properties.setMaxFieldSize( maxFieldSize );
        properties.setLargeMaxRows( largeMaxRows );
        properties.setCalendar( calendar );
        return properties;
    }

}
package org.polypheny.jdbc.properties;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import lombok.Getter;
import org.polypheny.jdbc.PrismInterfaceErrors;
import org.polypheny.jdbc.PrismInterfaceServiceException;

/**
 * Driver defaults, connection-URL parameter keys, and validation helpers for the
 * JDBC property values (result set type/concurrency/holdability, isolation levels,
 * fetch directions, auto-generated-key modes).
 */
public class PropertyUtils {

    // Default values for JDBC properties.
    @Getter
    private static final int DEFAULT_TRANSACTION_ISOLATION = Connection.TRANSACTION_READ_COMMITTED;
    @Getter
    private static final int DEFAULT_NETWORK_TIMEOUT = 0;
    @Getter
    private static final int DEFAULT_QUERY_TIMEOUT_SECONDS = 0;
    @Getter
    private static final int DEFAULT_FETCH_SIZE = 100;
    @Getter
    private static final int DEFAULT_FETCH_DIRECTION = ResultSet.FETCH_FORWARD;
    @Getter
    private static final int DEFAULT_RESULTSET_TYPE = ResultSet.TYPE_FORWARD_ONLY;
    @Getter
    private static final int DEFAULT_RESULTSET_CONCURRENCY = ResultSet.CONCUR_READ_ONLY;
    @Getter
    private static final int DEFAULT_MAX_FIELD_SIZE = 0;
    @Getter
    private static final long DEFAULT_LARGE_MAX_ROWS = 0;
    @Getter
    private static final boolean DEFAULT_DOING_ESCAPE_PROCESSING = true;
    @Getter
    // Pooling not supported. Default still needed for various jdbc methods.
    private static final boolean DEFAULT_STATEMENT_POOLABLE = false;
    @Getter
    // Pooling not supported. Default still needed for various jdbc methods.
    private static final boolean DEFAULT_PREPARED_STATEMENT_POOLABLE = false;
    @Getter
    // Pooling not supported. Default still needed for various jdbc methods.
    private static final boolean DEFAULT_CALLABLE_STATEMENT_POOLABLE = false;
    @Getter
    private static final boolean DEFAULT_AUTOCOMMIT = true;
    @Getter
    private static final boolean DEFAULT_READ_ONLY = false;
    @Getter
    private static final int DEFAULT_RESULTSET_HOLDABILITY = ResultSet.CLOSE_CURSORS_AT_COMMIT;
    @Getter
    private static final String DEFAULT_HOST = "localhost";
    @Getter
    private static final int DEFAULT_PORT = 20590;
    @Getter
    private static final String SQL_LANGUAGE_NAME = "sql";
    // Keys for properties
    @Getter
    private static final String USERNAME_KEY = "user";
    @Getter
    @java.lang.SuppressWarnings(
            "squid:S2068"
            // Credentials should not be hard-coded: 'password' detected
            // Justification: "password" is here the key to set the password in the connection parameters.
    )
    private static final String PASSWORD_KEY = "password";
    @Getter
    private static final String NAMESPACE_KEY = "namespace";
    @Getter
    private static final String AUTOCOMMIT_KEY = "autocommit";
    @Getter
    private static final String READ_ONLY_KEY = "readonly";
    @Getter
    private static final String RESULT_SET_HOLDABILITY_KEY = "holdability";
    @Getter
    private static final String NETWORK_TIMEOUT_KEY = "nwtimeout";
    @Getter
    private static final String TRANSACTION_ISOLATION_KEY = "isolation";
    @Getter
    private static final String TIMEZONE_KEY = "timezone";
    @Getter
    private static final String STRICT_MODE_KEY = "strict";


    /**
     * Maps a java.sql.ResultSet holdability constant to its connection-URL token.
     *
     * @throws PrismInterfaceServiceException if the value is not a holdability constant
     */
    public static String getHoldabilityName( int resultSetHoldability ) throws PrismInterfaceServiceException {
        switch ( resultSetHoldability ) {
            case ResultSet.CLOSE_CURSORS_AT_COMMIT:
                return "CLOSE";
            case ResultSet.HOLD_CURSORS_OVER_COMMIT:
                return "HOLD";
        }
        throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "The passed integer value does not match a result holdability." );
    }


    /**
     * Maps a java.sql.Connection isolation constant to its connection-URL token.
     *
     * @throws PrismInterfaceServiceException if the value is not an isolation constant
     */
    public static String getTransactionIsolationName( int transactionIsolation ) throws PrismInterfaceServiceException {
        switch ( transactionIsolation ) {
            case Connection.TRANSACTION_READ_UNCOMMITTED:
                return "DIRTY";
            case Connection.TRANSACTION_READ_COMMITTED:
                return "COMMITTED";
            case Connection.TRANSACTION_SERIALIZABLE:
                return "SERIALIZABLE";
            case Connection.TRANSACTION_REPEATABLE_READ:
                return "REPEATABLE_READ";
        }
        throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "The passed integer value does not match a transaction isolation level." );
    }


    // Methods for input checking
    // FIX: declared final (it is only populated in the static initializer) and restored the
    // stripped generic type; keys are result set types, values the concurrencies they support.
    private static final Map<Integer, List<Integer>> SUPPORTED_CONCURRENCIES = new HashMap<>();


    static {
        SUPPORTED_CONCURRENCIES.put( ResultSet.TYPE_FORWARD_ONLY, Collections.singletonList( ResultSet.CONCUR_READ_ONLY ) );
        SUPPORTED_CONCURRENCIES.put( ResultSet.TYPE_SCROLL_INSENSITIVE, Collections.singletonList( ResultSet.CONCUR_READ_ONLY ) );
    }


    private static final Set<Integer> RESULT_SET_TYPES = new HashSet<>();


    static {
        RESULT_SET_TYPES.add( ResultSet.TYPE_FORWARD_ONLY );
        RESULT_SET_TYPES.add( ResultSet.TYPE_SCROLL_INSENSITIVE );
    }


    private static final Set<Integer> RESULT_SET_CONCURRENCIES = new HashSet<>();


    static {
        RESULT_SET_CONCURRENCIES.add( ResultSet.CONCUR_READ_ONLY );
        RESULT_SET_CONCURRENCIES.add( ResultSet.CONCUR_UPDATABLE );
    }


    private static final Set<Integer> RESULT_SET_HOLDABILITIES = new HashSet<>();


    static {
        RESULT_SET_HOLDABILITIES.add( ResultSet.CLOSE_CURSORS_AT_COMMIT );
    }


    private static final Set<Integer> TRANSACTION_ISOLATION_LEVELS = new HashSet<>();


    static {
        TRANSACTION_ISOLATION_LEVELS.add( Connection.TRANSACTION_READ_COMMITTED );
    }


    private static final Set<Integer> AUTO_GENERATED_KEYS = new HashSet<>();


    static {
        AUTO_GENERATED_KEYS.add( Statement.RETURN_GENERATED_KEYS );
        AUTO_GENERATED_KEYS.add( Statement.NO_GENERATED_KEYS );
    }


    private static final Set<Integer> FETCH_DIRECTIONS = new HashSet<>();


    static {
        FETCH_DIRECTIONS.add( ResultSet.FETCH_FORWARD );
    }


    /** Checks that the given concurrency is supported for the given result set type. */
    public static boolean isValidResultSetConcurrency( int resultSetType, int resultSetConcurrency ) {
        List<Integer> supportedConcurrencies = SUPPORTED_CONCURRENCIES.get( resultSetType );
        if ( supportedConcurrencies == null ) {
            return false;
        }
        return supportedConcurrencies.contains( resultSetConcurrency );
    }


    public static boolean isValidResultSetType( int resultSetType ) {
        return RESULT_SET_TYPES.contains( resultSetType );
    }


    public static boolean isValidResultSetConcurrency( int resultSetConcurrency ) {
        return RESULT_SET_CONCURRENCIES.contains( resultSetConcurrency );
    }


    public static boolean isValidResultSetHoldability( int resultSetHoldability ) {
        return RESULT_SET_HOLDABILITIES.contains( resultSetHoldability );
    }


    public static boolean isValidIsolationLevel( int transactionIsolationLevel ) {
        return TRANSACTION_ISOLATION_LEVELS.contains( transactionIsolationLevel );
    }


    /** Validates type, concurrency, their combination, and holdability; throws on the first violation. */
    public static void throwIfInvalid( int resultSetType, int resultSetConcurrency, int resultSetHoldability ) throws SQLException {
        throwIfInvalid( resultSetType, resultSetConcurrency );
        if ( !isValidResultSetHoldability( resultSetHoldability ) ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for result set holdability." );
        }
    }


    /** Validates type, concurrency, and their combination; throws on the first violation. */
    public static void throwIfInvalid( int resultSetType, int resultSetConcurrency ) throws SQLException {
        if ( !isValidResultSetType( resultSetType ) ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for result set type." );
        }
        if ( !isValidResultSetConcurrency( resultSetConcurrency ) ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal value for result set concurrency." );
        }
        if ( !isValidResultSetConcurrency( resultSetType, resultSetConcurrency ) ) {
            throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPTION_NOT_SUPPORTED, "The specified concurrency is not supported for the specified result set type" );
        }
    }


    public static boolean isValidAutogeneratedKeys( int autogeneratedKeys ) {
        return AUTO_GENERATED_KEYS.contains( autogeneratedKeys );
    }


    public static boolean isInvalidFetchDirection( int fetchDirection ) {
        return !FETCH_DIRECTIONS.contains( fetchDirection );
    }

}
package org.polypheny.jdbc.transport;

import java.io.EOFException;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.StandardSocketOptions;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.SocketChannel;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Unencrypted TCP transport. Performs a version handshake on connect and then exchanges
 * length-prefixed messages (8-byte little-endian length followed by the payload).
 * Writes are serialized by a lock so concurrent senders cannot interleave frames.
 */
public class PlainTransport implements Transport {

    private static final String VERSION = "plain-v1@polypheny.com";

    protected final SocketChannel con;
    private final Lock writeLock = new ReentrantLock();


    public PlainTransport( String host, int port ) throws IOException {
        con = SocketChannel.open( new InetSocketAddress( host, port ) );
        con.setOption( StandardSocketOptions.TCP_NODELAY, true );
        exchangeVersion();
    }


    /**
     * Handshake: the server sends a one-byte length, then the version string terminated by
     * a newline; the client verifies it matches {@code VERSION} and echoes the message back.
     */
    private void exchangeVersion() throws IOException {
        ByteBuffer length = ByteBuffer.allocate( 1 );
        readEntireBuffer( length );
        byte len = length.get();
        if ( len <= 0 ) {
            throw new IOException( "Invalid version length" );
        }
        ByteBuffer response = ByteBuffer.allocate( 1 + len ); // Leading size
        response.put( len );
        readEntireBuffer( response );
        byte[] remoteVersion = new byte[len - 1]; // trailing newline
        response.position( 1 );
        response.get( remoteVersion );
        if ( !Arrays.equals( VERSION.getBytes( StandardCharsets.US_ASCII ), remoteVersion ) ) {
            String s = StandardCharsets.US_ASCII.decode( ByteBuffer.wrap( remoteVersion ) ).toString();
            // Only echo the remote version into the error message if it looks harmless.
            if ( s.matches( "\\A[a-z0-9@.-]+\\z" ) ) {
                throw new IOException( "Unsupported version: '" + s + "' expected '" + VERSION + "'" );
            } else {
                throw new IOException( "Unsupported version" );
            }
        }
        if ( response.get() != (byte) 0x0a ) {
            throw new IOException( "Invalid version message" );
        }
        response.rewind();
        writeEntireBuffer( response );
    }


    // Writes the whole buffer under the write lock so frames from concurrent senders never interleave.
    protected void writeEntireBuffer( ByteBuffer bb ) throws IOException {
        writeLock.lock();
        try {
            while ( bb.remaining() > 0 ) {
                int i = con.write( bb );
                if ( i == -1 ) {
                    throw new EOFException();
                }
            }
        } finally {
            writeLock.unlock();
        }
    }


    @Override
    public void sendMessage( byte[] message ) throws IOException {
        ByteBuffer bb = ByteBuffer.allocate( 8 + message.length );
        bb.order( ByteOrder.LITTLE_ENDIAN );
        bb.putLong( message.length );
        bb.put( message );
        bb.rewind();
        writeEntireBuffer( bb );
    }


    // Reads until the buffer is full, then rewinds it for the caller to consume.
    protected void readEntireBuffer( ByteBuffer bb ) throws IOException {
        while ( bb.remaining() > 0 ) {
            int i = con.read( bb );
            if ( i == -1 ) {
                throw new EOFException();
            }
        }
        bb.rewind();
    }


    @Override
    public byte[] receiveMessage() throws IOException {
        ByteBuffer bb = ByteBuffer.allocate( 8 );
        readEntireBuffer( bb );
        bb.order( ByteOrder.LITTLE_ENDIAN ); // TODO Big endian like other network protocols?
        long length = bb.getLong();
        // BUG FIX: only length == 0 was rejected. A negative length made ByteBuffer.allocate
        // throw IllegalArgumentException, and a length > Integer.MAX_VALUE was silently
        // truncated by the (int) cast, corrupting the stream. Reject the whole invalid range.
        if ( length <= 0 || length > Integer.MAX_VALUE ) {
            throw new IOException( "Invalid message length" );
        }
        bb = ByteBuffer.allocate( (int) length );
        readEntireBuffer( bb );
        return bb.array();
    }


    @Override
    public void close() {
        try {
            con.close();
        } catch ( IOException ignore ) {
            // ignore: close is best-effort
        }
    }

}
package org.polypheny.jdbc.transport;

import java.io.IOException;

/**
 * A message-oriented, bidirectional channel to a Polypheny server.
 *
 * <p>Implementations own the framing: {@link #sendMessage(byte[])} and {@link #receiveMessage()}
 * always operate on whole messages, never on partial byte streams.
 */
public interface Transport {

    /**
     * Sends one complete message to the peer.
     *
     * @param message the raw payload to transmit
     * @throws IOException if the message cannot be written
     */
    void sendMessage( byte[] message ) throws IOException;

    /**
     * Blocks until one complete message has been received from the peer.
     *
     * @return the raw payload of the next message
     * @throws IOException if reading fails or the connection is closed
     */
    byte[] receiveMessage() throws IOException;

    /**
     * Closes the underlying connection. Declared without a checked exception, so implementations
     * are expected to handle close failures internally.
     */
    void close();

}
package org.polypheny.jdbc.types;

import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Map;
import org.apache.commons.lang3.ArrayUtils;
import org.polypheny.jdbc.meta.PolyphenyResultSetMetadata;

/**
 * A read-only {@link ResultSet} backed by an in-memory two-dimensional {@link ArrayList}.
 *
 * <p>The cursor supports forward, backward and absolute positioning, but the set is
 * {@code CONCUR_READ_ONLY}: every mutator throws {@link SQLFeatureNotSupportedException}.
 * Getters return the stored value when its runtime type matches the requested one and map
 * SQL NULL to {@code null} (or {@code false}/0 for primitive getters) as required by JDBC;
 * any other combination raises a conversion error. Not thread-safe.
 */
public class ArrayResultSet<T> implements ResultSet {

    ArrayList<ArrayList<T>> data;        // all rows
    ArrayList<T> current;                // row under the cursor; null when before-first or after-last
    T lastRead;                          // most recently accessed column value, backs wasNull()
    PolyphenyResultSetMetadata metadata;

    int currentIndex = -1;               // -1 = before first, data.size() = after last
    boolean isClosed = false;


    public ArrayResultSet( ArrayList<ArrayList<T>> data, PolyphenyResultSetMetadata metadata ) {
        this.data = data;
        this.metadata = metadata;
        this.current = null;
        this.lastRead = null;
    }


    private void throwIfClosed() throws SQLException {
        if ( isClosed ) {
            throw new SQLException( "This operation cannot be applied to a closed result set." );
        }
    }


    // Fetches a 1-based column from the current row, remembering it for wasNull().
    private Object accessValue( int column ) throws SQLException {
        try {
            lastRead = current.get( column - 1 );
            return lastRead;
        } catch ( IndexOutOfBoundsException e ) {
            throw new SQLException( "Column index out of bounds." );
        }
    }


    /**
     * Reads a column and casts it to the requested type. SQL NULL yields {@code null}; a non-null
     * value of a different runtime type raises a conversion error.
     */
    private <V> V getTyped( int columnIndex, Class<V> clazz ) throws SQLException {
        throwIfClosed();
        Object value = accessValue( columnIndex );
        if ( value == null ) {
            return null; // SQL NULL
        }
        if ( clazz.isInstance( value ) ) {
            return clazz.cast( value );
        }
        throw new SQLException( "Conversion not supported" );
    }


    @Override
    public boolean next() throws SQLException {
        throwIfClosed();
        // Clamp at after-last so repeated next() calls past the end stay safe. Previously the
        // index kept growing and a further call hit data.get(size + 1) -> IndexOutOfBounds.
        if ( currentIndex >= data.size() - 1 ) {
            currentIndex = data.size();
            current = null;
            return false;
        }
        currentIndex++;
        current = data.get( currentIndex );
        return true;
    }


    @Override
    public void close() throws SQLException {
        // Mark the set closed so isClosed() and throwIfClosed() work. Previously this was a
        // no-op, leaving the result set "open" forever.
        isClosed = true;
        current = null;
        lastRead = null;
    }


    @Override
    public boolean wasNull() throws SQLException {
        throwIfClosed();
        // JDBC contract: report whether the LAST COLUMN READ was SQL NULL. The previous
        // implementation checked whether the cursor was on a row at all.
        return lastRead == null;
    }


    @Override
    public String getString( int columnIndex ) throws SQLException {
        return getTyped( columnIndex, String.class );
    }


    @Override
    public boolean getBoolean( int columnIndex ) throws SQLException {
        Boolean value = getTyped( columnIndex, Boolean.class );
        return value != null && value; // JDBC maps SQL NULL to false
    }


    @Override
    public byte getByte( int columnIndex ) throws SQLException {
        Byte value = getTyped( columnIndex, Byte.class );
        return value == null ? 0 : value; // JDBC maps SQL NULL to 0
    }


    @Override
    public short getShort( int columnIndex ) throws SQLException {
        Short value = getTyped( columnIndex, Short.class );
        return value == null ? 0 : value;
    }


    @Override
    public int getInt( int columnIndex ) throws SQLException {
        Integer value = getTyped( columnIndex, Integer.class );
        return value == null ? 0 : value;
    }


    @Override
    public long getLong( int columnIndex ) throws SQLException {
        Long value = getTyped( columnIndex, Long.class );
        return value == null ? 0L : value;
    }


    @Override
    public float getFloat( int columnIndex ) throws SQLException {
        Float value = getTyped( columnIndex, Float.class );
        return value == null ? 0f : value;
    }


    @Override
    public double getDouble( int columnIndex ) throws SQLException {
        Double value = getTyped( columnIndex, Double.class );
        return value == null ? 0d : value;
    }


    @Override
    public BigDecimal getBigDecimal( int columnIndex, int scale ) throws SQLException {
        // NOTE(review): the requested scale is ignored, matching the previous behavior —
        // confirm whether any caller relies on rescaling here.
        return getTyped( columnIndex, BigDecimal.class );
    }


    @Override
    public byte[] getBytes( int columnIndex ) throws SQLException {
        Byte[] value = getTyped( columnIndex, Byte[].class );
        return value == null ? null : ArrayUtils.toPrimitive( value );
    }


    @Override
    public Date getDate( int columnIndex ) throws SQLException {
        return getTyped( columnIndex, Date.class );
    }


    @Override
    public Time getTime( int columnIndex ) throws SQLException {
        return getTyped( columnIndex, Time.class );
    }


    @Override
    public Timestamp getTimestamp( int columnIndex ) throws SQLException {
        return getTyped( columnIndex, Timestamp.class );
    }


    @Override
    public InputStream getAsciiStream( int columnIndex ) throws SQLException {
        return getTyped( columnIndex, InputStream.class );
    }


    @Override
    public InputStream getUnicodeStream( int columnIndex ) throws SQLException {
        return getTyped( columnIndex, InputStream.class );
    }


    @Override
    public InputStream getBinaryStream( int columnIndex ) throws SQLException {
        return getTyped( columnIndex, InputStream.class );
    }


    // --- label-based getters delegate to their index-based counterparts ---

    @Override
    public String getString( String columnLabel ) throws SQLException {
        return getString( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public boolean getBoolean( String columnLabel ) throws SQLException {
        return getBoolean( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public byte getByte( String columnLabel ) throws SQLException {
        return getByte( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public short getShort( String columnLabel ) throws SQLException {
        return getShort( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public int getInt( String columnLabel ) throws SQLException {
        return getInt( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public long getLong( String columnLabel ) throws SQLException {
        return getLong( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public float getFloat( String columnLabel ) throws SQLException {
        return getFloat( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public double getDouble( String columnLabel ) throws SQLException {
        return getDouble( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public BigDecimal getBigDecimal( String columnLabel, int scale ) throws SQLException {
        return getBigDecimal( metadata.getColumnIndexFromLabel( columnLabel ), scale );
    }


    @Override
    public byte[] getBytes( String columnLabel ) throws SQLException {
        return getBytes( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public Date getDate( String columnLabel ) throws SQLException {
        return getDate( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public Time getTime( String columnLabel ) throws SQLException {
        return getTime( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public Timestamp getTimestamp( String columnLabel ) throws SQLException {
        return getTimestamp( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public InputStream getAsciiStream( String columnLabel ) throws SQLException {
        return getAsciiStream( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public InputStream getUnicodeStream( String columnLabel ) throws SQLException {
        return getUnicodeStream( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public InputStream getBinaryStream( String columnLabel ) throws SQLException {
        return getBinaryStream( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public SQLWarning getWarnings() throws SQLException {
        throw new SQLFeatureNotSupportedException( "Feature not supported" );
    }


    @Override
    public void clearWarnings() throws SQLException {
        // No warnings are ever accumulated, so there is nothing to clear.
    }


    @Override
    public String getCursorName() throws SQLException {
        throw new SQLFeatureNotSupportedException( "Feature not supported" );
    }


    @Override
    public ResultSetMetaData getMetaData() throws SQLException {
        return metadata;
    }


    @Override
    public Object getObject( int columnIndex ) throws SQLException {
        throwIfClosed();
        return accessValue( columnIndex );
    }


    @Override
    public Object getObject( String columnLabel ) throws SQLException {
        return getObject( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public int findColumn( String columnLabel ) throws SQLException {
        throwIfClosed();
        return metadata.getColumnIndexFromLabel( columnLabel );
    }


    @Override
    public Reader getCharacterStream( int columnIndex ) throws SQLException {
        return getTyped( columnIndex, Reader.class );
    }


    @Override
    public Reader getCharacterStream( String columnLabel ) throws SQLException {
        return getCharacterStream( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    @Override
    public BigDecimal getBigDecimal( int columnIndex ) throws SQLException {
        return getTyped( columnIndex, BigDecimal.class );
    }


    @Override
    public BigDecimal getBigDecimal( String columnLabel ) throws SQLException {
        return getBigDecimal( metadata.getColumnIndexFromLabel( columnLabel ) );
    }


    // --- cursor positioning; empty result sets report false everywhere, per JDBC ---

    @Override
    public boolean isBeforeFirst() throws SQLException {
        return currentIndex == -1 && !data.isEmpty();
    }


    @Override
    public boolean isAfterLast() throws SQLException {
        return currentIndex == data.size() && !data.isEmpty();
    }


    @Override
    public boolean isFirst() throws SQLException {
        return currentIndex == 0 && !data.isEmpty();
    }


    @Override
    public boolean isLast() throws SQLException {
        return currentIndex == data.size() - 1 && !data.isEmpty();
    }


    @Override
    public void beforeFirst() throws SQLException {
        throwIfClosed();
        currentIndex = -1;
        current = null;
    }


    @Override
    public void afterLast() throws SQLException {
        throwIfClosed();
        // An index equal to the list size means "after the last row".
        currentIndex = data.size();
        current = null;
    }


    @Override
    public boolean first() throws SQLException {
        throwIfClosed();
        // An empty result set has no first row; previously this threw IndexOutOfBounds.
        if ( data.isEmpty() ) {
            currentIndex = -1;
            current = null;
            return false;
        }
        currentIndex = 0;
        current = data.get( 0 );
        return true;
    }


    @Override
    public boolean last() throws SQLException {
        throwIfClosed();
        // An empty result set has no last row; previously this threw IndexOutOfBounds.
        if ( data.isEmpty() ) {
            currentIndex = -1;
            current = null;
            return false;
        }
        currentIndex = data.size() - 1;
        current = data.get( currentIndex );
        return true;
    }


    @Override
    public int getRow() throws SQLException {
        // JDBC enumerates rows from one and returns 0 when there is no current row.
        return current == null ? 0 : currentIndex + 1;
    }


    @Override
    public boolean absolute( int row ) throws SQLException {
        throwIfClosed();
        int target;
        if ( row > 0 ) {
            target = row - 1;                // 1-based from the start
        } else if ( row < 0 ) {
            // Counted from the end: absolute(-1) is the LAST row. The previous recursive
            // implementation was off by one here and looped forever on empty data.
            target = data.size() + row;
        } else {
            target = -1;                     // absolute(0) positions before the first row
        }
        if ( target < 0 ) {
            currentIndex = -1;
            current = null;
            return false;
        }
        if ( target >= data.size() ) {
            currentIndex = data.size();
            current = null;
            return false;
        }
        currentIndex = target;
        current = data.get( target );
        return true;
    }


    @Override
    public boolean relative( int offset ) throws SQLException {
        throwIfClosed();
        int newCurrent = currentIndex + offset;
        if ( newCurrent < 0 ) {
            currentIndex = -1;
            current = null;
            return false;
        }
        if ( newCurrent >= data.size() ) {
            // An index equal to the list size means "after the last row".
            currentIndex = data.size();
            current = null;
            return false;
        }
        currentIndex = newCurrent;
        current = data.get( currentIndex );
        return true;
    }


    @Override
    public boolean previous() throws SQLException {
        return relative( -1 );
    }


    @Override
    public void setFetchDirection( int fetchDirection ) throws SQLException {
        throwIfClosed();
        if ( fetchDirection != ResultSet.FETCH_FORWARD ) {
            throw new SQLException( "Illegal fetch direction for this result set" );
        }
    }


    @Override
    public int getFetchDirection() throws SQLException {
        return ResultSet.FETCH_FORWARD;
    }


    @Override
    public void setFetchSize( int fetchSize ) throws SQLException {
        // All data is already in memory, so the fetch size is validated but otherwise ignored.
        if ( fetchSize < 0 ) {
            throw new SQLException( "Illegal value for fetchSize" );
        }
    }


    @Override
    public int getFetchSize() throws SQLException {
        return 0;
    }


    @Override
    public int getType() throws SQLException {
        throwIfClosed();
        return ResultSet.TYPE_FORWARD_ONLY;
    }


    @Override
    public int getConcurrency() throws SQLException {
        throwIfClosed();
        return ResultSet.CONCUR_READ_ONLY;
    }


    @Override
    public boolean rowUpdated() throws SQLException {
        return false;
    }


    @Override
    public boolean rowInserted() throws SQLException {
        return false;
    }


    @Override
    public boolean rowDeleted() throws SQLException {
        return false;
    }


    // --- mutators: this result set is CONCUR_READ_ONLY, so every update is unsupported ---

    @Override
    public void updateNull( int i ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBoolean( int i, boolean b ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateByte( int i, byte b ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateShort( int i, short i1 ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateInt( int i, int i1 ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateLong( int i, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateFloat( int i, float v ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateDouble( int i, double v ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBigDecimal( int i, BigDecimal bigDecimal ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateString( int i, String s ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBytes( int i, byte[] bytes ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateDate( int i, Date date ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateTime( int i, Time time ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateTimestamp( int i, Timestamp timestamp ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateAsciiStream( int i, InputStream inputStream, int i1 ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBinaryStream( int i, InputStream inputStream, int i1 ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateCharacterStream( int i, Reader reader, int i1 ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateObject( int i, Object o, int i1 ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateObject( int i, Object o ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNull( String s ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBoolean( String s, boolean b ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateByte( String s, byte b ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateShort( String s, short i ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateInt( String s, int i ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateLong( String s, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateFloat( String s, float v ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateDouble( String s, double v ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBigDecimal( String s, BigDecimal bigDecimal ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateString( String s, String s1 ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBytes( String s, byte[] bytes ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateDate( String s, Date date ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateTime( String s, Time time ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateTimestamp( String s, Timestamp timestamp ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateAsciiStream( String s, InputStream inputStream, int i ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBinaryStream( String s, InputStream inputStream, int i ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateCharacterStream( String s, Reader reader, int i ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateObject( String s, Object o, int i ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateObject( String s, Object o ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void insertRow() throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateRow() throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void deleteRow() throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void refreshRow() throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void cancelRowUpdates() throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void moveToInsertRow() throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void moveToCurrentRow() throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public Statement getStatement() throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public Object getObject( int i, Map<String, Class<?>> map ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public Ref getRef( int i ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public Blob getBlob( int i ) throws SQLException {
        return getTyped( i, Blob.class );
    }


    @Override
    public Clob getClob( int i ) throws SQLException {
        return getTyped( i, Clob.class );
    }


    @Override
    public Array getArray( int i ) throws SQLException {
        return getTyped( i, Array.class );
    }


    @Override
    public Object getObject( String s, Map<String, Class<?>> map ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public Ref getRef( String s ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public Blob getBlob( String s ) throws SQLException {
        return getBlob( metadata.getColumnIndexFromLabel( s ) );
    }


    @Override
    public Clob getClob( String s ) throws SQLException {
        return getClob( metadata.getColumnIndexFromLabel( s ) );
    }


    @Override
    public Array getArray( String s ) throws SQLException {
        return getArray( metadata.getColumnIndexFromLabel( s ) );
    }


    @Override
    public Date getDate( int i, Calendar calendar ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public Date getDate( String s, Calendar calendar ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public Time getTime( int i, Calendar calendar ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public Time getTime( String s, Calendar calendar ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public Timestamp getTimestamp( int i, Calendar calendar ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public Timestamp getTimestamp( String s, Calendar calendar ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public URL getURL( int i ) throws SQLException {
        return getTyped( i, URL.class );
    }


    @Override
    public URL getURL( String s ) throws SQLException {
        return getURL( metadata.getColumnIndexFromLabel( s ) );
    }


    @Override
    public void updateRef( int i, Ref ref ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateRef( String s, Ref ref ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBlob( int i, Blob blob ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBlob( String s, Blob blob ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateClob( int i, Clob clob ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateClob( String s, Clob clob ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateArray( int i, Array array ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateArray( String s, Array array ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public RowId getRowId( int i ) throws SQLException {
        return getTyped( i, RowId.class );
    }


    @Override
    public RowId getRowId( String s ) throws SQLException {
        return getRowId( metadata.getColumnIndexFromLabel( s ) );
    }


    @Override
    public void updateRowId( int i, RowId rowId ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateRowId( String s, RowId rowId ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public int getHoldability() throws SQLException {
        throwIfClosed();
        return ResultSet.CLOSE_CURSORS_AT_COMMIT;
    }


    @Override
    public boolean isClosed() throws SQLException {
        return isClosed;
    }


    @Override
    public void updateNString( int i, String s ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNString( String s, String s1 ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNClob( int i, NClob nClob ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNClob( String s, NClob nClob ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public NClob getNClob( int i ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public NClob getNClob( String s ) throws SQLException {
        return getNClob( metadata.getColumnIndexFromLabel( s ) );
    }


    @Override
    public SQLXML getSQLXML( int i ) throws SQLException {
        return getTyped( i, SQLXML.class );
    }


    @Override
    public SQLXML getSQLXML( String s ) throws SQLException {
        return getSQLXML( metadata.getColumnIndexFromLabel( s ) );
    }


    @Override
    public void updateSQLXML( int i, SQLXML sqlxml ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateSQLXML( String s, SQLXML sqlxml ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public String getNString( int i ) throws SQLException {
        return getTyped( i, String.class );
    }


    @Override
    public String getNString( String s ) throws SQLException {
        return getNString( metadata.getColumnIndexFromLabel( s ) );
    }


    @Override
    public Reader getNCharacterStream( int i ) throws SQLException {
        return getTyped( i, Reader.class );
    }


    @Override
    public Reader getNCharacterStream( String s ) throws SQLException {
        return getNCharacterStream( metadata.getColumnIndexFromLabel( s ) );
    }


    @Override
    public void updateNCharacterStream( int i, Reader reader, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNCharacterStream( String s, Reader reader, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateAsciiStream( int i, InputStream inputStream, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBinaryStream( int i, InputStream inputStream, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateCharacterStream( int i, Reader reader, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateAsciiStream( String s, InputStream inputStream, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBinaryStream( String s, InputStream inputStream, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateCharacterStream( String s, Reader reader, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBlob( int i, InputStream inputStream, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBlob( String s, InputStream inputStream, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateClob( int i, Reader reader, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateClob( String s, Reader reader, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNClob( int i, Reader reader, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNClob( String s, Reader reader, long l ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNCharacterStream( int i, Reader reader ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNCharacterStream( String s, Reader reader ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateAsciiStream( int i, InputStream inputStream ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBinaryStream( int i, InputStream inputStream ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateCharacterStream( int i, Reader reader ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateAsciiStream( String s, InputStream inputStream ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBinaryStream( String s, InputStream inputStream ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateCharacterStream( String s, Reader reader ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBlob( int i, InputStream inputStream ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateBlob( String s, InputStream inputStream ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateClob( int i, Reader reader ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateClob( String s, Reader reader ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNClob( int i, Reader reader ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }

    @Override
    public void updateNClob( String s, Reader reader ) throws SQLException {
        throw new SQLFeatureNotSupportedException( "Operation not supported" );
    }


    @Override
    public <U> U getObject( int i, Class<U> aClass ) throws SQLException {
        throwIfClosed();
        Object value = accessValue( i );
        if ( value == null ) {
            return null; // SQL NULL; previously this raised a conversion error
        }
        // Verify the cast instead of blindly casting to U, so a wrong requested type fails
        // here with a SQLException rather than with a ClassCastException at the caller.
        if ( aClass.isInstance( value ) ) {
            return aClass.cast( value );
        }
        throw new SQLException( "Conversion not supported" );
    }


    @Override
    public <U> U getObject( String s, Class<U> aClass ) throws SQLException {
        return getObject( metadata.getColumnIndexFromLabel( s ), aClass );
    }


    @Override
    public <U> U unwrap( Class<U> aClass ) throws SQLException {
        if ( aClass.isInstance( this ) ) {
            return aClass.cast( this );
        }
        throw new SQLException( "Not a wrapper for " + aClass );
    }


    @Override
    public boolean isWrapperFor( Class<?> aClass ) {
        return aClass.isInstance( this );
    }

}
+ */ + +package org.polypheny.jdbc.types; + +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Calendar; +import java.util.Map; + +public interface Convertible { + + boolean isNull(); + + + PolyDocument asDocument() throws SQLException; + + PolyInterval asInterval() throws SQLException; + + String asString() throws SQLException; + + + boolean asBoolean() throws SQLException; + + + byte asByte() throws SQLException; + + + short asShort() throws SQLException; + + + int asInt() throws SQLException; + + + long asLong() throws SQLException; + + + float asFloat() throws SQLException; + + + double asDouble() throws SQLException; + + + BigDecimal asBigDecimal() throws SQLException; + + + BigDecimal asBigDecimal( int scale ) throws SQLException; + + + byte[] asBytes() throws SQLException; + + + InputStream asAsciiStream() throws SQLException; + + + InputStream asUnicodeStream() throws SQLException; + + + InputStream asBinaryStream() throws SQLException; + + + Object asObject() throws SQLException; + + + Reader asCharacterStream() throws SQLException; + + + Blob asBlob() throws SQLException; + + + Clob asClob() throws SQLException; + + + Array asArray() throws SQLException; + + + Struct asStruct() throws SQLException; + + Date asDate() throws SQLException; + + Date asDate( Calendar calendar ) throws SQLException; + + Time asTime() throws SQLException; + + Time asTime( Calendar calendar ) throws SQLException; + + Timestamp asTimestamp() throws SQLException; + + Timestamp asTimestamp( Calendar calendar ) throws SQLException; + + Ref asRef() throws SQLException; + + RowId asRowId() throws SQLException; + + URL asUrl() 
throws SQLException; + + + NClob asNClob() throws SQLException; + + + SQLXML asSQLXML() throws SQLException; + + + String asNString() throws SQLException; + + + Reader asNCharacterStream() throws SQLException; + + Object asObject( Map> map ) throws SQLException; + + Object asObject( Calendar calendar ) throws SQLException; + + T asObject( Class aClass ) throws SQLException; + +} diff --git a/src/main/java/org/polypheny/jdbc/types/PolyArray.java b/src/main/java/org/polypheny/jdbc/types/PolyArray.java new file mode 100644 index 00000000..50f983f4 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/types/PolyArray.java @@ -0,0 +1,137 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.types; + +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import org.polypheny.jdbc.PolyphenyResultSet; +import org.polypheny.jdbc.meta.PolyphenyColumnMeta; +import org.polypheny.prism.ProtoPolyType; + +public class PolyArray implements Array { + + private final String protoBaseTypeName; + private final Object[] elements; + + + public PolyArray( String protoBaseTypeName, Object[] elements ) { + this.protoBaseTypeName = protoBaseTypeName; + Object[] shiftedElements = new Object[elements.length]; + int endIdx = elements.length; + System.arraycopy( elements, 0, shiftedElements, 0, endIdx ); + this.elements = shiftedElements; + } + + + public PolyArray( String protoBaseTypeName, List values ) throws SQLException { + this.protoBaseTypeName = protoBaseTypeName; + List objects = new ArrayList<>(); + for ( TypedValue v : values ) { + objects.add( v.asObject() ); + } + this.elements = objects.toArray( new Object[0] ); + } + + + private int longToInt( long value ) { + return Math.toIntExact( value ); + } + + + @Override + public String getBaseTypeName() { + return protoBaseTypeName; + } + + + @Override + public int getBaseType() { + return ProtoToJdbcTypeMap.getJdbcTypeFromProto( ProtoPolyType.valueOf( protoBaseTypeName ) ); + } + + + @Override + public Object getArray() { + return elements; + } + + + @Override + public Object getArray( Map> map ) throws SQLException { + throw new SQLFeatureNotSupportedException( "Feature not supported" ); + } + + + @Override + public Object getArray( long index, int count ) { + // JDBC array positions are 1-based; Arrays.copyOfRange takes an exclusive end index, not a count. + int fromIndex = longToInt( index - 1 ); + return Arrays.copyOfRange( elements, fromIndex, fromIndex + count ); + } + + + @Override + public Object getArray( long index, int count, Map> map ) throws SQLException { + throw new SQLFeatureNotSupportedException( "Feature 
not supported" ); + } + + + @Override + public ResultSet getResultSet() throws SQLException { + // JDBC positions are 1-based: start from the first element, not position 0. + return getResultSet( 1, elements.length ); + } + + + @Override + public ResultSet getResultSet( Map> map ) throws SQLException { + throw new SQLFeatureNotSupportedException( "Feature not supported" ); + } + + + @Override + public ResultSet getResultSet( long index, int count ) throws SQLException { + int jdbcBaseType = getBaseType(); + List columnMetas = new ArrayList<>(); + columnMetas.add( PolyphenyColumnMeta.fromSpecification( 0, "INDEX", "ARRAY", Types.INTEGER ) ); + columnMetas.add( PolyphenyColumnMeta.fromSpecification( 1, "VALUE", "ARRAY", jdbcBaseType ) ); + List> rows = new ArrayList<>(); + // Honor the 1-based start position and the requested count; the previous loop started at 1 (skipping the first element) and ignored both parameters. + int startIdx = longToInt( index - 1 ); + int endIdx = Math.min( startIdx + count, elements.length ); + for ( int i = startIdx; i < endIdx; i++ ) { + List currentRow = new ArrayList<>(); + currentRow.add( TypedValue.fromInteger( i + 1 ) ); + currentRow.add( TypedValue.fromObject( elements[i], jdbcBaseType ) ); + rows.add( currentRow ); + } + return new PolyphenyResultSet( columnMetas, rows ); + } + + + @Override + public ResultSet getResultSet( long index, int count, Map> map ) throws SQLException { + throw new SQLFeatureNotSupportedException( "Feature not supported" ); + } + + + @Override + public void free() { + } + +} diff --git a/src/main/java/org/polypheny/jdbc/types/PolyBlob.java b/src/main/java/org/polypheny/jdbc/types/PolyBlob.java new file mode 100644 index 00000000..802e2f26 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/types/PolyBlob.java @@ -0,0 +1,180 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.types; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.io.OutputStream; +import java.sql.Blob; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.util.Arrays; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; + +public class PolyBlob implements Blob { + + /* + * This array should be replaced with an objet capable of either storing a collection of bytes larger than MAX_INT + * or some kind of streaming mechanism. + */ + byte[] value; + boolean isFreed; + + + public PolyBlob() { + this.isFreed = false; + } + + + public PolyBlob( byte[] bytes ) { + this.isFreed = false; + this.value = bytes; + } + + + private long positionToIndex( long position ) { + return position - 1; + } + + + private long indexToPosition( long index ) { + return index + 1; + } + + + private int longToInt( long value ) { + return Math.toIntExact( value ); + } + + + private void throwIfPositionOutOfBounds( long position ) throws SQLException { + /* jdbc starts enumeration by one */ + throwIfIndexOutOfBounds( positionToIndex( position ) ); + } + + + private void throwIfIndexOutOfBounds( long index ) throws SQLException { + if ( index < 0 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Index out of bounds" ); + } + if ( index >= value.length ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Index out of bounds" ); + } + } + + + private void throwIfFreed() throws SQLException { + if ( isFreed ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Illegal operation on freed blob" ); + } + } + + + @Override + public long length() throws SQLException { + return value.length; + } + + + @Override + public byte[] getBytes( long 
pos, int length ) throws SQLException { + throwIfFreed(); + throwIfPositionOutOfBounds( pos ); + throwIfPositionOutOfBounds( pos + length - 1 ); + pos = positionToIndex( pos ); + // Arrays.copyOfRange takes an exclusive end index, not a length; the previous call truncated the slice for pos > 1. + return Arrays.copyOfRange( value, longToInt( pos ), longToInt( pos ) + length ); + } + + + @Override + public InputStream getBinaryStream() throws SQLException { + throwIfFreed(); + return new ByteArrayInputStream( value ); + } + + + @Override + public long position( byte[] bytes, long start ) throws SQLException { + /* Could efficiently be implemented using Knuth-Morris-Pratt-Algorithm */ + throw new SQLFeatureNotSupportedException( "Feature not implemented" ); + } + + + @Override + public long position( Blob blob, long start ) throws SQLException { + throw new SQLFeatureNotSupportedException( "Feature not implemented" ); + } + + + @Override + public int setBytes( long pos, byte[] bytes ) throws SQLException { + return setBytes( pos, bytes, 0, bytes.length ); + } + + + @Override + public int setBytes( long pos, byte[] bytes, int offset, int len ) throws SQLException { + throwIfFreed(); + if ( value == null ) { + value = new byte[len]; + } + if ( positionToIndex( pos + len ) >= value.length ) { + value = Arrays.copyOf( value, longToInt( positionToIndex( pos + len ) ) ); + } + for ( int bytesWritten = 0; bytesWritten < len; bytesWritten++ ) { + int writeIndex = longToInt( positionToIndex( pos ) ) + bytesWritten; + value[writeIndex] = bytes[offset + bytesWritten]; + } + return len; + } + + + @Override + public OutputStream setBinaryStream( long pos ) throws SQLException { + throwIfFreed(); + throw new SQLFeatureNotSupportedException( "Feature not supported" ); + } + + + @Override + public void truncate( long len ) throws SQLException { + throwIfFreed(); + if ( len < 0 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Illegal argument for len" ); + } + len = Math.min( len, value.length ); + value = Arrays.copyOf( value, longToInt( len ) ); + } + + + @Override + public void 
free() throws SQLException { + this.isFreed = true; + } + + + @Override + public InputStream getBinaryStream( long pos, long len ) throws SQLException { + throwIfFreed(); + int from = longToInt( positionToIndex( pos ) ); + int to = longToInt( positionToIndex( pos + len ) ); + byte[] slice = Arrays.copyOfRange( value, from, to ); + return new ByteArrayInputStream( slice ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/types/PolyClob.java b/src/main/java/org/polypheny/jdbc/types/PolyClob.java new file mode 100644 index 00000000..8d2793cd --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/types/PolyClob.java @@ -0,0 +1,180 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.types; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Reader; +import java.io.StringReader; +import java.io.Writer; +import java.nio.charset.StandardCharsets; +import java.sql.Clob; +import java.sql.NClob; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; + +public class PolyClob implements Clob, NClob { + + String value; + boolean isFreed; + + + private void throwIfFreed() throws SQLException { + if ( isFreed ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Illegal operation on freed blob" ); + } + } + + + public PolyClob( String string ) { + this.isFreed = false; + this.value = string; + } + + + public PolyClob() { + } + + + private long positionToIndex( long position ) { + return position - 1; + } + + + private long indexToPosition( long index ) { + return index + 1; + } + + + private int longToInt( long value ) { + return Math.toIntExact( value ); + } + + + @Override + public long length() throws SQLException { + throwIfFreed(); + return value.length(); + } + + + @Override + public String getSubString( long pos, int length ) throws SQLException { + throwIfFreed(); + int startIndex = longToInt( positionToIndex( pos ) ); + // String.substring takes an exclusive end index, not a length. + return value.substring( startIndex, startIndex + length ); + } + + + @Override + public Reader getCharacterStream() throws SQLException { + throwIfFreed(); + return new StringReader( value ); + } + + + @Override + public InputStream getAsciiStream() throws SQLException { + throwIfFreed(); + return new ByteArrayInputStream( value.getBytes( StandardCharsets.US_ASCII ) ); + } + + + @Override + public long position( String searchStr, long start ) throws SQLException { + throwIfFreed(); + int startIndex = longToInt( positionToIndex( start ) ); + int foundIndex = value.indexOf( searchStr, startIndex ); + // JDBC positions are 1-based; keep -1 as the "not found" marker instead of shifting it. + return foundIndex == -1 ? -1 : indexToPosition( foundIndex ); + } + + + @Override + public long position( Clob clob, long l ) throws SQLException { + throwIfFreed(); + throw new SQLFeatureNotSupportedException( "Feature not supported" ); + } + + + @Override + public int setString( long pos, String str ) throws SQLException { + throwIfFreed(); + replaceSection( longToInt( positionToIndex( pos ) ), str.length(), str ); + return str.length(); + } + + + private void replaceSection( int startIndex, int replacementLength, String replacement ) throws PrismInterfaceServiceException { + if ( value == null ) { + if ( startIndex > 0 ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.VALUE_ILLEGAL, "Can't replace section in empty string" ); + } + value = replacement; + return; + } + value = value.substring( 0, startIndex ) + replacement + value.substring( startIndex + replacementLength ); + } + + + @Override + public int setString( long pos, String str, int offset, int len ) throws SQLException { + throwIfFreed(); + // offset is a 0-based offset into str, matching PolyBlob.setBytes; the previous code shifted it as if it were a 1-based position. + String replacement = str.substring( offset, offset + len ); + return setString( pos, replacement ); + } + + + @Override + public OutputStream setAsciiStream( long pos ) throws SQLException { + throwIfFreed(); + throw new SQLFeatureNotSupportedException( "feature not supported" ); + } + + + @Override + public Writer setCharacterStream( long l ) throws SQLException { + throwIfFreed(); + throw new SQLFeatureNotSupportedException( "feature not supported" ); + } + + + @Override + public void truncate( long len ) throws SQLException { + throwIfFreed(); + value = value.substring( 0, longToInt( len ) ); + } + + + @Override + public void free() throws SQLException { + this.isFreed = true; + } + + + @Override + public Reader getCharacterStream( long pos, long length ) throws SQLException { + throwIfFreed(); + int startIndex = longToInt( positionToIndex( pos ) ); + String slice = 
value.substring( startIndex, startIndex + longToInt( length ) ); + // Return the computed slice as a reader; previously the slice was dropped and null returned. + return new StringReader( slice ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/types/PolyDocument.java b/src/main/java/org/polypheny/jdbc/types/PolyDocument.java new file mode 100644 index 00000000..7e1bfdda --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/types/PolyDocument.java @@ -0,0 +1,70 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.types; + +import java.sql.SQLException; +import java.util.HashMap; +import java.util.List; +import java.util.stream.Collectors; +import org.polypheny.jdbc.utils.ProtoUtils; +import org.polypheny.prism.ProtoDocument; +import org.polypheny.prism.ProtoEntry; +import org.polypheny.prism.ProtoValue; +import org.polypheny.prism.ProtoValue.ValueCase; + +public class PolyDocument extends HashMap { + + public PolyDocument() { + super(); + } + + + public PolyDocument( HashMap entries ) { + super( entries ); + } + + + public PolyDocument( ProtoDocument document ) { + super(); + document.getEntriesList().stream() + .filter( e -> e.getKey().getValueCase() == ValueCase.STRING ) + .forEach( e -> put( + e.getKey().getString().getString(), + new TypedValue( e.getValue() ) + ) ); + } + + + public ProtoDocument serialize() { + List protoEntries = entrySet().stream().map( entry -> { + ProtoValue protoKey = ProtoUtils.serializeAsProtoString( entry.getKey() ); + ProtoValue protoValue; + try { + 
protoValue = entry.getValue().serialize(); + } catch ( SQLException e ) { + throw new RuntimeException( "Should not be thrown. Unknown value encountered." ); + } + return ProtoEntry.newBuilder() + .setKey( protoKey ) + .setValue( protoValue ) + .build(); + } ).collect( Collectors.toList() ); + + return ProtoDocument.newBuilder().addAllEntries( protoEntries ).build(); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/types/PolyInterval.java b/src/main/java/org/polypheny/jdbc/types/PolyInterval.java new file mode 100644 index 00000000..aa3ca4c6 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/types/PolyInterval.java @@ -0,0 +1,55 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.types; + +import lombok.Getter; + +public class PolyInterval { + + @Getter + private final long months; + @Getter + private final long milliseconds; + + + public PolyInterval( long months, long milliseconds ) { + this.months = months; + this.milliseconds = milliseconds; + } + + + private String plural( long count, String word ) { + return count + " " + (count != 1 ? 
word + "s" : word); + } + + + @Override + public String toString() { + // Join the two components with a space, and pass the singular "millisecond" so plural() can append the "s" itself. + return plural( months, "month" ) + " " + plural( milliseconds, "millisecond" ); + } + + + @Override + public boolean equals( Object o ) { + if ( o instanceof PolyInterval ) { + PolyInterval i = (PolyInterval) o; + return months == i.getMonths() && milliseconds == i.getMilliseconds(); + } + return false; + } + + + @Override + public int hashCode() { + // equals() is overridden above, so hashCode() must be consistent with it. + return Long.hashCode( months ) * 31 + Long.hashCode( milliseconds ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/types/PolyStruct.java b/src/main/java/org/polypheny/jdbc/types/PolyStruct.java new file mode 100644 index 00000000..1da45b4b --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/types/PolyStruct.java @@ -0,0 +1,56 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.types; + +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +public class PolyStruct implements Struct { + + List attributes; + String typeName; + + + public PolyStruct( String typeName, Object[] attributes ) { + this.typeName = typeName; + this.attributes = new ArrayList<>( Arrays.asList( attributes ) ); + } + + + @Override + public String getSQLTypeName() throws SQLException { + return typeName; + } + + + @Override + public Object[] getAttributes() throws SQLException { + return attributes.toArray(); + } + + + @Override + public Object[] getAttributes( Map> map ) throws SQLException { + throw new SQLFeatureNotSupportedException( "Feature not supported" ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/types/ProtoToJdbcTypeMap.java b/src/main/java/org/polypheny/jdbc/types/ProtoToJdbcTypeMap.java new file mode 100644 index 00000000..f8f34b8b --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/types/ProtoToJdbcTypeMap.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.types; + +import java.sql.Types; +import java.util.HashMap; +import java.util.Map; +import org.polypheny.prism.ProtoPolyType; + +public class ProtoToJdbcTypeMap { + + private static final Map PROTO_TYPE_TO_JDBC = new HashMap<>(); + + + static { + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.BOOLEAN, Types.BOOLEAN ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.TINYINT, Types.TINYINT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.SMALLINT, Types.SMALLINT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.INTEGER, Types.INTEGER ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.BIGINT, Types.BIGINT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.DECIMAL, Types.DECIMAL ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.REAL, Types.REAL ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.FLOAT, Types.FLOAT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.DOUBLE, Types.DOUBLE ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.DATE, Types.DATE ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.TIME, Types.TIME ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.TIMESTAMP, Types.TIMESTAMP ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.INTERVAL, Types.OTHER ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.CHAR, Types.CHAR ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.VARCHAR, Types.VARCHAR ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.TEXT, Types.VARCHAR ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.BINARY, Types.BINARY ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.VARBINARY, Types.VARBINARY ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.NULL, Types.NULL ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.ARRAY, Types.ARRAY ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.MAP, Types.JAVA_OBJECT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.DOCUMENT, Types.JAVA_OBJECT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.GRAPH, Types.JAVA_OBJECT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.NODE, Types.JAVA_OBJECT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.EDGE, Types.JAVA_OBJECT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.PATH, Types.JAVA_OBJECT ); + PROTO_TYPE_TO_JDBC.put( 
ProtoPolyType.IMAGE, Types.BINARY ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.VIDEO, Types.BINARY ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.AUDIO, Types.BINARY ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.FILE, Types.BINARY ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.DISTINCT, Types.DISTINCT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.STRUCTURED, Types.STRUCT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.OTHER, Types.OTHER ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.CURSOR, Types.REF_CURSOR ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.COLUMN_LIST, Types.OTHER + 2 ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.DYNAMIC_STAR, Types.JAVA_OBJECT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.GEOMETRY, Types.JAVA_OBJECT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.SYMBOL, Types.OTHER ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.JSON, Types.VARCHAR ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.MULTISET, Types.ARRAY ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.ANY, Types.JAVA_OBJECT ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.USER_DEFINED_TYPE, Types.OTHER ); + PROTO_TYPE_TO_JDBC.put( ProtoPolyType.ROW, Types.ROWID ); + } + + + /** Returns the java.sql.Types code for the given prism type; throws IllegalArgumentException for unmapped types. */ + public static int getJdbcTypeFromProto( ProtoPolyType type ) { + Integer jdbcType = PROTO_TYPE_TO_JDBC.get( type ); + if ( jdbcType == null ) { + throw new IllegalArgumentException( "Invalid proto value type: " + type.name() + "." ); + } + return jdbcType; + } + +} diff --git a/src/main/java/org/polypheny/jdbc/types/TypedValue.java b/src/main/java/org/polypheny/jdbc/types/TypedValue.java new file mode 100644 index 00000000..6842348e --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/types/TypedValue.java @@ -0,0 +1,1432 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.types; + +import com.google.protobuf.ByteString; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.io.StringReader; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.math.RoundingMode; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLInput; +import java.sql.SQLXML; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import lombok.Getter; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; +import org.polypheny.jdbc.properties.DriverProperties; +import org.polypheny.jdbc.utils.ProtoUtils; +import org.polypheny.jdbc.utils.TypedValueUtils; +import org.polypheny.prism.ProtoBigDecimal; +import org.polypheny.prism.ProtoBinary; +import org.polypheny.prism.ProtoBoolean; +import 
org.polypheny.prism.ProtoDate; +import org.polypheny.prism.ProtoDouble; +import org.polypheny.prism.ProtoFile; +import org.polypheny.prism.ProtoFloat; +import org.polypheny.prism.ProtoInteger; +import org.polypheny.prism.ProtoInterval; +import org.polypheny.prism.ProtoList; +import org.polypheny.prism.ProtoLong; +import org.polypheny.prism.ProtoNull; +import org.polypheny.prism.ProtoTime; +import org.polypheny.prism.ProtoTimestamp; +import org.polypheny.prism.ProtoValue; +import org.polypheny.prism.ProtoValue.ValueCase; + +public class TypedValue implements Convertible { + + private static final long MILLISECONDS_PER_DAY = 24 * 60 * 60 * 1000; + + private static final Set customTypes = new HashSet<>( Arrays.asList( + ValueCase.DOCUMENT, + ValueCase.INTERVAL + ) ); + + private ProtoValue serialized; + @Getter + private ProtoValue.ValueCase valueCase; + private boolean isSerialized = true; + + private Boolean booleanValue; + private Integer integerValue; + private Long bigintValue; + private Float floatValue; + private Double doubleValue; + private BigDecimal bigDecimalValue; + private byte[] binaryValue; + private Blob blobValue; + private Date dateValue; + private Time timeValue; + private Timestamp timestampValue; + private String varcharValue; + private Array arrayValue; + private RowId rowIdValue; + private Object otherValue; + + + public TypedValue( ProtoValue value ) { + this.serialized = value; + this.valueCase = serialized.getValueCase(); + } + + + private TypedValue() { + this.isSerialized = false; + } + + + public static TypedValue fromBoolean( boolean booleanValue ) { + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.BOOLEAN; + value.booleanValue = booleanValue; + return value; + } + + + public static TypedValue fromByte( byte byteValue ) { + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.INTEGER; + value.integerValue = (int) byteValue; + return value; + } + + + public static TypedValue fromShort( short shortValue ) 
{ + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.INTEGER; + value.integerValue = (int) shortValue; + return value; + } + + + public static TypedValue fromInteger( int intValue ) { + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.INTEGER; + value.integerValue = intValue; + return value; + } + + + public static TypedValue fromLong( long bigintValue ) { + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.LONG; + value.bigintValue = bigintValue; + return value; + } + + + public static TypedValue fromFloat( float floatValue ) { + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.FLOAT; + value.floatValue = floatValue; + return value; + } + + + public static TypedValue fromDouble( double doubleValue ) { + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.DOUBLE; + value.doubleValue = doubleValue; + return value; + } + + + public static TypedValue fromBigDecimal( BigDecimal bigDecimalValue ) { + if ( bigDecimalValue == null ) { + return fromNull(); + } + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.BIG_DECIMAL; + value.bigDecimalValue = bigDecimalValue; + return value; + } + + + public static TypedValue fromString( String stringValue ) { + if ( stringValue == null ) { + return fromNull(); + } + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.STRING; + value.varcharValue = stringValue; + return value; + } + + + public static TypedValue fromBytes( byte[] binaryValue ) { + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.BINARY; + value.binaryValue = binaryValue; + return value; + } + + + public static TypedValue fromDate( Date dateValue ) { + if ( dateValue == null ) { + return fromNull(); + } + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.DATE; + value.dateValue = dateValue; + return value; + } + + + public static TypedValue fromDate( Date dateValue, Calendar calendar ) { + return fromDate( 
TypedValueUtils.getDateInCalendar( dateValue, calendar ) ); + } + + + public static TypedValue fromTime( Time timeValue ) { + if ( timeValue == null ) { + return fromNull(); + } + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.TIME; + value.timeValue = timeValue; + return value; + } + + + public static TypedValue fromTime( Time timeValue, Calendar calendar ) { + return fromTime( TypedValueUtils.getTimeInCalendar( timeValue, calendar ) ); + } + + + public static TypedValue fromTimestamp( Timestamp timestampValue ) { + if ( timestampValue == null ) { + return fromNull(); + } + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.TIMESTAMP; + value.timestampValue = timestampValue; + return value; + } + + + public static TypedValue fromTimestamp( Timestamp timestampValue, Calendar calendar ) { + return fromTimestamp( TypedValueUtils.getTimestampInCalendar( timestampValue, calendar ) ); + } + + + public static TypedValue fromAsciiStream( InputStream asciiStream, int length ) throws SQLException { + return fromAsciiStream( asciiStream ); + } + + + public static TypedValue fromAsciiStream( InputStream asciiStream, long length ) throws SQLException { + return fromAsciiStream( asciiStream ); + } + + + public static TypedValue fromAsciiStream( InputStream asciiStream ) throws SQLException { + try { + return fromString( new String( collectByteStream( asciiStream ), StandardCharsets.US_ASCII ) ); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.STREAM_ERROR, "Failed to read from ascii stream.", e ); + } + } + + + public static TypedValue fromUnicodeStream( InputStream unicodeStream, int length ) throws SQLException { + try { + return fromString( new String( collectByteStream( unicodeStream ), StandardCharsets.UTF_8 ) ); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.STREAM_ERROR, "Failed to read from unicode stream.", e ); + } + } + + + public static 
TypedValue fromBinaryStream( InputStream binaryStream, int length ) throws SQLException { + return fromBinaryStream( binaryStream ); + } + + + public static TypedValue fromBinaryStream( InputStream binaryStream, long length ) throws SQLException { + return fromBinaryStream( binaryStream ); + } + + + public static TypedValue fromBinaryStream( InputStream binaryStream ) throws SQLException { + try { + return fromBytes( collectByteStream( binaryStream ) ); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.STREAM_ERROR, "Failed to read from binary stream.", e ); + } + } + + + public static TypedValue fromCharacterStream( Reader characterStream, int length ) throws SQLException { + return fromCharacterStream( characterStream ); + } + + + public static TypedValue fromCharacterStream( Reader characterStream, long length ) throws SQLException { + return fromCharacterStream( characterStream ); + } + + + public static TypedValue fromCharacterStream( Reader characterStream ) throws SQLException { + try { + return fromString( collectCharacterStream( characterStream ) ); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.STREAM_ERROR, "Failed to read from character stream.", e ); + } + } + + + public static TypedValue fromRef( Ref refValue ) throws SQLException { + throw new SQLFeatureNotSupportedException( "Refs are not supported yet." 
); + } + + + public static TypedValue fromDocument( PolyDocument document ) { + if ( document == null ) { + return fromNull(); + } + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.DOCUMENT; + value.otherValue = document; + return value; + } + + + public static TypedValue fromInterval( PolyInterval interval ) { + if ( interval == null ) { + return fromNull(); + } + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.INTERVAL; + value.otherValue = interval; + return value; + } + + + public static TypedValue fromBlob( Blob blobValue ) { + if ( blobValue == null ) { + return fromNull(); + } + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.FILE; + value.blobValue = blobValue; + return value; + } + + + public static TypedValue fromBlob( InputStream binaryStream ) throws SQLException { + try { + return fromBlob( new PolyBlob( collectByteStream( binaryStream ) ) ); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.STREAM_ERROR, "Failed to read blob form binary stream.", e ); + } + } + + + public static TypedValue fromBlob( InputStream binaryStream, long length ) throws SQLException { + return fromBlob( binaryStream ); + } + + + public static TypedValue fromNull() { + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.NULL; + return value; + } + + + public static TypedValue fromClob( Clob clobValue ) throws SQLException { + try { + return fromString( collectCharacterStream( clobValue.getCharacterStream() ) ); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.STREAM_ERROR, "Failed to read data from clob.", e ); + } + } + + + public static TypedValue fromClob( Reader reader ) throws SQLException { + try { + return fromString( collectCharacterStream( reader ) ); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.STREAM_ERROR, "Failed to read data from streamed clob.", e ); + 
} + } + + + public static TypedValue fromClob( Reader reader, long length ) throws SQLException { + return fromClob( reader ); + } + + + public static TypedValue fromArray( Array arrayValue ) { + if ( arrayValue == null ) { + return fromNull(); + } + TypedValue value = new TypedValue(); + value.valueCase = ValueCase.LIST; + value.arrayValue = arrayValue; + return value; + } + + + public static TypedValue fromUrl( URL urlValue ) throws SQLException { + throw new SQLFeatureNotSupportedException( "URLs are not supported yet." ); + } + + + public static TypedValue fromRowId( RowId rowIdValue ) throws SQLFeatureNotSupportedException { + throw new SQLFeatureNotSupportedException( "RowIds are not supported yet." ); + } + + + public static TypedValue fromObject( Object value ) throws SQLException { + try { + return TypedValueUtils.buildTypedValueFromObject( value ); + } catch ( ParseException | SQLFeatureNotSupportedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "Conversion from object failed.", e ); + } + } + + + public static TypedValue fromObject( Object value, int targetSqlType ) throws SQLException { + try { + return TypedValueUtils.buildTypedValueFromObject( value, targetSqlType ); + } catch ( ParseException | SQLFeatureNotSupportedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "Conversion from object failed.", e ); + } + } + + + public static TypedValue fromObject( Object value, int targetSqlType, int scaleOrLength ) throws SQLFeatureNotSupportedException { + throw new SQLFeatureNotSupportedException( "This feature is not supported yet." 
); + } + + + public static TypedValue fromNString( String stringValue ) { + return fromString( stringValue ); + } + + + public static TypedValue fromNCharacterStream( Reader character ) throws SQLException { + return fromCharacterStream( character ); + } + + + public static TypedValue fromNCharacterStream( Reader characterStream, long length ) throws SQLException { + return fromCharacterStream( characterStream, length ); + } + + + public static TypedValue fromNClob( NClob nClobValue ) throws SQLException { + return fromClob( nClobValue.getCharacterStream() ); + } + + + public static TypedValue fromNClob( Reader characterStream ) throws SQLException { + return fromClob( characterStream ); + } + + + public static TypedValue fromNClob( Reader characterStream, int length ) throws SQLException { + return fromClob( characterStream, length ); + } + + + public static TypedValue fromNClob( Reader characterStream, long length ) throws SQLException { + return fromClob( characterStream, length ); + } + + + public static TypedValue fromSQLXML( SQLXML sqlxmlValue ) throws SQLException { + throw new SQLFeatureNotSupportedException( "SQLXML is not yet supported." ); + } + + + public static TypedValue fromStruct( Struct value ) throws SQLFeatureNotSupportedException { + throw new SQLFeatureNotSupportedException( "Structs are not yet supported." 
); + } + + + @Override + public boolean isNull() { + return valueCase == ValueCase.NULL; + } + + + public boolean isUdt() { + //TODO: adjust when user defined types are supported + return false; + } + + + public int getLength() { + if ( isSerialized ) { + deserialize(); + } + switch ( valueCase ) { + case BINARY: + return binaryValue.length; + case STRING: + return varcharValue.length(); + } + return 0; + } + + + public TypedValue getTrimmed( int length ) { + switch ( valueCase ) { + case BINARY: + byte[] binaryData = Arrays.copyOfRange( binaryValue, 0, length ); + return TypedValue.fromBytes( binaryData ); + case STRING: + String string = varcharValue.substring( 0, length ); + return TypedValue.fromString( string ); + } + return this; + } + + + @Override + public String asString() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( varcharValue != null ) { + return varcharValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type CHAR or VARCHAR." ); + } + + + @Override + public boolean asBoolean() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( booleanValue != null ) { + return booleanValue; + } + if ( varcharValue != null ) { + if ( varcharValue.equals( "0" ) ) { + return false; + } + if ( varcharValue.equals( "1" ) ) { + return true; + } + } + if ( integerValue != null ) { + if ( integerValue == 0 ) { + return false; + } + if ( integerValue == 1 ) { + return true; + } + } + if ( bigintValue != null ) { + if ( bigintValue == 0 ) { + return false; + } + if ( bigintValue == 1 ) { + return true; + } + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type BOOLEAN." 
); + } + + + @Override + public byte asByte() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( integerValue != null ) { + return integerValue.byteValue(); + } + if ( bigintValue != null ) { + return bigintValue.byteValue(); + } + if ( isNull() ) { + return 0; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type TINYINT, SMALLINT, INTEGER or BIGINT." ); + } + + + @Override + public short asShort() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( integerValue != null ) { + return integerValue.shortValue(); + } + if ( bigintValue != null ) { + return bigintValue.shortValue(); + } + if ( isNull() ) { + return 0; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type TINYINT, SMALLINT, INTEGER or BIGINT." ); + } + + + @Override + public int asInt() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( integerValue != null ) { + return integerValue; + } + if ( bigintValue != null ) { + return bigintValue.intValue(); + } + if ( bigDecimalValue != null ) { + return bigDecimalValue.intValue(); + } + if ( isNull() ) { + return 0; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type TINYINT, SMALLINT, INTEGER or BIGINT." ); + } + + + @Override + public long asLong() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( bigintValue != null ) { + return bigintValue; + } + if ( integerValue != null ) { + return integerValue; + } + if ( isNull() ) { + return 0; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type TINYINT, SMALLINT, INTEGER or BIGINT." 
); + } + + + @Override + public float asFloat() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( floatValue != null ) { + return floatValue; + } + if ( doubleValue != null ) { + return doubleValue.floatValue(); + } + if ( bigDecimalValue != null ) { + return bigDecimalValue.floatValue(); + } + if ( isNull() ) { + return 0; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type REAL, FLOT or DOUBLE." ); + } + + + @Override + public double asDouble() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( doubleValue != null ) { + return doubleValue.doubleValue(); + } + if ( floatValue != null ) { + return floatValue.doubleValue(); + } + if ( bigDecimalValue != null ) { + return bigDecimalValue.doubleValue(); + } + if ( isNull() ) { + return 0; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type REAL, FLOT or DOUBLE." ); + } + + + @Override + public BigDecimal asBigDecimal() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( bigDecimalValue != null ) { + return bigDecimalValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type DECIMAL." ); + } + + + @Override + @Deprecated + public BigDecimal asBigDecimal( int scale ) throws SQLException { + return asBigDecimal().setScale( scale, RoundingMode.HALF_EVEN ); + } + + + @Override + public byte[] asBytes() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( binaryValue != null ) { + return binaryValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type BINARY or VARBINARY." 
); + } + + + @Override + public InputStream asAsciiStream() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( varcharValue != null ) { + return new ByteArrayInputStream( varcharValue.getBytes( StandardCharsets.US_ASCII ) ); + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type CHAR or VARCHAR." ); + } + + + @Override + @Deprecated + public InputStream asUnicodeStream() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( varcharValue != null ) { + return new ByteArrayInputStream( varcharValue.getBytes( StandardCharsets.UTF_8 ) ); + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type CHAR or VARCHAR." ); + } + + + @Override + public InputStream asBinaryStream() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( blobValue != null ) { + return blobValue.getBinaryStream(); + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not streamable." ); + } + + + @Override + public PolyDocument asDocument() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( otherValue != null ) { + return (PolyDocument) otherValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type DOCUMENT." ); + } + + + @Override + public PolyInterval asInterval() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( otherValue != null ) { + return (PolyInterval) otherValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type INTERVAL." 
); + } + + + @Override + public Object asObject() throws SQLException { + switch ( valueCase ) { + case BOOLEAN: + return asBoolean(); + case INTEGER: + return asInt(); + case LONG: + return asLong(); + case BIG_DECIMAL: + return asBigDecimal(); + case FLOAT: + return asFloat(); + case DOUBLE: + return asDouble(); + case DATE: + return asDate(); + case TIME: + return asTime(); + case TIMESTAMP: + return asTimestamp(); + case INTERVAL: + return asInterval(); + case STRING: + return asString(); + case BINARY: + return asBytes(); + case NULL: + return null; + case LIST: + return asArray(); + case DOCUMENT: + return asDocument(); + case FILE: + return asBlob(); + default: + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value has unknown type and thus can not be returned." ); + } + } + + + @Override + public Object asObject( Calendar calendar ) throws SQLException { + switch ( valueCase ) { + case BOOLEAN: + return asBoolean(); + case INTEGER: + return asInt(); + case LONG: + return asLong(); + case BIG_DECIMAL: + return asBigDecimal(); + case FLOAT: + return asFloat(); + case DOUBLE: + return asDouble(); + case DATE: + return asDate( calendar ); + case TIME: + return asTime( calendar ); + case TIMESTAMP: + return asTimestamp( calendar ); + case INTERVAL: + return asInterval(); + case STRING: + return asString(); + case BINARY: + return asBytes(); + case NULL: + return null; + case LIST: + return asArray(); + case DOCUMENT: + return asDocument(); + case FILE: + return asBlob(); + default: + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value has unknown type and thus can not be returned." 
); + } + } + + + @Override + public Reader asCharacterStream() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( varcharValue != null ) { + return new StringReader( varcharValue ); + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type CHAR or VARCHAR." ); + } + + + @Override + public Blob asBlob() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( blobValue != null ) { + return blobValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type FILE, AUDIO, VIDEO or IMAGE." ); + } + + + @Override + public Clob asClob() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( varcharValue != null ) { + return new PolyClob( varcharValue ); + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type CHAR or VARCHAR." ); + } + + + @Override + public Array asArray() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( arrayValue != null ) { + return arrayValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type ARRAY." ); + } + + + @Override + public Struct asStruct() throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "No type retrievable as a struct exists in Polypheny." ); + } + + + @Override + public Date asDate() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( dateValue != null ) { + return dateValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type DATE." 
); + } + + + @Override + public Date asDate( Calendar calendar ) throws SQLException { + return TypedValueUtils.getDateInCalendar( asDate(), calendar ); + } + + + @Override + public Time asTime() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( timeValue != null ) { + return timeValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type TIME." ); + } + + + @Override + public Time asTime( Calendar calendar ) throws SQLException { + return TypedValueUtils.getTimeInCalendar( asTime(), calendar ); + } + + + @Override + public Timestamp asTimestamp() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( timestampValue != null ) { + return timestampValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type TIMESTAMP." ); + } + + + @Override + public Timestamp asTimestamp( Calendar calendar ) throws SQLException { + return TypedValueUtils.getTimestampInCalendar( asTimestamp(), calendar ); + } + + + @Override + public Ref asRef() throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "No type retrievable as a reference exists in Polypheny." ); + } + + + @Override + public RowId asRowId() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( rowIdValue != null ) { + return rowIdValue; + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type ROW_ID." ); + } + + + @Override + public URL asUrl() throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "No type retrievable as a url exists in Polypheny." 
); + } + + + @Override + public NClob asNClob() throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( varcharValue != null ) { + return new PolyClob( varcharValue ); + } + if ( isNull() ) { + return null; + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type FILE, AUDIO, VIDEO or IMAGE." ); + } + + + @Override + public SQLXML asSQLXML() throws SQLException { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "No type retrievable as SQLXML exists in Polypheny." ); + } + + + @Override + public String asNString() throws SQLException { + return asString(); + } + + + @Override + public Reader asNCharacterStream() throws SQLException { + return asCharacterStream(); + } + + + @Override + public Object asObject( Map> map ) throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( otherValue == null || !(otherValue instanceof UDTPrototype) ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type USER_DEFINED_TYPE." ); + } + UDTPrototype prototype = (UDTPrototype) otherValue; + Class udtClass = map.get( prototype.getTypeName() ); + return buildFromUdtPrototype( udtClass, prototype ); + } + + + @Override + public T asObject( Class aClass ) throws SQLException { + if ( isSerialized ) { + deserialize(); + } + if ( otherValue == null || !(otherValue instanceof UDTPrototype) ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "This value is not of type USER_DEFINED_TYPE." 
); + } + return aClass.cast( buildFromUdtPrototype( aClass, (UDTPrototype) otherValue ) ); + } + + + private Object buildFromUdtPrototype( Class udtClass, UDTPrototype prototype ) throws SQLException { + if ( udtClass == null ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "Type-map contains no type for internal type " + prototype.getTypeName() ); + } + try { + Constructor udtConstructor = udtClass.getConstructor( SQLInput.class, String.class ); + return udtConstructor.newInstance( prototype, prototype.getTypeName() ); + } catch ( NoSuchMethodException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.MISSING_INTERFACE, "The type contained in the type map does not implement the SQLInput interface required for udt construction" ); + } catch ( InvocationTargetException | InstantiationException | IllegalAccessException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.UDT_CONSTRUCTION_FAILED, "Construction of user defined type failed", e ); + } + } + + + private static byte[] collectByteStream( InputStream stream ) throws IOException { + ByteArrayOutputStream buffer = new ByteArrayOutputStream(); + int frameLength; + byte[] frame = new byte[4]; + while ( (frameLength = stream.read( frame, 0, frame.length )) != -1 ) { + buffer.write( frame, 0, frameLength ); + } + buffer.flush(); + return buffer.toByteArray(); + } + + + private static String collectCharacterStream( Reader reader ) throws IOException { + char[] readBuffer = new char[8 * 1024]; + StringBuilder buffer = new StringBuilder(); + int bufferIndex; + while ( (bufferIndex = reader.read( readBuffer, 0, readBuffer.length )) != -1 ) { + buffer.append( readBuffer, 0, bufferIndex ); + } + reader.close(); + return buffer.toString(); + } + + + private void deserialize() { + try { + switch ( valueCase ) { + case BOOLEAN: + booleanValue = serialized.getBoolean().getBoolean(); + break; + case INTEGER: + integerValue = 
serialized.getInteger().getInteger(); + break; + case LONG: + bigintValue = serialized.getLong().getLong(); + break; + case BINARY: + binaryValue = serialized.getBinary().getBinary().toByteArray(); + break; + case DATE: + dateValue = new Date( serialized.getDate().getDate() * MILLISECONDS_PER_DAY ); + break; + case DOUBLE: + doubleValue = serialized.getDouble().getDouble(); + break; + case FLOAT: + floatValue = serialized.getFloat().getFloat(); + break; + case NULL: + break; + case STRING: + varcharValue = serialized.getString().getString(); + break; + case TIME: + timeValue = new Time( serialized.getTime().getTime() ); + break; + case TIMESTAMP: + timestampValue = new Timestamp( serialized.getTimestamp().getTimestamp() ); + break; + case BIG_DECIMAL: + bigDecimalValue = getBigDecimal( serialized.getBigDecimal().getUnscaledValue(), serialized.getBigDecimal().getScale() ); + break; + case LIST: + arrayValue = getArray( serialized ); + break; + case INTERVAL: + otherValue = getInterval( serialized.getInterval() ); + break; + case DOCUMENT: + otherValue = new PolyDocument( serialized.getDocument() ); + break; + case FILE: + blobValue = new PolyBlob( serialized.getFile().getBinary().toByteArray() ); + break; + default: + throw new RuntimeException( "Cannot deserialize ProtoValue of case " + valueCase ); + } + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } + } + + + public ProtoValue serialize() throws SQLException { + switch ( valueCase ) { + case BOOLEAN: + return serializeAsProtoBoolean(); + case INTEGER: + return serializeAsProtoInteger(); + case LONG: + return serializeAsProtoLong(); + case BIG_DECIMAL: + return serializeAsProtoBigDecimal(); + case FLOAT: + return serializeAsProtoFloat(); + case DOUBLE: + return serializeAsProtoDouble(); + case DATE: + return serializeAsProtoDate(); + case TIME: + return serializeAsProtoTime(); + case TIMESTAMP: + return serializeAsTimestamp(); + case INTERVAL: + return serializeAsInterval(); + case STRING: + 
return serializeAsProtoString(); + case BINARY: + return serializeAsProtoBinary(); + case NULL: + return serializeAsProtoNull(); + case LIST: + return serializeAsProtoList(); + case FILE: + return serializeAsProtoFile(); + case DOCUMENT: + return serializeAsProtoDocument(); + + } + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DATA_TYPE_MISMATCH, "Failed to serialize unknown type: " + valueCase.name() ); + } + + + private ProtoValue serializeAsProtoFile() throws SQLException { + try { + ProtoFile protoFile = ProtoFile.newBuilder() + .setBinary( ByteString.copyFrom( collectByteStream( blobValue.getBinaryStream() ) ) ) + .build(); + return ProtoValue.newBuilder() + .setFile( protoFile ) + .build(); + } catch ( IOException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.STREAM_ERROR, "Failed to read bytes from blob." ); + } + } + + + private ProtoValue serializeAsProtoDocument() { + return ProtoValue.newBuilder() + .setDocument( ((PolyDocument) otherValue).serialize() ) + .build(); + } + + + private ProtoValue serializeAsInterval() { + PolyInterval interval = (PolyInterval) otherValue; + ProtoInterval protoInterval = ProtoInterval.newBuilder() + .setMonths( interval.getMonths() ) + .setMilliseconds( interval.getMilliseconds() ) + .build(); + return ProtoValue.newBuilder() + .setInterval( protoInterval ) + .build(); + } + + + private ProtoValue serializeAsProtoList() throws SQLException { + List elements = new ArrayList<>(); + for ( Object object : (Object[]) arrayValue.getArray() ) { + elements.add( TypedValue.fromObject( object ).serialize() ); + } + ProtoList protoList = ProtoList.newBuilder() + .addAllValues( elements ) + .build(); + return ProtoValue.newBuilder() + .setList( protoList ) + .build(); + } + + + private ProtoValue serializeAsProtoDouble() { + ProtoDouble protoDouble = ProtoDouble.newBuilder() + .setDouble( doubleValue ) + .build(); + return ProtoValue.newBuilder() + .setDouble( protoDouble ) + .build(); + } + + 
+ private ProtoValue serializeAsProtoFloat() { + ProtoFloat protoFloat = ProtoFloat.newBuilder() + .setFloat( floatValue ) + .build(); + return ProtoValue.newBuilder() + .setFloat( protoFloat ) + .build(); + } + + + private ProtoValue serializeAsProtoLong() { + ProtoLong protoLong = ProtoLong.newBuilder() + .setLong( bigintValue ) + .build(); + return ProtoValue.newBuilder() + .setLong( protoLong ) + .build(); + } + + + private ProtoValue serializeAsProtoBigDecimal() { + ProtoBigDecimal protoBigDecimal = ProtoBigDecimal.newBuilder() + .setUnscaledValue( ByteString.copyFrom( bigDecimalValue.unscaledValue().toByteArray() ) ) + .setScale( bigDecimalValue.scale() ) + .build(); + return ProtoValue.newBuilder() + .setBigDecimal( protoBigDecimal ) + .build(); + } + + + private ProtoValue serializeAsProtoDate() { + long milliseconds = dateValue.getTime(); + milliseconds += DriverProperties.getDEFAULT_TIMEZONE().getOffset( milliseconds ); + ProtoDate protoDate = ProtoDate.newBuilder() + .setDate( milliseconds / MILLISECONDS_PER_DAY ) + .build(); + return ProtoValue.newBuilder() + .setDate( protoDate ) + .build(); + } + + + private ProtoValue serializeAsProtoString() { + return ProtoUtils.serializeAsProtoString( varcharValue ); + } + + + private ProtoValue serializeAsProtoTime() { + long ofDay = timeValue.getTime(); + ofDay += DriverProperties.getDEFAULT_TIMEZONE().getOffset( ofDay ); + ProtoTime protoTime = ProtoTime.newBuilder() + .setTime( (int) ofDay ) + .build(); + return ProtoValue.newBuilder() + .setTime( protoTime ) + .build(); + } + + + private ProtoValue serializeAsTimestamp() { + long milliseconds = timestampValue.getTime(); + milliseconds += DriverProperties.getDEFAULT_TIMEZONE().getOffset( milliseconds ); + ProtoTimestamp protoTimestamp = ProtoTimestamp.newBuilder() + .setTimestamp( milliseconds ) + .build(); + return ProtoValue.newBuilder() + .setTimestamp( protoTimestamp ) + .build(); + } + + + private ProtoValue serializeAsProtoBinary() { + ProtoBinary 
protoBinary = ProtoBinary.newBuilder() + .setBinary( ByteString.copyFrom( binaryValue ) ) + .build(); + return ProtoValue.newBuilder() + .setBinary( protoBinary ) + .build(); + } + + + private ProtoValue serializeAsProtoNull() { + return ProtoValue.newBuilder() + .setNull( ProtoNull.newBuilder().build() ) + .build(); + } + + + private ProtoValue serializeAsProtoBoolean() { + ProtoBoolean protoBoolean = ProtoBoolean.newBuilder() + .setBoolean( booleanValue ) + .build(); + return ProtoValue.newBuilder() + .setBoolean( protoBoolean ) + .build(); + } + + + private ProtoValue serializeAsProtoInteger() { + ProtoInteger protoInteger = ProtoInteger.newBuilder() + .setInteger( integerValue ) + .build(); + return ProtoValue.newBuilder() + .setInteger( protoInteger ) + .build(); + } + + + private static BigDecimal getBigDecimal( ByteString unscaledValue, int scale ) { + BigInteger value = new BigInteger( unscaledValue.toByteArray() ); + return new BigDecimal( value, scale ); + } + + + private static Array getArray( ProtoValue value ) throws SQLException { + String baseType = value.getValueCase().name(); + List values = value.getList().getValuesList().stream() + .map( TypedValue::new ) + .collect( Collectors.toList() ); + return new PolyArray( baseType, values ); + } + + + private static PolyInterval getInterval( ProtoInterval interval ) { + return new PolyInterval( interval.getMonths(), interval.getMilliseconds() ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/types/UDTPrototype.java b/src/main/java/org/polypheny/jdbc/types/UDTPrototype.java new file mode 100644 index 00000000..bf99972b --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/types/UDTPrototype.java @@ -0,0 +1,252 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.types; + +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLInput; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.ArrayList; +import lombok.Getter; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; + +public class UDTPrototype implements SQLInput { + + @Getter + private final String typeName; + private final ArrayList values; + private int currentIndex; + private boolean lastValueWasNull; + private boolean isFinalized; + + + public UDTPrototype( String typeName, ArrayList values ) { + this.typeName = typeName; + this.values = values; + this.currentIndex = -1; + this.lastValueWasNull = true; + this.isFinalized = false; + } + + + public UDTPrototype( String typeName ) { + this.typeName = typeName; + this.values = new ArrayList<>(); + this.currentIndex = -1; + this.lastValueWasNull = true; + this.isFinalized = false; + } + + + public void addValue( TypedValue value ) throws SQLException { + if ( isFinalized ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Can't add values to finalized prototype." 
); + } + values.add( value ); + } + + + private TypedValue getNextValue() throws SQLException { + if ( !isFinalized ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.OPERATION_ILLEGAL, "Can't read value from unfinalized prototype." ); + } + currentIndex++; + if ( currentIndex >= values.size() ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.UDT_REACHED_END, "Reached end of udt value stream." ); + } else { + TypedValue currentValue = values.get( currentIndex ); + lastValueWasNull = currentValue.isNull(); + return currentValue; + } + } + + + @Override + public String readString() throws SQLException { + return getNextValue().asString(); + } + + + @Override + public boolean readBoolean() throws SQLException { + return getNextValue().asBoolean(); + } + + + @Override + public byte readByte() throws SQLException { + return getNextValue().asByte(); + } + + + @Override + public short readShort() throws SQLException { + return getNextValue().asShort(); + } + + + @Override + public int readInt() throws SQLException { + return getNextValue().asInt(); + } + + + @Override + public long readLong() throws SQLException { + return getNextValue().asLong(); + } + + + @Override + public float readFloat() throws SQLException { + return getNextValue().asFloat(); + } + + + @Override + public double readDouble() throws SQLException { + return getNextValue().asDouble(); + } + + + @Override + public BigDecimal readBigDecimal() throws SQLException { + return getNextValue().asBigDecimal(); + } + + + @Override + public byte[] readBytes() throws SQLException { + return getNextValue().asBytes(); + } + + + @Override + public Date readDate() throws SQLException { + return getNextValue().asDate(); + } + + + @Override + public Time readTime() throws SQLException { + return getNextValue().asTime(); + } + + + @Override + public Timestamp readTimestamp() throws SQLException { + return getNextValue().asTimestamp(); + } + + + @Override + public Reader 
readCharacterStream() throws SQLException { + return getNextValue().asCharacterStream(); + } + + + @Override + public InputStream readAsciiStream() throws SQLException { + return getNextValue().asAsciiStream(); + } + + + @Override + public InputStream readBinaryStream() throws SQLException { + return getNextValue().asBinaryStream(); + } + + + @Override + public Object readObject() throws SQLException { + return getNextValue().asObject(); + } + + + @Override + public Ref readRef() throws SQLException { + return getNextValue().asRef(); + } + + + @Override + public Blob readBlob() throws SQLException { + return getNextValue().asBlob(); + } + + + @Override + public Clob readClob() throws SQLException { + return getNextValue().asClob(); + } + + + @Override + public Array readArray() throws SQLException { + return getNextValue().asArray(); + } + + + @Override + public boolean wasNull() throws SQLException { + return lastValueWasNull; + } + + + @Override + public URL readURL() throws SQLException { + return getNextValue().asUrl(); + } + + + @Override + public NClob readNClob() throws SQLException { + return getNextValue().asNClob(); + } + + + @Override + public String readNString() throws SQLException { + return getNextValue().asNString(); + } + + + @Override + public SQLXML readSQLXML() throws SQLException { + return getNextValue().asSQLXML(); + } + + + @Override + public RowId readRowId() throws SQLException { + return getNextValue().asRowId(); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/utils/CallbackQueue.java b/src/main/java/org/polypheny/jdbc/utils/CallbackQueue.java new file mode 100644 index 00000000..65b58f54 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/utils/CallbackQueue.java @@ -0,0 +1,99 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.utils; + +import java.util.LinkedList; +import java.util.Queue; +import java.util.concurrent.locks.Condition; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Function; +import org.polypheny.jdbc.PrismInterfaceErrors; +import org.polypheny.jdbc.PrismInterfaceServiceException; +import org.polypheny.prism.Response; + +public class CallbackQueue { + + private final Lock queueLock = new ReentrantLock(); + private final Condition hasNext = queueLock.newCondition(); + private final Condition isCompleted = queueLock.newCondition(); + private boolean bIsCompleted = false; + private final Queue messageQueue = new LinkedList<>(); + private final Function extractResponse; + private PrismInterfaceServiceException propagatedException; + + + public CallbackQueue( Function extractResponse ) { + this.extractResponse = extractResponse; + } + + + public void awaitCompletion() throws InterruptedException { + queueLock.lock(); + while ( !bIsCompleted ) { + isCompleted.await(); + } + } + + + public T takeNext() throws PrismInterfaceServiceException { + queueLock.lock(); + while ( messageQueue.isEmpty() ) { + try { + hasNext.await(); + } catch ( InterruptedException e ) { + throw new PrismInterfaceServiceException( PrismInterfaceErrors.DRIVER_THREADING_ERROR, "Awaiting next response failed.", e ); + } + throwReceivedException(); + } + T message = messageQueue.remove(); + queueLock.unlock(); + return message; + } + + + private void throwReceivedException() throws 
PrismInterfaceServiceException { + if ( propagatedException != null ) { + throw propagatedException; + } + } + + + public void onNext( Response message ) { + queueLock.lock(); + messageQueue.add( extractResponse.apply( message ) ); + hasNext.signal(); + queueLock.unlock(); + } + + + public void onError( Throwable propagatedException ) { + queueLock.lock(); + this.propagatedException = new PrismInterfaceServiceException( propagatedException ); + hasNext.signal(); + queueLock.unlock(); + } + + + public void onCompleted() { + queueLock.lock(); + bIsCompleted = true; + isCompleted.signal(); + queueLock.unlock(); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/utils/ProtoUtils.java b/src/main/java/org/polypheny/jdbc/utils/ProtoUtils.java new file mode 100644 index 00000000..7cfa640e --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/utils/ProtoUtils.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.utils; + +import java.sql.SQLException; +import java.util.List; +import java.util.stream.Collectors; +import org.polypheny.jdbc.types.TypedValue; +import org.polypheny.prism.ProtoString; +import org.polypheny.prism.ProtoValue; + +public class ProtoUtils { + + public static ProtoValue serializeAsProtoString( String string ) { + ProtoString protoString = ProtoString.newBuilder() + .setString( string ) + .build(); + return ProtoValue.newBuilder() + .setString( protoString ) + .build(); + } + + + public static List serializeParameterList( List values ) { + return values.stream().map( v -> { + try { + return v.serialize(); + } catch ( SQLException e ) { + throw new RuntimeException( "Should not be thrown. Encountered an unknown type during serialization." ); + } + } ).collect( Collectors.toList() ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/utils/TypedValueUtils.java b/src/main/java/org/polypheny/jdbc/utils/TypedValueUtils.java new file mode 100644 index 00000000..350b9a41 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/utils/TypedValueUtils.java @@ -0,0 +1,955 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc.utils; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLXML; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.OffsetDateTime; +import java.time.OffsetTime; +import java.time.ZoneOffset; +import java.util.Calendar; +import java.util.List; +import java.util.TimeZone; +import java.util.stream.Collectors; +import org.polypheny.jdbc.types.PolyDocument; +import org.polypheny.jdbc.types.PolyInterval; +import org.polypheny.jdbc.types.ProtoToJdbcTypeMap; +import org.polypheny.jdbc.types.TypedValue; +import org.polypheny.prism.ProtoPolyType; +import org.polypheny.prism.Row; + +public class TypedValueUtils { + + + private static SimpleDateFormat SQL_DATE_FORMAT; + private static SimpleDateFormat SQL_TIME_FORMAT; + + + static { + SimpleDateFormat simpleDateFormat = new SimpleDateFormat( "dd MMM yyyy" ); + simpleDateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) ); + SQL_DATE_FORMAT = simpleDateFormat; + + SimpleDateFormat simpleDateFormat1 = new SimpleDateFormat( "HH:mm:ss" ); + simpleDateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) ); + SQL_TIME_FORMAT = simpleDateFormat1; + } + + + public static Time getTimeFromString( String string ) throws ParseException { + return new Time( SQL_TIME_FORMAT.parse( string ).getTime() ); + } + + + public static Date getDateFromString( String string ) throws ParseException { + return Date.valueOf( string ); + } + + + public static boolean 
getBooleanFromNumber( Number number ) { + return number.byteValue() == 1; + } + + + public static boolean getBooleanFromString( String string ) { + return string.equals( "1" ) || string.equalsIgnoreCase( "true" ); + } + + + public static Number getNumberFromBoolean( Boolean bool ) { + if ( bool ) { + return 1; + } + return 0; + } + + + public static String getOneZeroStringFromBoolean( Boolean bool ) { + if ( bool ) { + return "1"; + } + return "0"; + } + + + public static Date getDateFromTimestamp( Timestamp timestamp ) { + return new Date( timestamp.getTime() ); + } + + + public static Date getDateInCalendar( Date date, Calendar calendar ) { + return new Date( getTimeLongInCalendar( date.getTime(), calendar ) ); + } + + + public static Time getTimeFromTimestamp( Timestamp timestamp ) { + return new Time( timestamp.getTime() ); + } + + + public static Time getTimeInCalendar( Time time, Calendar calendar ) { + return new Time( getTimeLongInCalendar( time.getTime(), calendar ) ); + } + + + private static Time getTimeFromOffsetTime( OffsetTime offsetTime ) { + return Time.valueOf( offsetTime.toLocalTime() ); + } + + + private static Timestamp getTimestampFromOffsetDateTime( OffsetDateTime offsetDateTime ) { + return Timestamp.valueOf( offsetDateTime.atZoneSameInstant( ZoneOffset.UTC ).toLocalDateTime() ); + } + + + public static Timestamp getTimestampFromTime( Time value ) { + return new Timestamp( value.getTime() ); + } + + + public static Timestamp getTimestampFromDate( Date value ) { + return new Timestamp( value.getTime() ); + } + + + public static Timestamp getTimestampFromString( String value ) { + return Timestamp.valueOf( value ); + } + + + public static Timestamp getTimestampInCalendar( Timestamp timestamp, Calendar calendar ) { + return new Timestamp( getTimeLongInCalendar( timestamp.getTime(), calendar ) ); + } + + + private static long getTimeLongInCalendar( long value, Calendar calendar ) { + return value - calendar.getTimeZone().getOffset( value ); + } + 
+ + public static List> buildRows( List rows ) { + return rows.stream() + .map( TypedValueUtils::buildRow ) + .collect( Collectors.toList() ); + } + + + public static List buildRow( Row row ) { + return row.getValuesList().stream() + .map( TypedValue::new ) + .collect( Collectors.toList() ); + } + + + public static int getJdbcTypeFromPolyTypeName( String polyTypeName ) { + return ProtoToJdbcTypeMap.getJdbcTypeFromProto( ProtoPolyType.valueOf( polyTypeName ) ); + } + + + public static TypedValue buildTypedValueFromObject( Object value ) throws SQLException, ParseException { + if ( value == null ) { + return TypedValue.fromNull(); + } + if ( value instanceof String ) { + return buildTypedValueFromObject( value, Types.VARCHAR ); + } + if ( value instanceof BigDecimal ) { + return buildTypedValueFromObject( value, Types.NUMERIC ); + } + if ( value instanceof Boolean ) { + return buildTypedValueFromObject( value, Types.BOOLEAN ); + } + if ( value instanceof Byte ) { + return buildTypedValueFromObject( value, Types.TINYINT ); + } + if ( value instanceof Short ) { + return buildTypedValueFromObject( value, Types.SMALLINT ); + } + if ( value instanceof Integer ) { + return buildTypedValueFromObject( value, Types.INTEGER ); + } + if ( value instanceof Long ) { + return buildTypedValueFromObject( value, Types.BIGINT ); + } + if ( value instanceof Float ) { + return buildTypedValueFromObject( value, Types.REAL ); + } + if ( value instanceof Double ) { + return buildTypedValueFromObject( value, Types.DOUBLE ); + } + if ( value instanceof byte[] ) { + return buildTypedValueFromObject( value, Types.BINARY ); + } + if ( value instanceof BigInteger ) { + //requires conversion + return buildTypedValueFromObject( value, Types.BIGINT ); + } + if ( value instanceof Date ) { + return buildTypedValueFromObject( value, Types.DATE ); + } + if ( value instanceof Time ) { + return buildTypedValueFromObject( value, Types.TIME ); + } + if ( value instanceof Timestamp ) { + return 
buildTypedValueFromObject( value, Types.TIMESTAMP ); + } + if ( value instanceof NClob ) { + // extends NClob + return buildTypedValueFromObject( value, Types.NCLOB ); + } + if ( value instanceof Clob ) { + return buildTypedValueFromObject( value, Types.CLOB ); + } + if ( value instanceof Blob ) { + return buildTypedValueFromObject( value, Types.BLOB ); + } + if ( value instanceof Array ) { + return buildTypedValueFromObject( value, Types.ARRAY ); + } + if ( value instanceof Struct ) { + return buildTypedValueFromObject( value, Types.STRUCT ); + } + if ( value instanceof Ref ) { + return buildTypedValueFromObject( value, Types.REF ); + } + if ( value instanceof URL ) { + return buildTypedValueFromObject( value, Types.DATALINK ); + } + if ( value instanceof RowId ) { + return buildTypedValueFromObject( value, Types.ROWID ); + } + if ( value instanceof SQLXML ) { + return buildTypedValueFromObject( value, Types.SQLXML ); + } + if ( value instanceof Calendar ) { + // requires conversion + return buildTypedValueFromObject( value, Types.TIMESTAMP ); + } + if ( value instanceof java.util.Date ) { + // requires conversion + return buildTypedValueFromObject( value, Types.TIMESTAMP ); + } + if ( value instanceof LocalDate ) { + // requires conversion + return buildTypedValueFromObject( value, Types.DATE ); + } + if ( value instanceof LocalTime ) { + // requires conversion + return buildTypedValueFromObject( value, Types.TIME ); + } + if ( value instanceof LocalDateTime ) { + //requires conversion + return buildTypedValueFromObject( value, Types.TIMESTAMP ); + } + if ( value instanceof OffsetTime ) { + return buildTypedValueFromObject( value, Types.TIME_WITH_TIMEZONE ); + } + if ( value instanceof OffsetDateTime ) { + return buildTypedValueFromObject( value, Types.TIMESTAMP_WITH_TIMEZONE ); + } + if ( value instanceof PolyInterval ) { + return TypedValue.fromInterval( (PolyInterval) value ); + } + if ( value instanceof PolyDocument ) { + return TypedValue.fromDocument( 
(PolyDocument) value ); + } + return buildTypedValueFromObject( value, Types.JAVA_OBJECT ); + } + + + public static TypedValue buildTypedValueFromObject( Object value, int targetSqlType ) throws ParseException, SQLException { + if ( value == null ) { + return TypedValue.fromNull(); + } + if ( value instanceof String ) { + return buildTypedValueFromString( (String) value, targetSqlType ); + } + if ( value instanceof BigDecimal ) { + return buildTypedValueFromBigDecimal( (BigDecimal) value, targetSqlType ); + } + if ( value instanceof Boolean ) { + return buildTypedValueFromBoolean( (Boolean) value, targetSqlType ); + } + if ( value instanceof Byte ) { + return buildTypedValueFromByte( (Byte) value, targetSqlType ); + } + if ( value instanceof Short ) { + return buildTypedValueFromShort( (Short) value, targetSqlType ); + } + if ( value instanceof Integer ) { + return buildTypedValueFromInteger( (Integer) value, targetSqlType ); + } + if ( value instanceof Long ) { + return buildTypedValueFromLong( (Long) value, targetSqlType ); + } + if ( value instanceof Float ) { + return buildTypedValueFromFloat( (Float) value, targetSqlType ); + } + if ( value instanceof Double ) { + return buildTypedValueFromDouble( (Double) value, targetSqlType ); + } + if ( value instanceof byte[] ) { + return buildTypedValueFromBytes( (byte[]) value, targetSqlType ); + } + if ( value instanceof BigInteger ) { + //requires conversion + return buildTypedValueFromBigInteger( (BigInteger) value, targetSqlType ); + } + if ( value instanceof Date ) { + return buildTypedValueFromDate( (Date) value, targetSqlType ); + } + if ( value instanceof Time ) { + return buildTypedValueFromTime( (Time) value, targetSqlType ); + } + if ( value instanceof Timestamp ) { + return buildTypedValueFromTimestamp( (Timestamp) value, targetSqlType ); + } + if ( value instanceof NClob ) { + // extends NClob + return buildTypedValueFromNClob( (NClob) value, targetSqlType ); + } + if ( value instanceof Clob ) { + return 
buildTypedValueFromClob( (Clob) value, targetSqlType ); + } + if ( value instanceof Blob ) { + return buildTypedValueFromBlob( (Blob) value, targetSqlType ); + } + if ( value instanceof Array ) { + return buildTypedValueFromArray( (Array) value, targetSqlType ); + } + if ( value instanceof Struct ) { + return buildTypedValueFromStruct( (Struct) value, targetSqlType ); + } + if ( value instanceof Ref ) { + return buildTypedValueFromRef( (Ref) value, targetSqlType ); + } + if ( value instanceof URL ) { + return buildTypedValueFromURL( (URL) value, targetSqlType ); + } + if ( value instanceof RowId ) { + return buildTypedValueFromRowId( (RowId) value, targetSqlType ); + } + if ( value instanceof SQLXML ) { + return buildTypedValueFromSQXML( (SQLXML) value, targetSqlType ); + } + if ( value instanceof Calendar ) { + // requires conversion + return buildTypedValueFromCalendar( (Calendar) value, targetSqlType ); + } + if ( value instanceof java.util.Date ) { + // requires conversion + return buildTypedValueFromDate( (java.util.Date) value, targetSqlType ); + } + if ( value instanceof LocalDate ) { + // requires conversion + return buildTypedValueFromLocalDate( (LocalDate) value, targetSqlType ); + } + if ( value instanceof LocalTime ) { + // requires conversion + return buildTypedValueFromLocalTime( (LocalTime) value, targetSqlType ); + } + if ( value instanceof LocalDateTime ) { + //requires conversion + return buildTypedValueFromLocalDateTime( (LocalDateTime) value, targetSqlType ); + } + if ( value instanceof OffsetTime ) { + return buildTypedValueFromOffsetTime( (OffsetTime) value, targetSqlType ); + } + if ( value instanceof OffsetDateTime ) { + return buildTypedValueFromOffsetDateTime( (OffsetDateTime) value, targetSqlType ); + } + if ( value instanceof PolyInterval ) { + return TypedValue.fromInterval( (PolyInterval) value ); + } + if ( value instanceof PolyDocument ) { + return TypedValue.fromDocument( (PolyDocument) value ); + } + return 
buildTypedValueFromJavaObject( value, targetSqlType ); + } + + + private static TypedValue buildTypedValueFromJavaObject( Object value, int targetSqlType ) throws ParseException, SQLException { + if ( targetSqlType != Types.JAVA_OBJECT ) { + throw new ParseException( "Can't parse Object as type " + targetSqlType, 0 ); + } + return TypedValue.fromObject( value ); + } + + + private static TypedValue buildTypedValueFromOffsetDateTime( OffsetDateTime value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TIMESTAMP_WITH_TIMEZONE: + return TypedValue.fromTimestamp( getTimestampFromOffsetDateTime( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse OffsetDateTime as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromOffsetTime( OffsetTime value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TIME_WITH_TIMEZONE: + return TypedValue.fromTime( getTimeFromOffsetTime( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse OffsetTime as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromLocalDateTime( LocalDateTime value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TIME: + return TypedValue.fromTime( Time.valueOf( value.toLocalTime() ) ); + case Types.DATE: + return TypedValue.fromDate( Date.valueOf( value.toLocalDate() ) ); + case Types.TIMESTAMP: + return TypedValue.fromTimestamp( Timestamp.valueOf( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse LocalDateTime as type " + targetSqlType, 0 ); + } + + + private static TypedValue 
buildTypedValueFromLocalTime( LocalTime value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TIME: + return TypedValue.fromTime( Time.valueOf( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse LocalTime as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromLocalDate( LocalDate value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.DATE: + return TypedValue.fromDate( Date.valueOf( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse LocalTime as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromDate( java.util.Date value, int targetSqlType ) throws ParseException, SQLFeatureNotSupportedException { + switch ( targetSqlType ) { + case Types.TIME: + return TypedValue.fromTime( new Time( value.getTime() ) ); + case Types.DATE: + return TypedValue.fromDate( new Date( value.getTime() ) ); + case Types.TIMESTAMP: + return TypedValue.fromTimestamp( new Timestamp( value.getTime() ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + case Types.ARRAY: + throw new SQLFeatureNotSupportedException( "Parsing of Date as an Array is not supported" ); + } + throw new ParseException( "Can't parse Date as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromCalendar( Calendar value, int targetSqlType ) throws ParseException, SQLFeatureNotSupportedException { + switch ( targetSqlType ) { + case Types.TIME: + return TypedValue.fromTime( new Time( value.getTimeInMillis() ) ); + case Types.DATE: + return TypedValue.fromDate( new Date( value.getTimeInMillis() ) ); + case Types.TIMESTAMP: + return 
TypedValue.fromTimestamp( new Timestamp( value.getTimeInMillis() ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + case Types.ARRAY: + throw new SQLFeatureNotSupportedException( "Parsing of Calendar as an Array is not supported" ); + } + throw new ParseException( "Can't parse Calendar as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromSQXML( SQLXML value, int targetSqlType ) throws ParseException, SQLException { + if ( targetSqlType != Types.SQLXML ) { + throw new ParseException( "Can't parse SQLXML as type " + targetSqlType, 0 ); + } + return TypedValue.fromSQLXML( value ); + } + + + private static TypedValue buildTypedValueFromRowId( RowId value, int targetSqlType ) throws ParseException, SQLFeatureNotSupportedException { + if ( targetSqlType != Types.ROWID ) { + throw new ParseException( "Can't parse RowId as type " + targetSqlType, 0 ); + } + return TypedValue.fromRowId( value ); + } + + + private static TypedValue buildTypedValueFromURL( URL value, int targetSqlType ) throws ParseException, SQLException { + if ( targetSqlType != Types.DATALINK ) { + throw new ParseException( "Can't parse URL as type " + targetSqlType, 0 ); + } + return TypedValue.fromUrl( value ); + } + + + private static TypedValue buildTypedValueFromRef( Ref value, int targetSqlType ) throws ParseException, SQLException { + if ( targetSqlType != Types.REF ) { + throw new ParseException( "Can't parse Ref as type " + targetSqlType, 0 ); + } + return TypedValue.fromRef( value ); + } + + + private static TypedValue buildTypedValueFromStruct( Struct value, int targetSqlType ) throws ParseException, SQLFeatureNotSupportedException { + if ( targetSqlType != Types.STRUCT ) { + throw new ParseException( "Can't parse Struct as type " + targetSqlType, 0 ); + } + return TypedValue.fromStruct( value ); + } + + + private static TypedValue buildTypedValueFromArray( Array value, int targetSqlType 
) throws ParseException { + if ( targetSqlType != Types.ARRAY ) { + throw new ParseException( "Can't parse Array as type " + targetSqlType, 0 ); + } + return TypedValue.fromArray( value ); + } + + + private static TypedValue buildTypedValueFromBlob( Blob value, int targetSqlType ) throws ParseException { + if ( targetSqlType != Types.BLOB ) { + throw new ParseException( "Can't parse Blob as type " + targetSqlType, 0 ); + } + return TypedValue.fromBlob( value ); + } + + + private static TypedValue buildTypedValueFromClob( Clob value, int targetSqlType ) throws ParseException, SQLException { + if ( targetSqlType != Types.CLOB ) { + throw new ParseException( "Can't parse Clob as type " + targetSqlType, 0 ); + } + return TypedValue.fromClob( value ); + } + + + private static TypedValue buildTypedValueFromNClob( NClob value, int targetSqlType ) throws ParseException, SQLException { + if ( targetSqlType != Types.NCLOB ) { + throw new ParseException( "Can't parse NClob as type " + targetSqlType, 0 ); + } + return TypedValue.fromNClob( value ); + } + + + private static TypedValue buildTypedValueFromTimestamp( Timestamp value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TIME: + return TypedValue.fromTime( getTimeFromTimestamp( value ) ); + case Types.DATE: + return TypedValue.fromDate( getDateFromTimestamp( value ) ); + case Types.TIMESTAMP: + return TypedValue.fromTimestamp( value ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse Timestamp as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromTime( Time value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TIME: + return TypedValue.fromTime( value ); + case Types.TIMESTAMP: + return TypedValue.fromTimestamp( getTimestampFromTime( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case 
Types.LONGVARCHAR: + return TypedValue.fromString( SQL_TIME_FORMAT.format( value ) ); + } + throw new ParseException( "Can't parse Time as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromDate( Date value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.DATE: + return TypedValue.fromDate( value ); + case Types.TIMESTAMP: + return TypedValue.fromTimestamp( getTimestampFromDate( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( SQL_DATE_FORMAT.format( value ) ); + } + throw new ParseException( "Can't parse Date as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromBigInteger( BigInteger value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.BIGINT: + return TypedValue.fromLong( value.longValue() ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse BigInteger as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromBytes( byte[] value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.BINARY: + case Types.VARBINARY: + case Types.LONGVARBINARY: + return TypedValue.fromBytes( value ); + } + throw new ParseException( "Can't parse byte[] as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromDouble( Double value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TINYINT: + return TypedValue.fromByte( value.byteValue() ); + case Types.SMALLINT: + return TypedValue.fromShort( value.shortValue() ); + case Types.INTEGER: + return TypedValue.fromInteger( value.intValue() ); + case Types.BIGINT: + return TypedValue.fromLong( value.longValue() ); + case Types.REAL: + return TypedValue.fromFloat( value.floatValue() ); + case 
Types.FLOAT: + case Types.DOUBLE: + // according to jdbc spec double should be used for jdbc float + return TypedValue.fromDouble( value ); + case Types.DECIMAL: + case Types.NUMERIC: + return TypedValue.fromBigDecimal( new BigDecimal( value ) ); + case Types.BIT: + case Types.BOOLEAN: + return TypedValue.fromBoolean( getBooleanFromNumber( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse Double as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromFloat( Float value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TINYINT: + return TypedValue.fromByte( value.byteValue() ); + case Types.SMALLINT: + return TypedValue.fromShort( value.shortValue() ); + case Types.INTEGER: + return TypedValue.fromInteger( value.intValue() ); + case Types.BIGINT: + return TypedValue.fromLong( value.longValue() ); + case Types.REAL: + return TypedValue.fromFloat( value ); + case Types.FLOAT: + case Types.DOUBLE: + // according to jdbc spec double should be used for jdbc float + return TypedValue.fromDouble( value ); + case Types.DECIMAL: + case Types.NUMERIC: + return TypedValue.fromBigDecimal( new BigDecimal( value ) ); + case Types.BIT: + case Types.BOOLEAN: + return TypedValue.fromBoolean( getBooleanFromNumber( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse Float as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromLong( Long value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TINYINT: + return TypedValue.fromByte( value.byteValue() ); + case Types.SMALLINT: + return TypedValue.fromShort( value.shortValue() ); + case Types.INTEGER: + return TypedValue.fromInteger( value.intValue() ); + case 
Types.BIGINT: + return TypedValue.fromLong( value ); + case Types.REAL: + return TypedValue.fromFloat( value ); + case Types.FLOAT: + case Types.DOUBLE: + // according to jdbc spec double should be used for jdbc float + return TypedValue.fromDouble( value ); + case Types.DECIMAL: + case Types.NUMERIC: + return TypedValue.fromBigDecimal( new BigDecimal( value ) ); + case Types.BIT: + case Types.BOOLEAN: + return TypedValue.fromBoolean( getBooleanFromNumber( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse Long as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromInteger( Integer value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TINYINT: + return TypedValue.fromByte( value.byteValue() ); + case Types.SMALLINT: + return TypedValue.fromShort( value.shortValue() ); + case Types.INTEGER: + return TypedValue.fromInteger( value ); + case Types.BIGINT: + return TypedValue.fromLong( value ); + case Types.REAL: + return TypedValue.fromFloat( value ); + case Types.FLOAT: + case Types.DOUBLE: + // according to jdbc spec double should be used for jdbc float + return TypedValue.fromDouble( value ); + case Types.DECIMAL: + case Types.NUMERIC: + return TypedValue.fromBigDecimal( new BigDecimal( value ) ); + case Types.BIT: + case Types.BOOLEAN: + return TypedValue.fromBoolean( getBooleanFromNumber( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse Integer as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromShort( Short value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TINYINT: + return TypedValue.fromByte( value.byteValue() ); + case Types.SMALLINT: + return TypedValue.fromShort( 
value ); + case Types.INTEGER: + return TypedValue.fromInteger( value ); + case Types.BIGINT: + return TypedValue.fromLong( value ); + case Types.REAL: + return TypedValue.fromFloat( value ); + case Types.FLOAT: + case Types.DOUBLE: + // according to jdbc spec double should be used for jdbc float + return TypedValue.fromDouble( value ); + case Types.DECIMAL: + case Types.NUMERIC: + return TypedValue.fromBigDecimal( new BigDecimal( value ) ); + case Types.BIT: + case Types.BOOLEAN: + return TypedValue.fromBoolean( getBooleanFromNumber( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse Short as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromByte( Byte value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TINYINT: + return TypedValue.fromByte( value ); + case Types.SMALLINT: + return TypedValue.fromShort( value ); + case Types.INTEGER: + return TypedValue.fromInteger( value ); + case Types.BIGINT: + return TypedValue.fromLong( value ); + case Types.REAL: + return TypedValue.fromFloat( value ); + case Types.FLOAT: + case Types.DOUBLE: + // according to jdbc spec double should be used for jdbc float + return TypedValue.fromDouble( value ); + case Types.DECIMAL: + case Types.NUMERIC: + return TypedValue.fromBigDecimal( new BigDecimal( value ) ); + case Types.BIT: + case Types.BOOLEAN: + return TypedValue.fromBoolean( getBooleanFromNumber( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse Byte as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromBoolean( Boolean value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TINYINT: + return TypedValue.fromByte( getNumberFromBoolean( value 
).byteValue() ); + case Types.SMALLINT: + return TypedValue.fromShort( getNumberFromBoolean( value ).shortValue() ); + case Types.INTEGER: + return TypedValue.fromInteger( getNumberFromBoolean( value ).intValue() ); + case Types.BIGINT: + return TypedValue.fromLong( getNumberFromBoolean( value ).longValue() ); + case Types.REAL: + return TypedValue.fromFloat( getNumberFromBoolean( value ).floatValue() ); + case Types.FLOAT: + case Types.DOUBLE: + // according to jdbc spec double should be used for jdbc float + return TypedValue.fromDouble( getNumberFromBoolean( value ).doubleValue() ); + case Types.DECIMAL: + case Types.NUMERIC: + return TypedValue.fromBigDecimal( new BigDecimal( getNumberFromBoolean( value ).intValue() ) ); + case Types.BIT: + case Types.BOOLEAN: + return TypedValue.fromBoolean( value ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return TypedValue.fromString( getOneZeroStringFromBoolean( value ) ); + } + throw new ParseException( "Can't parse Boolean as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromBigDecimal( BigDecimal value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TINYINT: + return TypedValue.fromByte( value.byteValue() ); + case Types.SMALLINT: + return TypedValue.fromShort( value.shortValue() ); + case Types.INTEGER: + return TypedValue.fromInteger( value.intValue() ); + case Types.BIGINT: + return TypedValue.fromLong( value.longValue() ); + case Types.REAL: + return TypedValue.fromFloat( value.floatValue() ); + case Types.FLOAT: + case Types.DOUBLE: + // according to jdbc spec double should be used for jdbc float + return TypedValue.fromDouble( value.doubleValue() ); + case Types.DECIMAL: + case Types.NUMERIC: + return TypedValue.fromBigDecimal( value ); + case Types.BIT: + case Types.BOOLEAN: + return TypedValue.fromBoolean( value.intValue() != 0 ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + return 
TypedValue.fromString( value.toString() ); + } + throw new ParseException( "Can't parse BigDecimal as type " + targetSqlType, 0 ); + } + + + private static TypedValue buildTypedValueFromString( String value, int targetSqlType ) throws ParseException { + switch ( targetSqlType ) { + case Types.TINYINT: + return TypedValue.fromByte( Byte.parseByte( value ) ); + case Types.SMALLINT: + return TypedValue.fromShort( Short.parseShort( value ) ); + case Types.INTEGER: + return TypedValue.fromInteger( Integer.parseInt( value ) ); + case Types.BIGINT: + return TypedValue.fromLong( Long.parseLong( value ) ); + case Types.REAL: + return TypedValue.fromFloat( Float.parseFloat( value ) ); + case Types.FLOAT: + case Types.DOUBLE: + // according to jdbc spec double should be used for jdbc float + return TypedValue.fromDouble( Double.parseDouble( value ) ); + case Types.DECIMAL: + case Types.NUMERIC: + return TypedValue.fromBigDecimal( new BigDecimal( value ) ); + case Types.BIT: + case Types.BOOLEAN: + return TypedValue.fromBoolean( getBooleanFromString( value ) ); + case Types.CHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + case Types.NCHAR: + case Types.NVARCHAR: + case Types.LONGNVARCHAR: + return TypedValue.fromString( value ); + case Types.BINARY: + case Types.VARBINARY: + case Types.LONGVARBINARY: + return TypedValue.fromBytes( value.getBytes( StandardCharsets.UTF_8 ) ); + case Types.DATE: + return TypedValue.fromDate( getDateFromString( value ) ); + case Types.TIME: + return TypedValue.fromTime( getTimeFromString( value ) ); + case Types.TIMESTAMP: + return TypedValue.fromTimestamp( getTimestampFromString( value ) ); + } + throw new ParseException( "Can't parse String as type " + targetSqlType, 0 ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/utils/VersionUtil.java b/src/main/java/org/polypheny/jdbc/utils/VersionUtil.java new file mode 100644 index 00000000..f08188f9 --- /dev/null +++ b/src/main/java/org/polypheny/jdbc/utils/VersionUtil.java @@ -0,0 +1,77 
@@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.utils; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; +import lombok.extern.slf4j.Slf4j; + +/** + * Utility class for accessing version information stored in a properties file generated by a gradle task. + */ +@Slf4j +public class VersionUtil { + + private static final String VERSION_FILE = "polypheny-jdbc-driver-version.properties"; + private static final String API_VERSION_PROPERTIES = "prism-api-version.properties"; + + public static final int MAJOR; + public static final int MINOR; + public static final String QUALIFIER; + public static final String BUILD_TIMESTAMP; + public static final String VERSION_STRING; + + public static final int MAJOR_API_VERSION; + public static final int MINOR_API_VERSION; + public static final String API_VERSION_STRING; + + + static { + Properties properties = new Properties(); + try ( InputStream inputStream = VersionUtil.class.getClassLoader().getResourceAsStream( VERSION_FILE ) ) { + if ( inputStream != null ) { + properties.load( inputStream ); + MAJOR = Integer.parseInt( properties.getProperty( "major" ) ); + MINOR = Integer.parseInt( properties.getProperty( "minor" ) ); + QUALIFIER = properties.getProperty( "qualifier" ); + BUILD_TIMESTAMP = properties.getProperty( "buildTimestamp" ); + VERSION_STRING = 
properties.getProperty( "version" ); + } else { + throw new FileNotFoundException( "The version properties could not be found." ); + } + } catch ( IOException e ) { + throw new RuntimeException( "Error loading version properties", e ); + } + + properties = new Properties(); + try ( InputStream inputStream = VersionUtil.class.getClassLoader().getResourceAsStream( API_VERSION_PROPERTIES ) ) { + if ( inputStream != null ) { + properties.load( inputStream ); + API_VERSION_STRING = properties.getProperty( "version" ); + MAJOR_API_VERSION = Integer.parseInt( properties.getProperty( "majorVersion" ) ); + MINOR_API_VERSION = Integer.parseInt( properties.getProperty( "minorVersion" ) ); + } else { + throw new FileNotFoundException( "The prism api version properties could not be found." ); + } + } catch ( IOException e ) { + throw new RuntimeException( "Error loading API version properties", e ); + } + } + +} diff --git a/src/main/resources/META-INF/services/java.sql.Driver b/src/main/resources/META-INF/services/java.sql.Driver index 2050010b..dca3b31f 100644 --- a/src/main/resources/META-INF/services/java.sql.Driver +++ b/src/main/resources/META-INF/services/java.sql.Driver @@ -1 +1 @@ -org.polypheny.jdbc.Driver +org.polypheny.jdbc.PolyphenyDriver diff --git a/src/test/java/org/polypheny/jdbc/BlockingTest.java b/src/test/java/org/polypheny/jdbc/BlockingTest.java new file mode 100644 index 00000000..fd7444f0 --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/BlockingTest.java @@ -0,0 +1,62 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.jupiter.api.Test; + +public class BlockingTest { + + Connection con; + + + @Test + void connectAndDisconnect() throws SQLException { + con = DriverManager.getConnection( "jdbc:polypheny://127.0.0.1:20590", "pa", "" ); + con.close(); + } + + + @Test + void execAndDisconnect() throws SQLException { + con = DriverManager.getConnection( "jdbc:polypheny://127.0.0.1:20590", "pa", "" ); + try ( Statement s = con.createStatement() ) { + s.execute( "DROP TABLE IF EXISTS t" ); + s.execute( "CREATE TABLE t(id INTEGER PRIMARY KEY, a INTEGER NOT NULL)" ); + s.execute( "INSERT INTO t(id, a) VALUES (1, 1), (2, 2), (3, 3)" ); + s.execute( "DROP TABLE IF EXISTS t" ); + } + con.close(); + } + + + @Test + void failAndDisconnect() throws SQLException { + con = DriverManager.getConnection( "jdbc:polypheny://127.0.0.1:20590", "pa", "" ); + try ( Statement s = con.createStatement() ) { + s.execute( "DROP TABLE IF EXISTS t" ); + assertThrows( SQLException.class, () -> s.execute( "INSERT INTO t(id, a) VALUES (1, 1), (2, 2), (3, 3)" ) ); + } + con.close(); + } + +} diff --git a/src/test/java/org/polypheny/jdbc/ConnectionStringTest.java b/src/test/java/org/polypheny/jdbc/ConnectionStringTest.java new file mode 100644 index 00000000..de0f1088 --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/ConnectionStringTest.java @@ -0,0 +1,285 @@ 
+/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.sql.SQLException; +import java.util.HashMap; +import java.util.Properties; +import org.junit.jupiter.api.Test; +import org.polypheny.jdbc.properties.PropertyUtils; + +public class ConnectionStringTest { + + @Test() + public void connectionString_String__null() { + assertThrows( SQLException.class, () -> new ConnectionString( null ) ); + } + + + @Test() + public void connectionString_String__Empty() { + assertThrows( SQLException.class, () -> new ConnectionString( "" ) ); + } + + + @Test() + public void connectionString_String__NoJdbcSchema() { + final String url = "polypheny://username:password@host:20569/database?k1=v1&k2=v2"; + assertThrows( SQLException.class, () -> new ConnectionString( url ) ); + } + + + @Test() + public void connectionString_String__NoPolyphenySubSchema() { + final String url = "jdbc://username:password@host:20569/database?k1=v1&k2=v2"; + assertThrows( SQLException.class, () -> new ConnectionString( url ) ); + } + + + @Test() + public void connectionString_String__WrongSubSchema() throws Exception { + final String url = "jdbc:foo://username:password@host:20569/database?k1=v1&k2=v2"; + assertThrows( SQLException.class, () -> new ConnectionString( url ) ); + } + + 
+ @Test + public void connectionString_String__MissingCredentials() throws Exception { + final String expectedTarget = "host:20590"; + + final String url = "jdbc:polypheny://host:20590"; + final ConnectionString cs = new ConnectionString( url ); + + assertEquals( expectedTarget, cs.getTarget() ); + } + + + @Test + public void connectionString_String__MissingPortTrailingSlash() throws Exception { + final String expectedTarget = "host:20590"; + + final String url = "jdbc:polypheny://host/"; + final ConnectionString cs = new ConnectionString( url ); + + assertEquals( expectedTarget, cs.getTarget() ); + } + + + @Test + public void connectionString_String__NoPassword() throws Exception { + final String expectedUsername = "username"; + + final String url = "jdbc:polypheny://username@localhost:20569"; + final ConnectionString cs = new ConnectionString( url ); + + assertEquals( expectedUsername, cs.getUser() ); + } + + + @Test + public void connectionString_String__AcceptableUrl() throws Exception { + final HashMap expected = new HashMap<>(); + expected.put( PropertyUtils.getUSERNAME_KEY(), "username" ); + expected.put( PropertyUtils.getPASSWORD_KEY(), "password" ); + expected.put( PropertyUtils.getNAMESPACE_KEY(), "namespace" ); + expected.put( "k1", "v1" ); + expected.put( "k2", "v2" ); + final String expectedTarget = "localhost:20569"; + + final String url = "jdbc:polypheny://username:password@localhost:20569/namespace?k1=v1&k2=v2"; + final ConnectionString cs = new ConnectionString( url ); + + assertEquals( expected, cs.getParameters() ); + assertEquals( expectedTarget, cs.getTarget() ); + } + + + @Test + public void connectionString_String__AcceptableUrlNoNamespace() throws Exception { + final HashMap expected = new HashMap<>(); + expected.put( PropertyUtils.getUSERNAME_KEY(), "username" ); + expected.put( PropertyUtils.getPASSWORD_KEY(), "password" ); + expected.put( "k1", "v1" ); + expected.put( "k2", "v2" ); + final String expectedTarget = "localhost:20569"; + + 
final String url = "jdbc:polypheny://username:password@localhost:20569?k1=v1&k2=v2"; + final ConnectionString cs = new ConnectionString( url ); + + assertEquals( expected, cs.getParameters() ); + assertEquals( expectedTarget, cs.getTarget() ); + } + + + @Test + public void connectionString_String__ColonInPassword() throws Exception { + final HashMap expected = new HashMap<>(); + expected.put( PropertyUtils.getUSERNAME_KEY(), "username" ); + expected.put( PropertyUtils.getPASSWORD_KEY(), "pass:word" ); + expected.put( PropertyUtils.getNAMESPACE_KEY(), "namespace" ); + expected.put( "k1", "v1" ); + expected.put( "k2", "v2" ); + final String expectedTarget = "localhost:20569"; + + final String url = "jdbc:polypheny://username:pass:word@localhost:20569/namespace?k1=v1&k2=v2"; + final ConnectionString cs = new ConnectionString( url ); + + assertEquals( expected, cs.getParameters() ); + assertEquals( expectedTarget, cs.getTarget() ); + } + + + @Test() + public void connectionString_String__MissingValue() { + final String url = "jdbc:polypheny://username:pass:word@localhost:20569/namespace?k1=v1&k2"; + assertThrows( SQLException.class, () -> new ConnectionString( url ) ); + } + + + @Test() + public void connectionString_String__MisplacedAt() { + final String url = "jdbc:polypheny://username:password@localhost:20569/namespace?k1@v1&k2"; + assertThrows( SQLException.class, () -> new ConnectionString( url ) ); + } + + + @Test() + public void connectionString_String__MisplacedAt2() throws Exception { + final String url = "jdbc:polypheny://username@password:localhost:20569/namespace?k1@v1&k2"; + assertThrows( SQLException.class, () -> new ConnectionString( url ) ); + } + + + @Test + public void connectionString_String__AcceptableNewStyleUrlHttp() throws SQLException { + final HashMap expected = new HashMap<>(); + expected.put( PropertyUtils.getUSERNAME_KEY(), "username" ); + expected.put( PropertyUtils.getPASSWORD_KEY(), "password" ); + expected.put( 
PropertyUtils.getNAMESPACE_KEY(), "namespace" ); + expected.put( "k1", "v1" ); + expected.put( "k2", "v2" ); + final String expectedTarget = "localhost:20569"; + + final String url = "jdbc:polypheny:http://username:password@localhost:20569/namespace?k1=v1&k2=v2"; + final ConnectionString cs = new ConnectionString( url ); + + assertEquals( expected, cs.getParameters() ); + assertEquals( expectedTarget, cs.getTarget() ); + } + + + @Test + public void connectionString_String__AcceptableNewStyleUrlHttps() throws SQLException { + final HashMap expected = new HashMap<>(); + expected.put( PropertyUtils.getUSERNAME_KEY(), "username" ); + expected.put( PropertyUtils.getPASSWORD_KEY(), "password" ); + expected.put( PropertyUtils.getNAMESPACE_KEY(), "namespace" ); + expected.put( "k1", "v1" ); + expected.put( "k2", "v2" ); + final String expectedTarget = "localhost:20569"; + + final String url = "jdbc:polypheny:https://username:password@localhost:20569/namespace?k1=v1&k2=v2"; + final ConnectionString cs = new ConnectionString( url ); + + assertEquals( expected, cs.getParameters() ); + assertEquals( expectedTarget, cs.getTarget() ); + } + + + @Test + public void connectionString_String__SchemaOnly() throws Exception { + final HashMap expected = new HashMap<>(); + final String target = PropertyUtils.getDEFAULT_HOST() + ":" + PropertyUtils.getDEFAULT_PORT(); + + final String url = "jdbc:polypheny://"; + final ConnectionString cs = new ConnectionString( url ); + + assertEquals( expected, cs.getParameters() ); + assertEquals( target, cs.getTarget() ); + } + + + @Test + public void connectionString_String__NoPort() throws Exception { + final String expectedTarget = "host:" + PropertyUtils.getDEFAULT_PORT(); + final String url = "jdbc:polypheny://username:password@host/database?k1=v1&k2=v2"; + final ConnectionString cs = new ConnectionString( url ); + + assertEquals( expectedTarget, cs.getTarget() ); + } + + + @Test + public void connectionString_String_Parameters__null() throws 
Exception { + final HashMap expected = new HashMap<>(); + expected.put( PropertyUtils.getUSERNAME_KEY(), "username" ); + expected.put( PropertyUtils.getPASSWORD_KEY(), "password" ); + expected.put( PropertyUtils.getNAMESPACE_KEY(), "namespace" ); + expected.put( "k1", "v1" ); + expected.put( "k2", "v2" ); + + final String url = "jdbc:polypheny://username:password@localhost:20569/namespace?k1=v1&k2=v2"; + final ConnectionString cs = new ConnectionString( url, null ); + + assertEquals( expected, cs.getParameters() ); + } + + + @Test + public void connectionString_String_Parameters__ImportAndOverwrite() throws SQLException { + final HashMap expected = new HashMap<>(); + expected.put( PropertyUtils.getUSERNAME_KEY(), "urlUsername" ); + expected.put( PropertyUtils.getPASSWORD_KEY(), "urlPassword" ); + expected.put( PropertyUtils.getNAMESPACE_KEY(), "mapNamespace" ); + expected.put( "k1", "v1" ); + expected.put( "k2", "v2" ); + expected.put( "k3", "v3" ); + expected.put( "k4", "v4" ); + + final Properties properties = new Properties(); + properties.setProperty( PropertyUtils.getUSERNAME_KEY(), "mapUsername" ); + properties.setProperty( PropertyUtils.getPASSWORD_KEY(), "mapPassword" ); + properties.setProperty( PropertyUtils.getNAMESPACE_KEY(), "mapNamespace" ); + properties.setProperty( "k1", "v1" ); + properties.setProperty( "k2", "v2" ); + + final String url = "jdbc:polypheny://urlUsername:urlPassword@localhost:20569/?k3=v3&k4=v4"; + final ConnectionString cs = new ConnectionString( url, properties ); + + assertEquals( expected, cs.getParameters() ); + } + + + @Test + public void connectionString_String__Ipv6() throws SQLException { + final String expectedTarget = "[7ed0:1b1d:0058:6765:253f:2f64:6406:063c]:12345"; + final String ip = "[7ed0:1b1d:0058:6765:253f:2f64:6406:063c]"; + final int port = 12345; + + final String url = "jdbc:polypheny://username:password@[7ed0:1b1d:0058:6765:253f:2f64:6406:063c]:12345/database?k1=v1&k2=v2"; + final ConnectionString cs = new 
ConnectionString( url ); + + assertEquals( expectedTarget, cs.getTarget() ); + assertEquals( ip, cs.getHost() ); + assertEquals( port, cs.getPort() ); + } + +} diff --git a/src/test/java/org/polypheny/jdbc/ConnectionTest.java b/src/test/java/org/polypheny/jdbc/ConnectionTest.java new file mode 100644 index 00000000..a36283e1 --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/ConnectionTest.java @@ -0,0 +1,151 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.jdbc; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Statement; +import java.util.Properties; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class ConnectionTest { + + Connection con; + + + @BeforeEach + void createConnection() throws SQLException { + con = DriverManager.getConnection( "jdbc:polypheny://127.0.0.1:20590", "pa", "" ); + try ( Statement statement = con.createStatement() ) { + statement.execute( "DROP TABLE IF EXISTS t" ); + statement.execute( "CREATE TABLE t(id INTEGER PRIMARY KEY, a INTEGER NOT NULL)" ); + } + } + + + @AfterEach + void closeConnection() throws SQLException { + con.close(); + } + + + @Test + void testCommit() throws SQLException { + con.setAutoCommit( false ); + try ( Statement statement = con.createStatement() ) { + statement.execute( "INSERT INTO t(id, a) VALUES (1, 1), (2, 2), (3, 3)" ); + con.commit(); + ResultSet resultSet = statement.executeQuery( "SELECT * FROM t" ); + int count = 0; + while ( resultSet.next() ) { + count++; + } + assertEquals( count, 3 ); + } + } + + + @Test + void testRollback() throws SQLException { + con.setAutoCommit( false ); + try ( Statement statement = con.createStatement() ) { + statement.execute( "INSERT INTO t(id, a) VALUES (1, 1), (2, 2), (3, 3)" ); + con.rollback(); + ResultSet resultSet = statement.executeQuery( "SELECT * FROM t" ); + assertFalse( resultSet.next() ); + } + } + + + @Test + void testCloseWithOpenStatements() throws SQLException { + try ( Statement 
statement = con.createStatement() ) { + con.close(); + assertTrue( statement.isClosed() ); + } + } + + + @Test + void testCheckConnection() throws SQLException { + con.isValid( 0 ); + assertThrows( SQLException.class, () -> con.isValid( -1 ) ); + } + + + @Test + void testClientProperties() throws SQLException { + Properties info = con.getClientInfo(); + con.setClientInfo( info ); + } + + + @Test + void testMetaData() throws SQLException { + DatabaseMetaData meta = con.getMetaData(); + meta.getURL(); + meta.getDatabaseProductName(); + meta.getCatalogs(); + meta.getTableTypes(); + meta.getTypeInfo(); + meta.getColumns( "public", ".*", ".*", ".*" ); + meta.getStringFunctions(); + meta.getSystemFunctions(); + meta.getTimeDateFunctions(); + meta.getNumericFunctions(); + meta.getSQLKeywords(); + } + + + @Test + void testMetaDataNotStrict() throws SQLException { + try ( Connection con = DriverManager.getConnection( "jdbc:polypheny://127.0.0.1:20590?strict=false", "pa", "" ) ) { + DatabaseMetaData meta = con.getMetaData(); + meta.getProcedures( "public", ".*", ".*" ); + meta.getFunctions( "public", ".*", ".*" ); + meta.getSchemas( "public", ".*" ); + } + } + + + @Test + void testUnimplemented() throws SQLException { + DatabaseMetaData meta = con.getMetaData(); + assertThrows( SQLFeatureNotSupportedException.class, meta::getClientInfoProperties ); + assertThrows( SQLFeatureNotSupportedException.class, () -> meta.getUDTs( "public", ".*", ".*", null ) ); + } + + + @Test + void properShutdown() throws SQLException { + // This test requires that there is only one active Driver instance (otherwise we will pick up the Thread name of another Connection) + con.close(); + assertFalse( Thread.getAllStackTraces().keySet().stream().map( Thread::getName ).anyMatch( n -> n.equals( "PrismInterfaceResponseHandler" ) ) ); + } + +} diff --git a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcConnection.java b/src/test/java/org/polypheny/jdbc/DriverPropertyInfoTest.java similarity index 63% 
rename from src/main/java/org/polypheny/jdbc/PolyphenyJdbcConnection.java rename to src/test/java/org/polypheny/jdbc/DriverPropertyInfoTest.java index 85d151fc..5d798252 100644 --- a/src/main/java/org/polypheny/jdbc/PolyphenyJdbcConnection.java +++ b/src/test/java/org/polypheny/jdbc/DriverPropertyInfoTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2020 The Polypheny Project + * Copyright 2019-2024 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,7 +16,15 @@ package org.polypheny.jdbc; +import java.sql.SQLException; +import org.junit.jupiter.api.Test; -public interface PolyphenyJdbcConnection extends java.sql.Connection { +public class DriverPropertyInfoTest { + + @Test + public void propertyInfo_defaultValues() throws SQLException { + String url = "jdbc:polypheny://host:20590"; + new PolyphenyDriver().getPropertyInfo( url, null ); + } } diff --git a/src/test/java/org/polypheny/jdbc/DriverTest.java b/src/test/java/org/polypheny/jdbc/DriverTest.java deleted file mode 100644 index 1f9db639..00000000 --- a/src/test/java/org/polypheny/jdbc/DriverTest.java +++ /dev/null @@ -1,537 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.jdbc; - - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.sql.Connection; -import java.sql.SQLException; -import java.util.Collections; -import java.util.Properties; -import org.apache.calcite.avatica.remote.Driver.Serialization; -import org.apache.calcite.avatica.remote.MockJsonService; -import org.apache.calcite.avatica.remote.MockProtobufService; -import org.apache.calcite.avatica.server.AvaticaJsonHandler; -import org.apache.calcite.avatica.server.AvaticaProtobufHandler; -import org.apache.calcite.avatica.server.HttpServer; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; - - -/** - * - */ -public class DriverTest { - - - private static final Driver DRIVER = new Driver(); - - - @BeforeClass - public static void setUpClass() { - } - - - @AfterClass - public static void tearDownClass() { - } - - - @Before - public void setUp() { - } - - - @After - public void tearDown() { - } - - - @Test - public void acceptsURL_String__CorrectDriverSchema() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( Driver.DRIVER_URL_SCHEMA ); - - assertEquals( expected, actual ); - } - - - @Test(expected = SQLException.class) - public void acceptsURL_null() throws Exception { - final boolean actual = DRIVER.acceptsURL( null ); - fail( "No SQLException thrown" ); - } - - - @Test - public void acceptsURL_EmptyString() throws Exception { - final boolean expected = false; - final boolean actual = DRIVER.acceptsURL( "" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__NoJdbcSchema() throws Exception { - final boolean expected = false; - final boolean actual = DRIVER.acceptsURL( "polypheny://username:password@host:20569/database?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - 
public void acceptsURL_String__NoPolyphenySubSchema() throws Exception { - final boolean expected = false; - final boolean actual = DRIVER.acceptsURL( "jdbc://username:password@host:20569/database?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__WrongSubSchema() throws Exception { - final boolean expected = false; - final boolean actual = DRIVER.acceptsURL( "jdbc:foo://username:password@host:20569/database?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__AcceptableUrl() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@host:20569/database?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__AcceptableUrl_NewUrlStyle() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny:http://username:password@host:20569/database?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__AcceptableUrl_NewUrlStyleHttps() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny:https://username:password@host:20569/database?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__AcceptableUrlNoPassword() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username@host:20569/database?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__AcceptableUrlNoParameters() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@host:20569/database" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__AcceptableUrlNoDatabase() throws 
Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@host:20569?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__AcceptableUrlNoPort() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@host/database?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__AcceptableUrlNoHost() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@:20569/database?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__AcceptableUrlNoUsernamePassword() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://host:20569/database?k1=v1&k2=v2" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__AcceptableUrlDefaultsOnly() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny:///" ); - - assertEquals( expected, actual ); - } - - - @Test - public void acceptsURL_String__MalformedParameter() throws Exception { - final boolean expected = true; - final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@host:20569/database?k1=v1&k2" ); // k2 is ignored! 
- - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__AcceptableUrl() throws Exception { - final Properties expected = new Properties(); - expected.setProperty( Driver.PROPERTY_USERNAME_KEY, "username" ); - expected.setProperty( Driver.PROPERTY_PASSWORD_KEY, "password" ); - expected.setProperty( Driver.PROPERTY_HOST_KEY, "localhost" ); - expected.setProperty( Driver.PROPERTY_PORT_KEY, "20569" ); - expected.setProperty( Driver.PROPERTY_DATABASE_KEY, "database" ); - expected.setProperty( "k1", "v1" ); - expected.setProperty( "k2", "v2" ); - expected.setProperty( Driver.PROPERTY_URL_KEY, "http://localhost:20569/" ); - expected.setProperty( Driver.PROPERTY_SERIALIZATION, Driver.DEFAULT_SERIALIZATION ); - - final Properties actual = DRIVER.parseUrl( "jdbc:polypheny://username:password@localhost:20569/database?k1=v1&k2=v2", null ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__AcceptableUrlNewUrlStyleHttp() throws Exception { - final Properties expected = new Properties(); - expected.setProperty( Driver.PROPERTY_USERNAME_KEY, "username" ); - expected.setProperty( Driver.PROPERTY_PASSWORD_KEY, "password" ); - expected.setProperty( Driver.PROPERTY_HOST_KEY, "localhost" ); - expected.setProperty( Driver.PROPERTY_PORT_KEY, "20569" ); - expected.setProperty( Driver.PROPERTY_DATABASE_KEY, "database" ); - expected.setProperty( "k1", "v1" ); - expected.setProperty( "k2", "v2" ); - expected.setProperty( Driver.PROPERTY_URL_KEY, "http://localhost:20569/" ); - expected.setProperty( Driver.PROPERTY_SERIALIZATION, Driver.DEFAULT_SERIALIZATION ); - - final Properties actual = DRIVER.parseUrl( "jdbc:polypheny:http://username:password@localhost:20569/database?k1=v1&k2=v2", null ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__AcceptableUrlNewUrlStyleHttps() throws Exception { - final Properties expected = new Properties(); - expected.setProperty( 
Driver.PROPERTY_USERNAME_KEY, "username" ); - expected.setProperty( Driver.PROPERTY_PASSWORD_KEY, "password" ); - expected.setProperty( Driver.PROPERTY_HOST_KEY, "localhost" ); - expected.setProperty( Driver.PROPERTY_PORT_KEY, "20569" ); - expected.setProperty( Driver.PROPERTY_DATABASE_KEY, "database" ); - expected.setProperty( "k1", "v1" ); - expected.setProperty( "k2", "v2" ); - expected.setProperty( Driver.PROPERTY_URL_KEY, "https://localhost:20569/" ); - expected.setProperty( Driver.PROPERTY_SERIALIZATION, Driver.DEFAULT_SERIALIZATION ); - - final Properties actual = DRIVER.parseUrl( "jdbc:polypheny:https://username:password@localhost:20569/database?k1=v1&k2=v2", null ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__AcceptableUrlNoHost() throws Exception { - final String expected = "localhost"; - final String actual = DRIVER - .parseUrl( "jdbc:polypheny://username:password@:20569/database?k1=v1&k2=v2", null ) - .getProperty( Driver.PROPERTY_HOST_KEY ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__AcceptableUrl_AllDefaults() throws Exception { - final Properties expected = new Properties(); - expected.setProperty( Driver.PROPERTY_HOST_KEY, Driver.DEFAULT_HOST ); - expected.setProperty( Driver.PROPERTY_PORT_KEY, Integer.toString( Driver.DEFAULT_PORT ) ); - expected.setProperty( Driver.PROPERTY_URL_KEY, Driver.DEFAULT_TRANSPORT_SCHEMA + "//" + Driver.DEFAULT_HOST + ":" + Driver.DEFAULT_PORT + "/" ); - expected.setProperty( Driver.PROPERTY_SERIALIZATION, Driver.DEFAULT_SERIALIZATION ); - - final Properties actual = DRIVER.parseUrl( "jdbc:polypheny://", null ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__AcceptableUrl_AllDefaults_JsonWireProtocol() throws Exception { - final String expected = Serialization.JSON.name(); - final String actual = DRIVER.parseUrl( "jdbc:polypheny://" + "?" 
+ Driver.PROPERTY_SERIALIZATION + "=json", null ).getProperty( Driver.PROPERTY_SERIALIZATION ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__AcceptableUrl_AllDefaults_ProtobufWireProtocol() throws Exception { - final String expected = Serialization.PROTOBUF.name(); - final String actual = DRIVER.parseUrl( "jdbc:polypheny://" + "?" + Driver.PROPERTY_SERIALIZATION + "=protobuf", null ).getProperty( Driver.PROPERTY_SERIALIZATION ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__AcceptableUrl_AllDefaults_ProtobufWireProtocol2() throws Exception { - final String expected = Serialization.PROTOBUF.name(); - final String actual = DRIVER.parseUrl( "jdbc:polypheny://" + "?" + Driver.PROPERTY_SERIALIZATION + "=proto", null ).getProperty( Driver.PROPERTY_SERIALIZATION ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__AcceptableUrl_AllDefaults_ProtobufWireProtocol3() throws Exception { - final String expected = Serialization.PROTOBUF.name(); - final String actual = DRIVER.parseUrl( "jdbc:polypheny://" + "?" 
+ Driver.PROPERTY_SERIALIZATION + "=proto3", null ).getProperty( Driver.PROPERTY_SERIALIZATION ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String__AcceptableUrlNoPort() throws Exception { - final int expected = Driver.DEFAULT_PORT; - final Properties connectionProperties = DRIVER.parseUrl( "jdbc:polypheny://username:password@host/database?k1=v1&k2=v2", new Properties() ); - final int actual = Integer.parseInt( connectionProperties.getProperty( "port" ) ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_Properties__AcceptableUrl_OverrideHost() throws Exception { - final String expected = "someother-host"; - final Properties info = new Properties(); - info.setProperty( Driver.PROPERTY_HOST_KEY, expected ); - - final String actual = DRIVER - .parseUrl( "jdbc:polypheny://username:password@localhost:20569/database?k1=v1&k2=v2", info ) - .getProperty( Driver.PROPERTY_HOST_KEY ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__AcceptableUrl_SetViaUrlParam() throws Exception { - final Properties expected = new Properties(); - expected.setProperty( Driver.PROPERTY_USERNAME_KEY, "username" ); - expected.setProperty( Driver.PROPERTY_PASSWORD_KEY, "secret" ); - expected.setProperty( Driver.PROPERTY_HOST_KEY, "localhost" ); - expected.setProperty( Driver.PROPERTY_PORT_KEY, "20569" ); - expected.setProperty( Driver.PROPERTY_DATABASE_KEY, "database" ); - expected.setProperty( "k1", "v1" ); - expected.setProperty( "k2", "v2" ); - expected.setProperty( Driver.PROPERTY_URL_KEY, "http://localhost:20569/" ); - expected.setProperty( Driver.PROPERTY_SERIALIZATION, Driver.DEFAULT_SERIALIZATION ); - - final Properties actual = DRIVER.parseUrl( "jdbc:polypheny://username@localhost:20569/database?k1=v1&k2=v2&" + Driver.PROPERTY_PASSWORD_KEY + "=secret", null ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__DeprecatedParameterKeys() 
throws Exception { - final Properties expected = new Properties(); - expected.setProperty( Driver.PROPERTY_HOST_KEY, Driver.DEFAULT_HOST ); - expected.setProperty( Driver.PROPERTY_PORT_KEY, Integer.toString( Driver.DEFAULT_PORT ) ); - expected.setProperty( Driver.PROPERTY_URL_KEY, Driver.DEFAULT_URL ); - - expected.setProperty( "wire_protocol", "wire_protocol" ); - expected.setProperty( Driver.PROPERTY_SERIALIZATION, "WIRE_PROTOCOL" ); - - final Properties actual = DRIVER.parseUrl( "jdbc:polypheny:///?" - + "wire_protocol=wire_protocol" - , null ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__DeprecatedParameterKeysOverwrite() throws Exception { - final Properties expected = new Properties(); - expected.setProperty( Driver.PROPERTY_HOST_KEY, Driver.DEFAULT_HOST ); - expected.setProperty( Driver.PROPERTY_PORT_KEY, Integer.toString( Driver.DEFAULT_PORT ) ); - expected.setProperty( Driver.PROPERTY_URL_KEY, Driver.DEFAULT_URL ); - - expected.setProperty( "wire_protocol", "wire_protocol" ); - expected.setProperty( Driver.PROPERTY_SERIALIZATION, "SERIALIZATION" ); - - final Properties actual = DRIVER.parseUrl( "jdbc:polypheny:///?" - + "wire_protocol=wire_protocol" + "&" + "serialization=serialization" - , null ); - - assertEquals( expected, actual ); - } - - - @Test - public void parseUrl_String_null__DeprecatedParameterKeysOverwrite2() throws Exception { - final Properties expected = new Properties(); - expected.setProperty( Driver.PROPERTY_HOST_KEY, Driver.DEFAULT_HOST ); - expected.setProperty( Driver.PROPERTY_PORT_KEY, Integer.toString( Driver.DEFAULT_PORT ) ); - expected.setProperty( Driver.PROPERTY_URL_KEY, Driver.DEFAULT_URL ); - - expected.setProperty( "wire_protocol", "wire_protocol" ); - expected.setProperty( Driver.PROPERTY_SERIALIZATION, "SERIALIZATION" ); - - final Properties actual = DRIVER.parseUrl( "jdbc:polypheny:///?" 
- + "serialization=serialization" + "&" + "wire_protocol=wire_protocol" - , null ); - - assertEquals( expected, actual ); - } - - - @Test(expected = SQLException.class) - public void connect_null_null() throws Exception { - DRIVER.connect( null, null ); - fail( "No SQLException thrown" ); - } - - - @Test - public void connect_EmptyString_null() throws Exception { - final Connection expected = null; - final Connection actual = DRIVER.connect( "", null ); - - assertEquals( expected, actual ); - } - - - @Test - public void connect_String_null__WrongSchema() throws Exception { - final Connection expected = null; - final Connection actual = DRIVER.connect( "foo:polypheny://username:password@localhost:20569/database?k1=v1&k2=v2", null ); - - assertEquals( expected, actual ); - } - - - @Test - public void connect_String_null__WrongSubSchema() throws Exception { - final Connection expected = null; - final Connection actual = DRIVER.connect( "jdbc:foo://username:password@localhost:20569/database?k1=v1&k2=v2", null ); - - assertEquals( expected, actual ); - } - - - @Test - public void connect_String_null__ProtobufWireProtocol() throws Exception { - final int port = Driver.DEFAULT_PORT; - - HttpServer server = null; - try { - server = new HttpServer( port, new AvaticaProtobufHandler( new MockProtobufService( "" ) { - @Override - public Response _apply( Request request ) { - if ( request instanceof OpenConnectionRequest ) { - /* - * The connectionId sent by the driver is randomly generated and cannot be known upfront, i.e., when the service is created. - * Therefore, we have to intercept here. - */ - return new OpenConnectionResponse(); - } - return super._apply( request ); - } - } ) ); - server.start(); - - final Connection actual = DRIVER.connect( "jdbc:polypheny://localhost:" + port + "/?" 
+ Driver.PROPERTY_SERIALIZATION + "=" + "protobuf", null ); - actual.close(); - - assertTrue( true ); // No exception --> pass - } finally { - if ( server != null ) { - server.stop(); - } - } - } - - - @Test - public void connect_String_null__JsonWireProtocol() throws Exception { - final int port = Driver.DEFAULT_PORT; - - HttpServer server = null; - try { - server = new HttpServer( port, new AvaticaJsonHandler( new MockJsonService( Collections.emptyMap() ) { - @Override - public String apply( String request ) { - /* - * The connectionId sent by the driver is randomly generated and cannot be known upfront, i.e., when the service is created. - * Therefore, we have to intercept here, check the requests and send responses back on our own - */ - if ( request.startsWith( "{\"request\":\"openConnection\",\"connectionId\":\"" ) ) { - return "{\"response\":\"openConnection\"}"; - } - if ( request.startsWith( "{\"request\":\"closeConnection\",\"connectionId\":\"" ) ) { - return "{\"response\":\"closeConnection\"}"; - } - throw new RuntimeException( "No response for " + request ); - } - } ) ); - server.start(); - - final Connection actual = DRIVER.connect( "jdbc:polypheny://localhost:" + port + "/?" + Driver.PROPERTY_SERIALIZATION + "=" + "json", null ); - actual.close(); - - assertTrue( true ); // No exception --> pass - } finally { - if ( server != null ) { - server.stop(); - } - } - } -} diff --git a/src/test/java/org/polypheny/jdbc/MetaScrollerTest.java b/src/test/java/org/polypheny/jdbc/MetaScrollerTest.java new file mode 100644 index 00000000..7e760a88 --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/MetaScrollerTest.java @@ -0,0 +1,555 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import org.junit.jupiter.api.Test; +import org.polypheny.jdbc.meta.MetaScroller; + +public class MetaScrollerTest { + + private static final List TEST_DATA_EMPTY = Collections.emptyList(); + private static final List TEST_DATA_FOUR = new ArrayList<>(); + + + static { + TEST_DATA_FOUR.add( 1 ); + TEST_DATA_FOUR.add( 2 ); + TEST_DATA_FOUR.add( 3 ); + TEST_DATA_FOUR.add( 4 ); + } + + + @Test + public void indexInitPosition__empty_beforeFirst() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertTrue( scroller.isBeforeFirst() ); + assertFalse( scroller.hasCurrent() ); + } + + + @Test + public void indexInitPosition__empty_notFirst() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertFalse( scroller.isFirst() ); + } + + + @Test + public void indexInitPosition__empty_notAfterLastFirst() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertTrue( scroller.isAfterLast() ); + } + + + @Test + public void indexInitPosition__empty_notLast() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertTrue( scroller.isLast() ); + } + + + @Test + public void getRowInit__empty_0() { + 
MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void currentInit__empty_null() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertNull( scroller.current() ); + } + + + @Test + public void next__empty_false() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertFalse( scroller.next() ); + } + + + @Test + public void nextGetRow__empty_0() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void nextCurrent__empty_null() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertNull( scroller.current() ); + } + + + @Test + public void previous__empty_false() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertFalse( scroller.previous() ); + } + + + @Test + public void previousGetRow__empty_0() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void previousCurrent__empty_null() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertNull( scroller.current() ); + } + + + @Test + public void absolute0__empty_false() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertFalse( scroller.absolute( 0 ) ); + } + + + @Test + public void absolute0Current__empty_null() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertNull( scroller.current() ); + } + + + @Test + public void absolute0IsBefore__empty_true() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + scroller.absolute( 0 ); + assertTrue( scroller.isBeforeFirst() ); + } + + + @Test + public void absolute5IsAfter__empty_false() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertFalse( scroller.absolute( 5 ) ); + } + + + @Test + public void absolute5IsAfter__empty_true() { + 
MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + scroller.absolute( 5 ); + assertTrue( scroller.isAfterLast() ); + } + + + @Test + public void absoluteMinus5IsBefore__empty_true() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + scroller.absolute( -5 ); + assertTrue( scroller.isBeforeFirst() ); + } + + + @Test + public void absoluteMinus5IsBefore__empty_false() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertFalse( scroller.absolute( -5 ) ); + } + + + @Test + public void relative1IsAfter__empty_true() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + scroller.relative( 1 ); + assertTrue( scroller.isAfterLast() ); + } + + + @Test + public void relative1__empty_false() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertFalse( scroller.relative( 1 ) ); + } + + + @Test + public void relativeMinus1__empty_false() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + assertFalse( scroller.relative( -1 ) ); + } + + + @Test + public void relative1Current__empty_null() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + scroller.relative( 1 ); + assertNull( scroller.current() ); + } + + + @Test + public void relativeMinus1Current__empty_null() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + scroller.relative( -1 ); + assertNull( scroller.current() ); + } + + + @Test + public void relative1GetRow__empty_1() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + scroller.relative( 1 ); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void relativeMinus1GetRow__empty_0() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_EMPTY ); + scroller.relative( -1 ); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void next__data_true() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + assertTrue( scroller.next() ); + } + + + @Test + public 
void nextGetRow__data_1() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + assertEquals( 1, scroller.getRow() ); + } + + + @Test + public void nextCurrent__data_1() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + assertEquals( Integer.valueOf( 1 ), scroller.current() ); + } + + + @Test + public void atFirst_data_1() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + assertTrue( scroller.isFirst() ); + } + + + @Test + public void atLast_data_1() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.absolute( 4 ); + assertTrue( scroller.isLast() ); + } + + + @Test + public void previous__data_false() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + assertFalse( scroller.previous() ); + } + + + @Test + public void previousGetRow__data_0() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.previous(); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void previousCurrent__data_null() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.previous(); + assertNull( scroller.current() ); + } + + + @Test + public void nextOverflow__data_false() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + scroller.next(); + scroller.next(); + assertFalse( scroller.next() ); + } + + + @Test + public void nextOverflowCurrent__data_null() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + scroller.next(); + scroller.next(); + scroller.next(); + assertNull( scroller.current() ); + } + + + @Test + public void nextOverflowGetRow__data_0() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + 
scroller.next(); + scroller.next(); + scroller.next(); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void nextCurrent__data_value() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + for ( int i = 0; i < 4; i++ ) { + scroller.next(); + assertEquals( Integer.valueOf( i + 1 ), scroller.current() ); + } + } + + + @Test + public void rel0Invalid__data_false() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + assertFalse( scroller.relative( 0 ) ); + } + + + @Test + public void rel0Valid__data_false() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + assertTrue( scroller.relative( 0 ) ); + } + + + @Test + public void rel0InvalidCurrent__data_null() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.relative( 0 ); + assertNull( scroller.current() ); + } + + + @Test + public void rel0ValidCurrent__data_1() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.relative( 0 ); + assertEquals( Integer.valueOf( 1 ), scroller.current() ); + } + + + @Test + public void relIncOverflowValid__data_false() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + assertFalse( scroller.relative( 3 ) ); + } + + + @Test + public void relDecUnderflowValid__data_false() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + assertFalse( scroller.relative( -3 ) ); + } + + + @Test + public void relIncOverflowValue__data_null() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + scroller.relative( 3 ); + assertNull( scroller.current() ); + } + + + @Test + public void relDecUnderflowValue__data_null() throws SQLException { + MetaScroller scroller = new MetaScroller<>( 
TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + scroller.relative( -3 ); + assertNull( scroller.current() ); + } + + + @Test + public void relIncOverflowAfterLast__data_true() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + scroller.relative( 3 ); + assertTrue( scroller.isAfterLast() ); + } + + + @Test + public void relDecUnderflowBeforeFirst__data_true() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + scroller.relative( -3 ); + assertTrue( scroller.isBeforeFirst() ); + } + + + @Test + public void relIncOverflowGetRow__data_0() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + scroller.relative( 3 ); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void relDecUnderflowGetRow__data_0() throws SQLException { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.next(); + scroller.next(); + scroller.relative( -3 ); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void relIncValid__data_false() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + assertTrue( scroller.relative( 3 ) ); + } + + + @Test + public void relDecwValid__data_false() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.absolute( 4 ); + assertTrue( scroller.relative( -3 ) ); + } + + + @Test + public void relIncValue__data_4() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.relative( 4 ); + assertEquals( Integer.valueOf( 4 ), scroller.current() ); + } + + + @Test + public void relDecValue__data_1() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.absolute( 4 ); + scroller.relative( -3 ); + assertEquals( Integer.valueOf( 1 ), scroller.current() ); + } + + + @Test + public void absOverflowValue__data_null() { + 
MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.absolute( 5 ); + assertNull( scroller.current() ); + } + + + @Test + public void absUnderflowValue__data_true() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.relative( -5 ); + assertNull( scroller.current() ); + } + + + @Test + public void absUnderflowBeforeFirst__data_true() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.relative( -5 ); + assertTrue( scroller.isBeforeFirst() ); + } + + + @Test + public void absOverflowAfterLast__data_true() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.absolute( 5 ); + assertTrue( scroller.isAfterLast() ); + } + + + @Test + public void absOverflowGetRow__data_0() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.absolute( 5 ); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void absUnderflowGetRow__data_0() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.absolute( -5 ); + assertEquals( 0, scroller.getRow() ); + } + + + @Test + public void absReverseAccessValue__data_3() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.absolute( -2 ); + assertEquals( Integer.valueOf( 3 ), scroller.current() ); + } + + + @Test + public void absReverseAccessGetRow__data_3() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + scroller.absolute( -2 ); + assertEquals( 3, scroller.getRow() ); + } + + + @Test + public void absReverseAccessValid__data_3() { + MetaScroller scroller = new MetaScroller<>( TEST_DATA_FOUR ); + assertTrue( scroller.absolute( -2 ) ); + } + +} diff --git a/src/test/java/org/polypheny/jdbc/PolyConnectionTest.java b/src/test/java/org/polypheny/jdbc/PolyConnectionTest.java new file mode 100644 index 00000000..8858807d --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/PolyConnectionTest.java @@ -0,0 +1,107 @@ +/* + * Copyright 2019-2024 The Polypheny 
Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.sql.SQLException; +import org.junit.jupiter.api.Test; +import org.polypheny.jdbc.meta.PolyphenyDatabaseMetadata; +import org.polypheny.jdbc.properties.PolyphenyConnectionProperties; + +public class PolyConnectionTest { + + private PolyConnection connection; + + private PolyphenyConnectionProperties properties; + + private PolyphenyDatabaseMetadata databaseMetaData; + + + @Test + public void getAutoCommitWhenConnectionIsNotClosed() { + properties = mock( PolyphenyConnectionProperties.class ); + databaseMetaData = mock( PolyphenyDatabaseMetadata.class ); + connection = new PolyConnection( properties, databaseMetaData ); + + assertDoesNotThrow( () -> connection.getAutoCommit() ); + + verify( properties, times( 1 ) ).isAutoCommit(); + } + + + @Test + public void getAutoCommitWhenConnectionIsClosedThenThrowException() throws SQLException { + properties = mock( PolyphenyConnectionProperties.class ); + PrismInterfaceClient prismInterfaceClient = mock( PrismInterfaceClient.class ); + when( properties.getPrismInterfaceClient() ).thenReturn( prismInterfaceClient ); + 
databaseMetaData = mock( PolyphenyDatabaseMetadata.class ); + connection = new PolyConnection( properties, databaseMetaData ); + connection.close(); + + assertThrows( SQLException.class, () -> connection.getAutoCommit() ); + } + + + @Test + public void setAutoCommitWhenConnectionIsClosedThenThrowException() throws SQLException { + properties = mock( PolyphenyConnectionProperties.class ); + PrismInterfaceClient prismInterfaceClient = mock( PrismInterfaceClient.class ); + when( properties.getPrismInterfaceClient() ).thenReturn( prismInterfaceClient ); + databaseMetaData = mock( PolyphenyDatabaseMetadata.class ); + connection = new PolyConnection( properties, databaseMetaData ); + connection.close(); + + assertThrows( SQLException.class, () -> connection.setAutoCommit( true ) ); + } + + + @Test + public void setAutoCommitToTrue() throws SQLException { + properties = mock( PolyphenyConnectionProperties.class ); + databaseMetaData = mock( PolyphenyDatabaseMetadata.class ); + connection = new PolyConnection( properties, databaseMetaData ); + connection.setAutoCommit( false ); + + when( properties.isAutoCommit() ).thenReturn( false ); + + assertDoesNotThrow( () -> connection.setAutoCommit( true ) ); + + verify( properties, times( 1 ) ).setAutoCommit( true ); + } + + + @Test + public void setAutoCommitToFalse() throws PrismInterfaceServiceException { + properties = mock( PolyphenyConnectionProperties.class ); + databaseMetaData = mock( PolyphenyDatabaseMetadata.class ); + connection = new PolyConnection( properties, databaseMetaData ); + + when( properties.isAutoCommit() ).thenReturn( true ); + when( properties.getNetworkTimeout() ).thenReturn( 5000 ); + + assertDoesNotThrow( () -> connection.setAutoCommit( false ) ); + + verify( properties, times( 1 ) ).setAutoCommit( false ); + } + +} diff --git a/src/test/java/org/polypheny/jdbc/PolyphenyColumnMetaTest.java b/src/test/java/org/polypheny/jdbc/PolyphenyColumnMetaTest.java new file mode 100644 index 00000000..68b6a7d9 --- 
/dev/null +++ b/src/test/java/org/polypheny/jdbc/PolyphenyColumnMetaTest.java @@ -0,0 +1,368 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.sql.ResultSetMetaData; +import java.sql.Types; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.polypheny.jdbc.meta.PolyphenyColumnMeta; +import org.polypheny.jdbc.types.ProtoToJdbcTypeMap; +import org.polypheny.prism.ColumnMeta; +import org.polypheny.prism.ProtoPolyType; +import org.polypheny.prism.TypeMeta; + +public class PolyphenyColumnMetaTest { + + private static ColumnMeta protoColumnMeta; + private static ColumnMeta negatedColumnMeta; + private static PolyphenyColumnMeta specifiedColumnMeta; + + // parameters used to generate test data + private static final boolean IS_NULLABLE = true; + private static final int COLUMN_INDEX = 42; + private static final int LENGTH = 43; + private static final int PRECISION = 44; + private static final int SCALE = 44; + private static final String COLUMN_LABEL = "COLUMN_LABEL"; + private static final String COLUMN_NAME = "COLUMN_NAME"; + private static final String ENTITY_NAME = "ENTITY_NAME"; + 
private static final String SCHEMA_NAME = "SCHEMA_NAME"; + private static final String NAMESPACE_NAME = "NAMESPACE_NAME"; + private static final ProtoPolyType VALUE_TYPE = ProtoPolyType.BIGINT; + private static final int JDBC_TYPE = Types.NCLOB; + + + @BeforeAll + public static void setUpClass() { + TypeMeta typeMeta = TypeMeta.newBuilder() + .setProtoValueType( VALUE_TYPE ) + .build(); + protoColumnMeta = ColumnMeta.newBuilder() + .setColumnIndex( COLUMN_INDEX ) + .setIsNullable( IS_NULLABLE ) + .setLength( LENGTH ) + .setColumnLabel( COLUMN_LABEL ) + .setColumnName( COLUMN_NAME ) + .setPrecision( PRECISION ) + .setEntityName( ENTITY_NAME ) + .setSchemaName( SCHEMA_NAME ) + .setTypeMeta( typeMeta ) + .setScale( SCALE ) + .setNamespace( NAMESPACE_NAME ) + .build(); + negatedColumnMeta = ColumnMeta.newBuilder() + .setColumnIndex( COLUMN_INDEX ) + .setIsNullable( !IS_NULLABLE ) + .setLength( LENGTH ) + .setColumnLabel( COLUMN_LABEL ) + .setColumnName( COLUMN_NAME ) + .setPrecision( PRECISION ) + .setEntityName( ENTITY_NAME ) + .setSchemaName( SCHEMA_NAME ) + .setTypeMeta( typeMeta ) + .setScale( SCALE ) + .setNamespace( NAMESPACE_NAME ) + .build(); + specifiedColumnMeta = PolyphenyColumnMeta.fromSpecification( COLUMN_INDEX, COLUMN_LABEL, ENTITY_NAME, JDBC_TYPE ); + } + + + @Test + public void protoConstructor__ColumnMeta_Ordinal() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( COLUMN_INDEX, meta.getOrdinal() ); + } + + + @Test + public void protoConstructor__ColumnMeta_AutoIncrement() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertFalse( meta.isAutoIncrement() ); + } + + + @Test + public void protoConstructor__ColumnMeta_CaseSensitive() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertTrue( meta.isCaseSensitive() ); + } + + + @Test + public void protoConstructor__ColumnMeta_Searchable() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( 
protoColumnMeta ); + assertFalse( meta.isSearchable() ); + } + + + @Test + public void protoConstructor__ColumnMeta_Currency() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertFalse( meta.isCurrency() ); + } + + + @Test + public void protoConstructor__ColumnMeta_Nullable() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( ResultSetMetaData.columnNullable, meta.getNullable() ); + + PolyphenyColumnMeta negatedMeta = new PolyphenyColumnMeta( negatedColumnMeta ); + assertEquals( ResultSetMetaData.columnNoNulls, negatedMeta.getNullable() ); + } + + + @Test + public void protoConstructor__ColumnMeta_Signed() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertFalse( meta.isSigned() ); + } + + + @Test + public void protoConstructor__ColumnMeta_DisplaySize() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( LENGTH, meta.getDisplaySize() ); + } + + + @Test + public void protoConstructor__ColumnMeta_ColumnLabel() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( COLUMN_LABEL, meta.getColumnLabel() ); + } + + + @Test + public void protoConstructor__ColumnMeta_ColumnName() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( COLUMN_NAME, meta.getColumnName() ); + } + + + @Test + public void protoConstructor__ColumnMeta_Namespace() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( NAMESPACE_NAME, meta.getNamespace() ); + } + + + @Test + public void protoConstructor__ColumnMeta_Precision() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( PRECISION, meta.getPrecision() ); + } + + + @Test + public void protoConstructor__ColumnMeta_Scale() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( 1, meta.getScale() ); + } + + + @Test + public 
void protoConstructor__ColumnMeta_TableName() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( ENTITY_NAME, meta.getTableName() ); + } + + + @Test + public void protoConstructor__ColumnMeta_CatalogName() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( "", meta.getCatalogName() ); + } + + + @Test + public void protoConstructor__ColumnMeta_ReadOnly() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertFalse( meta.isReadOnly() ); + } + + + @Test + public void protoConstructor__ColumnMeta_Writable() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertFalse( meta.isWritable() ); + } + + + @Test + public void protoConstructor__ColumnMeta_DefinitelyWritable() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertFalse( meta.isDefinitelyWritable() ); + } + + + @Test + public void protoConstructor__ColumnMeta_ColumnClassName() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + assertEquals( "", meta.getColumnClassName() ); + } + + + @Test + public void protoConstructor__ColumnMeta_SqlType() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + int expected = ProtoToJdbcTypeMap.getJdbcTypeFromProto( VALUE_TYPE ); + assertEquals( expected, meta.getSqlType() ); + } + + + @Test + public void protoConstructor__ColumnMeta_FieldTypeName() { + PolyphenyColumnMeta meta = new PolyphenyColumnMeta( protoColumnMeta ); + String expected = VALUE_TYPE.name(); + assertEquals( expected, meta.getPolyphenyFieldTypeName() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_Ordinal() { + assertEquals( COLUMN_INDEX, specifiedColumnMeta.getOrdinal() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_AutoIncrement() { + assertFalse( specifiedColumnMeta.isAutoIncrement() ); + } + + + @Test + public 
void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_CaseSensitive() { + assertTrue( specifiedColumnMeta.isCaseSensitive() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_Searchable() { + assertFalse( specifiedColumnMeta.isSearchable() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_Currency() { + assertFalse( specifiedColumnMeta.isCurrency() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_Nullable() { + assertEquals( ResultSetMetaData.columnNullable, specifiedColumnMeta.getNullable() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_Signed() { + assertFalse( specifiedColumnMeta.isSigned() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_DisplaySize() { + assertEquals( -1, specifiedColumnMeta.getDisplaySize() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_ColumnLabel() { + assertEquals( COLUMN_LABEL, specifiedColumnMeta.getColumnLabel() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_ColumnName() { + assertEquals( COLUMN_LABEL, specifiedColumnMeta.getColumnName() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_Namespace() { + assertNull( specifiedColumnMeta.getNamespace() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_Precision() { + assertEquals( -1, specifiedColumnMeta.getPrecision() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_Scale() { + assertEquals( 1, specifiedColumnMeta.getScale() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_TableName() { + assertEquals( ENTITY_NAME, specifiedColumnMeta.getTableName() ); + } + + + @Test + public void 
fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_CatalogName() { + assertEquals( "", specifiedColumnMeta.getCatalogName() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_ReadOnly() { + assertFalse( specifiedColumnMeta.isReadOnly() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_Writable() { + assertFalse( specifiedColumnMeta.isWritable() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_DefinitelyWritable() { + assertFalse( specifiedColumnMeta.isDefinitelyWritable() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_ColumnClassName() { + assertEquals( "", specifiedColumnMeta.getColumnClassName() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_SqlType() { + assertEquals( JDBC_TYPE, specifiedColumnMeta.getSqlType() ); + } + + + @Test + public void fromSpecification__Ordinal_ColumnName_EntityName_JdbcType_FieldTypeName() { + assertEquals( "", specifiedColumnMeta.getPolyphenyFieldTypeName() ); + } + +} diff --git a/src/test/java/org/polypheny/jdbc/PolyphenyDriverTest.java b/src/test/java/org/polypheny/jdbc/PolyphenyDriverTest.java new file mode 100644 index 00000000..d2f37cef --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/PolyphenyDriverTest.java @@ -0,0 +1,384 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.fail; + +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.util.Properties; +import org.junit.jupiter.api.Test; +import org.polypheny.jdbc.properties.DriverProperties; +import org.polypheny.jdbc.properties.PropertyUtils; + +public class PolyphenyDriverTest { + + private static final PolyphenyDriver DRIVER = new PolyphenyDriver(); + + + @Test() + public void getParentLoggerThrowsException() { + assertThrows( SQLFeatureNotSupportedException.class, DRIVER::getParentLogger ); + } + + + @Test + public void jdbcCompliantWhenDriverIsJdbcCompliant() { + boolean jdbcCompliant = DRIVER.jdbcCompliant(); + assertEquals( DriverProperties.isJDBC_COMPLIANT(), jdbcCompliant ); + } + + + @Test + public void getMinorVersionReturnsCorrectVersion() { + int expectedMinorVersion = DriverProperties.getDRIVER_MINOR_VERSION(); + int actualMinorVersion = DRIVER.getMinorVersion(); + + assertEquals( expectedMinorVersion, actualMinorVersion ); + } + + + @Test + public void getMajorVersionReturnsCorrectVersion() { + int expectedMajorVersion = DriverProperties.getDRIVER_MAJOR_VERSION(); + int actualMajorVersion = DRIVER.getMajorVersion(); + + assertEquals( expectedMajorVersion, actualMajorVersion ); + } + + + @Test + public void getPropertyInfoWithValidUrlAndProperties() { + String url = "jdbc:polypheny://testuser:testpassword@localhost:20591/database"; + + try { + DriverPropertyInfo[] propertyInfo = 
DRIVER.getPropertyInfo( url, null ); + + assertEquals( 7, propertyInfo.length ); + + assertEquals( "user", propertyInfo[0].name ); + assertEquals( "testuser", propertyInfo[0].value ); + assertEquals( "Specifies the username for authentication. If not specified, the database uses the default user.", propertyInfo[0].description ); + assertFalse( propertyInfo[0].required ); + + assertEquals( "password", propertyInfo[1].name ); + assertEquals( "testpassword", propertyInfo[1].value ); + assertEquals( "Specifies the password associated with the given username. If not specified the database assumes that the user does not have a password.", propertyInfo[1].description ); + assertFalse( propertyInfo[1].required ); + + assertEquals( "autocommit", propertyInfo[2].name ); + assertEquals( "true", propertyInfo[2].value ); + assertEquals( "Determines if each SQL statement is treated as a transaction.", propertyInfo[2].description ); + assertArrayEquals( new String[]{ "true", "false" }, propertyInfo[2].choices ); + + assertEquals( "readonly", propertyInfo[3].name ); + assertEquals( "false", propertyInfo[3].value ); + assertEquals( "Indicates if the connection is in read-only mode. 
Currently ignored, reserved for future use.", propertyInfo[3].description ); + assertArrayEquals( new String[]{ "true", "false" }, propertyInfo[3].choices ); + + assertEquals( "holdability", propertyInfo[4].name ); + assertEquals( "CLOSE", propertyInfo[4].value ); + assertEquals( "Specifies the holdability of ResultSet objects.", propertyInfo[4].description ); + assertArrayEquals( new String[]{ "HOLD", "CLOSE" }, propertyInfo[4].choices ); + + assertEquals( "isolation", propertyInfo[5].name ); + assertEquals( "COMMITTED", propertyInfo[5].value ); + assertEquals( "Indicates the transaction isolation level.", propertyInfo[5].description ); + assertArrayEquals( new String[]{ "COMMITTED", "DIRTY", "SERIALIZABLE", "REPEATABLE_READ" }, propertyInfo[5].choices ); + + assertEquals( "nwtimeout", propertyInfo[6].name ); + assertEquals( "0", propertyInfo[6].value ); + assertEquals( "Specifies the network timeout in seconds. Corresponds to the JDBC network timeout.", propertyInfo[6].description ); + + } catch ( SQLException e ) { + fail( "An exception occurred: " + e.getMessage() ); + } + } + + + @Test + public void getPropertyInfoWithDefaultValuesWhenPropertiesNotProvided() { + String url = "jdbc:polypheny://localhost:20591/database"; + Properties properties = new Properties(); + + try { + DriverPropertyInfo[] infoProperties = DRIVER.getPropertyInfo( url, properties ); + + assertEquals( 7, infoProperties.length ); + + assertEquals( PropertyUtils.getUSERNAME_KEY(), infoProperties[0].name ); + assertNull( infoProperties[0].value ); + assertEquals( "Specifies the username for authentication. If not specified, the database uses the default user.", infoProperties[0].description ); + assertFalse( infoProperties[0].required ); + + assertEquals( PropertyUtils.getPASSWORD_KEY(), infoProperties[1].name ); + assertNull( infoProperties[1].value ); + assertEquals( "Specifies the password associated with the given username. 
If not specified the database assumes that the user does not have a password.", infoProperties[1].description ); + assertFalse( infoProperties[1].required ); + + assertEquals( PropertyUtils.getAUTOCOMMIT_KEY(), infoProperties[2].name ); + assertEquals( String.valueOf( PropertyUtils.isDEFAULT_AUTOCOMMIT() ), infoProperties[2].value ); + assertEquals( "Determines if each SQL statement is treated as a transaction.", infoProperties[2].description ); + assertArrayEquals( new String[]{ "true", "false" }, infoProperties[2].choices ); + + assertEquals( PropertyUtils.getREAD_ONLY_KEY(), infoProperties[3].name ); + assertEquals( String.valueOf( PropertyUtils.isDEFAULT_READ_ONLY() ), infoProperties[3].value ); + assertEquals( "Indicates if the connection is in read-only mode. Currently ignored, reserved for future use.", infoProperties[3].description ); + assertArrayEquals( new String[]{ "true", "false" }, infoProperties[3].choices ); + + assertEquals( PropertyUtils.getRESULT_SET_HOLDABILITY_KEY(), infoProperties[4].name ); + assertEquals( PropertyUtils.getHoldabilityName( PropertyUtils.getDEFAULT_RESULTSET_HOLDABILITY() ), infoProperties[4].value ); + assertEquals( "Specifies the holdability of ResultSet objects.", infoProperties[4].description ); + assertArrayEquals( new String[]{ "HOLD", "CLOSE" }, infoProperties[4].choices ); + + assertEquals( PropertyUtils.getTRANSACTION_ISOLATION_KEY(), infoProperties[5].name ); + assertEquals( PropertyUtils.getTransactionIsolationName( PropertyUtils.getDEFAULT_TRANSACTION_ISOLATION() ), infoProperties[5].value ); + assertEquals( "Indicates the transaction isolation level.", infoProperties[5].description ); + assertArrayEquals( new String[]{ "COMMITTED", "DIRTY", "SERIALIZABLE", "REPEATABLE_READ" }, infoProperties[5].choices ); + + assertEquals( PropertyUtils.getNETWORK_TIMEOUT_KEY(), infoProperties[6].name ); + assertEquals( String.valueOf( PropertyUtils.getDEFAULT_NETWORK_TIMEOUT() ), infoProperties[6].value ); + assertEquals( 
"Specifies the network timeout in seconds. Corresponds to the JDBC network timeout.", infoProperties[6].description ); + + } catch ( SQLException e ) { + fail( "An exception occurred: " + e.getMessage() ); + } + } + + + @Test + public void getPropertyInfoWithUserSpecifiedValuesWhenPropertiesProvided() { + String url = "jdbc:polypheny://localhost:20591/database"; + Properties properties = new Properties(); + properties.setProperty( "user", "testuser" ); + properties.setProperty( "password", "testpassword" ); + properties.setProperty( "autocommit", "false" ); + properties.setProperty( "readonly", "true" ); + properties.setProperty( "holdability", "HOLD" ); + properties.setProperty( "isolation", "DIRTY" ); + properties.setProperty( "nwtimeout", "10" ); + + try { + DriverPropertyInfo[] propertyInfo = DRIVER.getPropertyInfo( url, properties ); + + assertEquals( 7, propertyInfo.length ); + + assertEquals( "user", propertyInfo[0].name ); + assertEquals( "testuser", propertyInfo[0].value ); + assertEquals( "Specifies the username for authentication. If not specified, the database uses the default user.", propertyInfo[0].description ); + assertFalse( propertyInfo[0].required ); + + assertEquals( "password", propertyInfo[1].name ); + assertEquals( "testpassword", propertyInfo[1].value ); + assertEquals( "Specifies the password associated with the given username. If not specified the database assumes that the user does not have a password.", propertyInfo[1].description ); + assertFalse( propertyInfo[1].required ); + + assertEquals( "autocommit", propertyInfo[2].name ); + assertEquals( "false", propertyInfo[2].value ); + assertEquals( "Determines if each SQL statement is treated as a transaction.", propertyInfo[2].description ); + assertArrayEquals( new String[]{ "true", "false" }, propertyInfo[2].choices ); + + assertEquals( "readonly", propertyInfo[3].name ); + assertEquals( "true", propertyInfo[3].value ); + assertEquals( "Indicates if the connection is in read-only mode. 
Currently ignored, reserved for future use.", propertyInfo[3].description ); + assertArrayEquals( new String[]{ "true", "false" }, propertyInfo[3].choices ); + + assertEquals( "holdability", propertyInfo[4].name ); + assertEquals( "HOLD", propertyInfo[4].value ); + assertEquals( "Specifies the holdability of ResultSet objects.", propertyInfo[4].description ); + assertArrayEquals( new String[]{ "HOLD", "CLOSE" }, propertyInfo[4].choices ); + + assertEquals( "isolation", propertyInfo[5].name ); + assertEquals( "DIRTY", propertyInfo[5].value ); + assertEquals( "Indicates the transaction isolation level.", propertyInfo[5].description ); + assertArrayEquals( new String[]{ "COMMITTED", "DIRTY", "SERIALIZABLE", "REPEATABLE_READ" }, propertyInfo[5].choices ); + + assertEquals( "nwtimeout", propertyInfo[6].name ); + assertEquals( "10", propertyInfo[6].value ); + assertEquals( "Specifies the network timeout in seconds. Corresponds to the JDBC network timeout.", propertyInfo[6].description ); + + } catch ( SQLException e ) { + fail( "An exception occurred: " + e.getMessage() ); + } + } + + + @Test + public void acceptsURL_String__CorrectDriverSchema() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( DriverProperties.getDRIVER_URL_SCHEMA() ); + + assertEquals( expected, actual ); + } + + + @Test() + public void acceptsURL_null() { + assertThrows( SQLException.class, () -> DRIVER.acceptsURL( null ) ); + } + + + @Test + public void acceptsURL_EmptyString() throws Exception { + final boolean expected = false; + final boolean actual = DRIVER.acceptsURL( "" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__NoJdbcSchema() throws Exception { + final boolean expected = false; + final boolean actual = DRIVER.acceptsURL( "polypheny://username:password@host:20569/database?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__NoPolyphenySubSchema() throws 
Exception { + final boolean expected = false; + final boolean actual = DRIVER.acceptsURL( "jdbc://username:password@host:20569/database?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__WrongSubSchema() throws Exception { + final boolean expected = false; + final boolean actual = DRIVER.acceptsURL( "jdbc:foo://username:password@host:20569/database?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__AcceptableUrl() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@host:20569/database?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__MissingCredentials() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://host:20569" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__AcceptableUrl_NewUrlStyle() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny:http://username:password@host:20569/database?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__AcceptableUrl_NewUrlStyleHttps() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny:https://username:password@host:20569/database?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__AcceptableUrlNoPassword() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username@host:20569/database?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__AcceptableUrlNoParameters() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( 
"jdbc:polypheny://username:password@host:20569/database" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__AcceptableUrlNoDatabase() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@host:20569?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__AcceptableUrlNoPort() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@host/database?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__AcceptableUrlNoHost() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@:20569/database?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__AcceptableUrlNoUsernamePassword() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://host:20569/database?k1=v1&k2=v2" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__AcceptableUrlDefaultsOnly() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny:///" ); + + assertEquals( expected, actual ); + } + + + @Test + public void acceptsURL_String__MalformedParameter() throws Exception { + final boolean expected = true; + final boolean actual = DRIVER.acceptsURL( "jdbc:polypheny://username:password@host:20569/database?k1=v1&k2" ); // k2 is ignored! 
+ + assertEquals( expected, actual ); + } + +} diff --git a/src/test/java/org/polypheny/jdbc/QueryTest.java b/src/test/java/org/polypheny/jdbc/QueryTest.java new file mode 100644 index 00000000..0b74f492 --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/QueryTest.java @@ -0,0 +1,108 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.polypheny.jdbc.multimodel.DocumentResult; +import org.polypheny.jdbc.multimodel.PolyStatement; +import org.polypheny.jdbc.multimodel.Result; +import org.polypheny.jdbc.multimodel.Result.ResultType; +import org.polypheny.jdbc.types.PolyDocument; + +public class QueryTest { + + private static final String MQL_LANGUAGE_NAME = "mongo"; + private static final String TEST_QUERY = "db.customers.find({});"; + + + @BeforeAll + public static void setup() throws SQLException, ClassNotFoundException { + TestHelper.insertTestData(); + } + + + @Test + public void thisOneWorks() throws SQLException { + try ( + Connection connection = TestHelper.getConnection(); + Statement statement = connection.createStatement(); + ResultSet resultSet 
= statement.executeQuery( "SELECT * FROM customers" ) + ) { + while ( resultSet.next() ) { + // Process the result set... + } + } + } + + + @Test + public void simpleRelationalTest() { + try ( Connection connection = TestHelper.getConnection() ) { + if ( !connection.isWrapperFor( PolyConnection.class ) ) { + fail( "Driver must support unwrapping to PolyphenyConnection" ); + } + PolyStatement polyStatement = connection.unwrap( PolyConnection.class ).createPolyStatement(); + Result result = polyStatement.execute( "public", "sql", "SELECT * FROM customers" ); + assertEquals( ResultType.RELATIONAL, result.getResultType() ); + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } + } + + + @Test + public void simpleMqlTest() { + try ( Connection connection = TestHelper.getConnection() ) { + if ( !connection.isWrapperFor( PolyConnection.class ) ) { + fail( "Driver must support unwrapping to PolyphenyConnection" ); + } + PolyStatement polyStatement = connection.unwrap( PolyConnection.class ).createPolyStatement(); + Result result = polyStatement.execute( "public", MQL_LANGUAGE_NAME, TEST_QUERY ); + assertEquals( ResultType.DOCUMENT, result.getResultType() ); + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } + } + + + @Test + public void mqlDataRetrievalTest() { + try ( Connection connection = TestHelper.getConnection() ) { + if ( !connection.isWrapperFor( PolyConnection.class ) ) { + fail( "Driver must support unwrapping to PolyphenyConnection" ); + } + PolyStatement polyStatement = connection.unwrap( PolyConnection.class ).createPolyStatement(); + Result result = polyStatement.execute( "public", MQL_LANGUAGE_NAME, TEST_QUERY ); + DocumentResult docs = result.unwrap( DocumentResult.class ); + for ( PolyDocument doc : docs ) { + } + assertEquals( ResultType.DOCUMENT, result.getResultType() ); + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } + } + +} diff --git a/src/test/java/org/polypheny/jdbc/StatementTest.java 
b/src/test/java/org/polypheny/jdbc/StatementTest.java new file mode 100644 index 00000000..4eb4ab56 --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/StatementTest.java @@ -0,0 +1,256 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +public class StatementTest { + + Connection con; + + + @BeforeEach + void createConnection() throws SQLException { + con = TestHelper.getConnection(); + try ( Statement statement = con.createStatement() ) { + statement.execute( "DROP TABLE IF EXISTS t" ); + statement.execute( "CREATE TABLE t(id INTEGER PRIMARY KEY, a INTEGER NOT NULL)" ); + } + } + + + @AfterEach + void closeConnection() throws SQLException { + con.close(); + } + + + @Test + void testPrepareStatement() throws SQLException { + try ( PreparedStatement 
p = con.prepareStatement( "INSERT INTO t(id, a) VALUES (?, ?)" ) ) { + p.setInt( 1, 4 ); + p.setInt( 2, 4 ); + p.execute(); + } + try ( Statement s = con.createStatement() ) { + ResultSet resultSet = s.executeQuery( "SELECT id, a FROM t WHERE id = 4" ); + assertTrue( resultSet.next() ); + assertEquals( 4, resultSet.getInt( 1 ) ); + assertEquals( 4, resultSet.getInt( 1 ) ); + assertFalse( resultSet.next() ); + } + } + + + @Test + void testMoreThanOneExecute() throws SQLException { + try ( PreparedStatement p = con.prepareStatement( "INSERT INTO t(id, a) VALUES (?, ?)" ) ) { + p.setInt( 1, 4 ); + p.setInt( 2, 4 ); + p.execute(); + p.setInt( 1, 5 ); + p.setInt( 2, 5 ); + p.execute(); + } + } + + + @Test + void testStatementSingleExecCleanup() throws SQLException { + try ( Statement p = con.createStatement() ) { + p.execute( "INSERT INTO t(id, a) VALUES (1, 4)" ); + } + try ( Statement statement = con.createStatement() ) { + statement.execute( "DROP TABLE IF EXISTS t" ); + } + } + + + @Test + void testStatementMultipleExecCleanup() throws SQLException { + try ( Statement p = con.createStatement() ) { + p.execute( "INSERT INTO t(id, a) VALUES (1, 4)" ); + p.execute( "INSERT INTO t(id, a) VALUES (2, 4)" ); + } + try ( Statement statement = con.createStatement() ) { + statement.execute( "DROP TABLE IF EXISTS t" ); + } + } + + + @Test + void testPreparedStatementSingleExecCleanup() throws SQLException { + try ( PreparedStatement p = con.prepareStatement( "INSERT INTO t(id, a) VALUES (?, ?)" ) ) { + p.setInt( 1, 4 ); + p.setInt( 2, 4 ); + p.execute(); + } + try ( Statement statement = con.createStatement() ) { + statement.execute( "DROP TABLE IF EXISTS t" ); + } + } + + + @Test + void testMultipleStatements() throws SQLException { + try ( PreparedStatement p = con.prepareStatement( "INSERT INTO t(id, a) VALUES (?, ?)" ) ) { + p.setInt( 1, 4 ); + p.setInt( 2, 4 ); + p.execute(); + try ( Statement s = con.createStatement() ) { + s.execute( "INSERT INTO t(id, a) VALUES (5, 5)" 
); + s.execute( "INSERT INTO t(id, a) VALUES (6, 6)" ); + } + p.setInt( 1, 7 ); + p.setInt( 2, 7 ); + p.execute(); + con.close(); + } + } + + + @Test + void testMultipleStatements2() throws SQLException { + try ( Statement s1 = con.createStatement() ) { + s1.execute( "INSERT INTO t(id, a) VALUES (1, 4)" ); + s1.execute( "INSERT INTO t(id, a) VALUES (2, 4)" ); + try ( Statement s2 = con.createStatement() ) { + s2.execute( "INSERT INTO t(id, a) VALUES (3, 5)" ); + s2.execute( "INSERT INTO t(id, a) VALUES (4, 6)" ); + } + s1.execute( "INSERT INTO t(id, a) VALUES (5, 7)" ); + s1.execute( "INSERT INTO t(id, a) VALUES (6, 7)" ); + con.close(); + } + } + + + @Test + void testPreparedStatementDualExecCleanup() throws SQLException { + try ( PreparedStatement p = con.prepareStatement( "INSERT INTO t(id, a) VALUES (?, ?)" ) ) { + p.setInt( 1, 4 ); + p.setInt( 2, 4 ); + p.execute(); + p.setInt( 1, 5 ); + p.setInt( 2, 5 ); + p.execute(); + } + try ( Statement statement = con.createStatement() ) { + statement.execute( "DROP TABLE IF EXISTS t" ); + } + } + + + @Test + void testPreparedStatementBatchExecCleanup() throws SQLException { + try ( PreparedStatement p = con.prepareStatement( "INSERT INTO t(id, a) VALUES (?, ?)" ) ) { + p.setInt( 1, 4 ); + p.setInt( 2, 4 ); + p.addBatch(); + p.setInt( 1, 5 ); + p.setInt( 2, 5 ); + p.addBatch(); + p.executeBatch(); + } + try ( Statement statement = con.createStatement() ) { + statement.execute( "DROP TABLE IF EXISTS t" ); + } + } + + + @Test + void testPreparedStatementDualExecUpdate() throws SQLException { + try ( PreparedStatement p = con.prepareStatement( "INSERT INTO t(id, a) VALUES (?, ?)" ) ) { + p.setInt( 1, 4 ); + p.setInt( 2, 4 ); + p.execute(); + p.setInt( 1, 5 ); + p.setInt( 2, 5 ); + p.execute(); + } + try ( Statement statement = con.createStatement() ) { + statement.execute( "SELECT * FROM t" ); + } + } + + + @ParameterizedTest() + @ValueSource(ints = { 99, 100, 101 }) + void testFetch( int n ) throws SQLException { + for ( 
int i = 0; i < n; i++ ) { + try ( PreparedStatement p = con.prepareStatement( "INSERT INTO t(id, a) VALUES (?, ?)" ) ) { + p.setInt( 1, i ); + p.setInt( 2, i ); + p.addBatch(); + p.executeBatch(); + } + } + + try ( Statement s = con.createStatement() ) { + ResultSet res = s.executeQuery( "SELECT * FROM t" ); + int count = 0; + while ( res.next() ) { + // Consume all results + count++; + } + assertEquals( n, count ); + } + + } + + + @Test + void testLargeBatch() throws SQLException { + try ( Statement s = con.createStatement() ) { + s.addBatch( "INSERT INTO t(id, a) VALUES (1, 1)" ); + s.addBatch( "INSERT INTO t(id, a) VALUES (2, 2)" ); + s.addBatch( "INSERT INTO t(id, a) VALUES (3, 3)" ); + long[] res = s.executeLargeBatch(); + assertArrayEquals( new long[]{ 1, 1, 1 }, res ); + } + } + + + @Test + void testParameterizedLargeBatch() throws SQLException { + try ( PreparedStatement p = con.prepareStatement( "INSERT INTO t(id, a) VALUES (?, ?)" ) ) { + p.setInt( 1, 1 ); + p.setInt( 2, 1 ); + p.addBatch(); + p.setInt( 1, 2 ); + p.setInt( 2, 2 ); + p.addBatch(); + long[] res = p.executeLargeBatch(); + assertArrayEquals( new long[]{ 2 }, res ); + } + } + +} diff --git a/src/test/java/org/polypheny/jdbc/TestHelper.java b/src/test/java/org/polypheny/jdbc/TestHelper.java new file mode 100644 index 00000000..0fdb9b2a --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/TestHelper.java @@ -0,0 +1,90 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Statement; + +public class TestHelper { + + public static Connection getConnection() { + final String DB_URL = "jdbc:polypheny://localhost:20590"; + final String USER = "pa"; + final String PASS = ""; + + // TODO Class.forName( "org.polypheny.jdbc.PolyphenyDriver" ); + + try { + return DriverManager.getConnection( DB_URL, USER, PASS ); + + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } + } + + + public static void insertTestData() throws SQLException { + try ( + Connection connection = getConnection(); + Statement statement = connection.createStatement(); + ) { + statement.execute( "DROP TABLE IF EXISTS customers" ); + statement.execute( + "CREATE TABLE customers(\n" + + " id INTEGER PRIMARY KEY,\n" + + " name TEXT NOT NULL,\n" + + " year_joined INTEGER NOT NULL\n" + + ")" + ); + try ( PreparedStatement insert = connection.prepareStatement( "INSERT INTO customers(id, name, year_joined) VALUES (?, ?, ?)" ) ) { + insert.setInt( 1, 1 ); + insert.setString( 2, "Maria" ); + insert.setInt( 3, 2012 ); + insert.addBatch(); + insert.setInt( 1, 2 ); + insert.setString( 2, "Daniel" ); + insert.setInt( 3, 2020 ); + insert.addBatch(); + insert.setInt( 1, 3 ); + insert.setString( 2, "Peter" ); + insert.setInt( 3, 2001 ); + insert.addBatch(); + insert.setInt( 1, 4 ); + insert.setString( 2, "Anna" ); + insert.setInt( 3, 2001 ); + insert.addBatch(); + insert.setInt( 1, 5 ); + insert.setString( 2, "Thomas" ); + insert.setInt( 3, 2004 ); + insert.addBatch(); + insert.setInt( 1, 6 ); + insert.setString( 2, "Andreas" ); + insert.setInt( 3, 2014 ); + insert.addBatch(); + insert.setInt( 1, 7 ); + insert.setString( 2, "Michael" ); + insert.setInt( 3, 2010 ); + insert.addBatch(); + 
insert.executeBatch(); + } + } + } + +} diff --git a/src/test/java/org/polypheny/jdbc/properties/PolyConnectionPropertiesTest.java b/src/test/java/org/polypheny/jdbc/properties/PolyConnectionPropertiesTest.java new file mode 100644 index 00000000..7bcf9da5 --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/properties/PolyConnectionPropertiesTest.java @@ -0,0 +1,286 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.properties; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.polypheny.jdbc.ConnectionString; +import org.polypheny.jdbc.PolyphenyStatement; +import org.polypheny.jdbc.PrismInterfaceClient; + +public class PolyConnectionPropertiesTest { + + private static ConnectionString connectionString; + + + @BeforeAll + public static void setUpClass() throws SQLException { + connectionString = new ConnectionString( 
"jdbc:polypheny://localhost:20590" ); + } + + + private static final PrismInterfaceClient PRISM_INTERFACE_CLIENT = mock( PrismInterfaceClient.class ); + private static final PolyphenyStatement polyphenyStatement = mock( PolyphenyStatement.class ); + + + @Test + public void toStatementProperties_Type_Concurrency_Holdability_Conversion() throws SQLException { + when( polyphenyStatement.hasStatementId() ).thenReturn( false ); + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + PolyphenyStatementProperties expectedProperties = new PolyphenyStatementProperties(); + expectedProperties.setPolyphenyStatement( polyphenyStatement ); + expectedProperties.setPrismInterfaceClient( PRISM_INTERFACE_CLIENT ); + expectedProperties.setQueryTimeoutSeconds( PropertyUtils.getDEFAULT_QUERY_TIMEOUT_SECONDS() ); + expectedProperties.setResultSetType( ResultSet.TYPE_SCROLL_INSENSITIVE ); + expectedProperties.setResultSetConcurrency( ResultSet.CONCUR_READ_ONLY ); + expectedProperties.setResultSetHoldability( ResultSet.CLOSE_CURSORS_AT_COMMIT ); + expectedProperties.setFetchSize( PropertyUtils.getDEFAULT_FETCH_SIZE() ); + expectedProperties.setFetchDirection( PropertyUtils.getDEFAULT_FETCH_DIRECTION() ); + expectedProperties.setMaxFieldSize( PropertyUtils.getDEFAULT_MAX_FIELD_SIZE() ); + expectedProperties.setLargeMaxRows( PropertyUtils.getDEFAULT_LARGE_MAX_ROWS() ); + expectedProperties.setDoesEscapeProcessing( PropertyUtils.isDEFAULT_DOING_ESCAPE_PROCESSING() ); + expectedProperties.setIsPoolable( PropertyUtils.isDEFAULT_STATEMENT_POOLABLE() ); + + PolyphenyStatementProperties actualProperties = connectionProperties.toStatementProperties( + ResultSet.TYPE_SCROLL_INSENSITIVE, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT + ); + actualProperties.setPolyphenyStatement( polyphenyStatement ); + assertEquals( expectedProperties.getQueryTimeoutSeconds(), 
actualProperties.getQueryTimeoutSeconds() ); + assertEquals( expectedProperties.getResultSetType(), actualProperties.getResultSetType() ); + assertEquals( expectedProperties.getResultSetConcurrency(), actualProperties.getResultSetConcurrency() ); + assertEquals( expectedProperties.getResultSetHoldability(), actualProperties.getResultSetHoldability() ); + assertEquals( expectedProperties.getFetchSize(), actualProperties.getFetchSize() ); + assertEquals( expectedProperties.getFetchDirection(), actualProperties.getFetchDirection() ); + assertEquals( expectedProperties.getMaxFieldSize(), actualProperties.getMaxFieldSize() ); + assertEquals( expectedProperties.getLargeMaxRows(), actualProperties.getLargeMaxRows() ); + assertEquals( expectedProperties.isDoesEscapeProcessing(), actualProperties.isDoesEscapeProcessing() ); + assertEquals( expectedProperties.isPoolable(), actualProperties.isPoolable() ); + } + + + @Test + public void toStatementProperties_Type_Concurrency_Conversion() throws SQLException { + when( polyphenyStatement.hasStatementId() ).thenReturn( false ); + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + PolyphenyStatementProperties expectedProperties = new PolyphenyStatementProperties(); + expectedProperties.setPolyphenyStatement( polyphenyStatement ); + expectedProperties.setPrismInterfaceClient( PRISM_INTERFACE_CLIENT ); + expectedProperties.setQueryTimeoutSeconds( PropertyUtils.getDEFAULT_QUERY_TIMEOUT_SECONDS() ); + expectedProperties.setResultSetType( ResultSet.TYPE_FORWARD_ONLY ); + expectedProperties.setResultSetConcurrency( ResultSet.CONCUR_READ_ONLY ); + expectedProperties.setResultSetHoldability( PropertyUtils.getDEFAULT_RESULTSET_HOLDABILITY() ); + expectedProperties.setFetchSize( PropertyUtils.getDEFAULT_FETCH_SIZE() ); + expectedProperties.setFetchDirection( PropertyUtils.getDEFAULT_FETCH_DIRECTION() ); + expectedProperties.setMaxFieldSize( 
PropertyUtils.getDEFAULT_MAX_FIELD_SIZE() ); + expectedProperties.setLargeMaxRows( PropertyUtils.getDEFAULT_LARGE_MAX_ROWS() ); + expectedProperties.setDoesEscapeProcessing( PropertyUtils.isDEFAULT_DOING_ESCAPE_PROCESSING() ); + expectedProperties.setIsPoolable( PropertyUtils.isDEFAULT_STATEMENT_POOLABLE() ); + + PolyphenyStatementProperties actualProperties = connectionProperties.toStatementProperties( ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY ); + actualProperties.setPolyphenyStatement( polyphenyStatement ); + assertEquals( expectedProperties.getQueryTimeoutSeconds(), actualProperties.getQueryTimeoutSeconds() ); + assertEquals( expectedProperties.getResultSetType(), actualProperties.getResultSetType() ); + assertEquals( expectedProperties.getResultSetConcurrency(), actualProperties.getResultSetConcurrency() ); + assertEquals( expectedProperties.getResultSetHoldability(), actualProperties.getResultSetHoldability() ); + assertEquals( expectedProperties.getFetchSize(), actualProperties.getFetchSize() ); + assertEquals( expectedProperties.getFetchDirection(), actualProperties.getFetchDirection() ); + assertEquals( expectedProperties.getMaxFieldSize(), actualProperties.getMaxFieldSize() ); + assertEquals( expectedProperties.getLargeMaxRows(), actualProperties.getLargeMaxRows() ); + assertEquals( expectedProperties.isDoesEscapeProcessing(), actualProperties.isDoesEscapeProcessing() ); + assertEquals( expectedProperties.isPoolable(), actualProperties.isPoolable() ); + } + + + @Test + public void toStatementProperties_Defaults_Conversion() throws SQLException { + when( polyphenyStatement.hasStatementId() ).thenReturn( false ); + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + PolyphenyStatementProperties expectedProperties = new PolyphenyStatementProperties(); + expectedProperties.setPolyphenyStatement( polyphenyStatement ); + expectedProperties.setPrismInterfaceClient( 
PRISM_INTERFACE_CLIENT ); + expectedProperties.setQueryTimeoutSeconds( PropertyUtils.getDEFAULT_QUERY_TIMEOUT_SECONDS() ); + expectedProperties.setResultSetType( ResultSet.TYPE_FORWARD_ONLY ); + expectedProperties.setResultSetConcurrency( ResultSet.CONCUR_READ_ONLY ); + expectedProperties.setResultSetHoldability( ResultSet.CLOSE_CURSORS_AT_COMMIT ); + expectedProperties.setFetchSize( PropertyUtils.getDEFAULT_FETCH_SIZE() ); + expectedProperties.setFetchDirection( PropertyUtils.getDEFAULT_FETCH_DIRECTION() ); + expectedProperties.setMaxFieldSize( PropertyUtils.getDEFAULT_MAX_FIELD_SIZE() ); + expectedProperties.setLargeMaxRows( PropertyUtils.getDEFAULT_LARGE_MAX_ROWS() ); + expectedProperties.setDoesEscapeProcessing( PropertyUtils.isDEFAULT_DOING_ESCAPE_PROCESSING() ); + expectedProperties.setIsPoolable( PropertyUtils.isDEFAULT_STATEMENT_POOLABLE() ); + + try { + PolyphenyStatementProperties actualProperties = connectionProperties.toStatementProperties(); + actualProperties.setPolyphenyStatement( polyphenyStatement ); + assertEquals( expectedProperties.getQueryTimeoutSeconds(), actualProperties.getQueryTimeoutSeconds() ); + assertEquals( expectedProperties.getResultSetType(), actualProperties.getResultSetType() ); + assertEquals( expectedProperties.getResultSetConcurrency(), actualProperties.getResultSetConcurrency() ); + assertEquals( expectedProperties.getResultSetHoldability(), actualProperties.getResultSetHoldability() ); + assertEquals( expectedProperties.getFetchSize(), actualProperties.getFetchSize() ); + assertEquals( expectedProperties.getFetchDirection(), actualProperties.getFetchDirection() ); + assertEquals( expectedProperties.getMaxFieldSize(), actualProperties.getMaxFieldSize() ); + assertEquals( expectedProperties.getLargeMaxRows(), actualProperties.getLargeMaxRows() ); + assertEquals( expectedProperties.isDoesEscapeProcessing(), actualProperties.isDoesEscapeProcessing() ); + assertEquals( expectedProperties.isPoolable(), actualProperties.isPoolable() 
); + } catch ( SQLException e ) { + fail( "SQLException thrown: " + e.getMessage() ); + } + } + + + @Test + public void setNamespaceName_Valid_Sync() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + String namespaceName = "testNamespace"; + + connectionProperties.setNamespaceName( namespaceName ); + + assertEquals( namespaceName, connectionProperties.getNamespaceName() ); + verify( PRISM_INTERFACE_CLIENT, times( 1 ) ).setConnectionProperties( connectionProperties, connectionProperties.getNetworkTimeout() ); + } + + + @Test + public void setCatalogName_Valid_NoSync() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + String catalogName = "test_catalog"; + + connectionProperties.setCatalogName( catalogName ); + + assertEquals( catalogName, connectionProperties.getCatalogName() ); + verify( PRISM_INTERFACE_CLIENT, times( 0 ) ).setConnectionProperties( connectionProperties, connectionProperties.getNetworkTimeout() ); + } + + + @Test + public void setTransactionIsolation_Invalid_Error() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + + try { + connectionProperties.setTransactionIsolation( 999 ); + fail( "Expected SQLException to be thrown" ); + } catch ( SQLException e ) { + assertEquals( "Invalid value for transaction isolation", e.getMessage() ); + } + } + + + @Test + public void setTransactionIsolation_Valid_Sync() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + int transactionIsolation = Connection.TRANSACTION_READ_COMMITTED; + + try { + connectionProperties.setTransactionIsolation( transactionIsolation ); + } catch ( SQLException e ) { + 
fail( "Should not throw an exception" ); + } + + assertEquals( transactionIsolation, connectionProperties.getTransactionIsolation() ); + verify( PRISM_INTERFACE_CLIENT, times( 1 ) ).setConnectionProperties( connectionProperties, connectionProperties.getNetworkTimeout() ); + } + + + @Test + public void setNetworkTimeout_Valid_Sync() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + int networkTimeout = 5000; + + connectionProperties.setNetworkTimeout( networkTimeout ); + + verify( PRISM_INTERFACE_CLIENT, times( 1 ) ).setConnectionProperties( connectionProperties, connectionProperties.getNetworkTimeout() ); + assertEquals( connectionProperties.getNetworkTimeout(), networkTimeout ); + } + + + @Test + public void setResultSetHoldability_Invalid_Error() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + + try { + connectionProperties.setResultSetHoldability( 100 ); + } catch ( SQLException e ) { + assertTrue( e.getMessage().contains( "Invalid value for result set holdability" ) ); + } + } + + + @Test + public void setResultSetHoldability_Valid_NoSync() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + + connectionProperties.setResultSetHoldability( ResultSet.CLOSE_CURSORS_AT_COMMIT ); + assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, connectionProperties.getResultSetHoldability() ); + + verify( PRISM_INTERFACE_CLIENT, times( 0 ) ).setConnectionProperties( connectionProperties, connectionProperties.getNetworkTimeout() ); + } + + + @Test + public void setReadOnly_False_Sync() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + 
connectionProperties.setReadOnly( false ); + + assertFalse( connectionProperties.isReadOnly() ); + verify( PRISM_INTERFACE_CLIENT, times( 1 ) ).setConnectionProperties( connectionProperties, connectionProperties.getNetworkTimeout() ); + } + + + @Test + public void setReadOnly_True_Sync() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + connectionProperties.setReadOnly( true ); + + assertTrue( connectionProperties.isReadOnly() ); + verify( PRISM_INTERFACE_CLIENT, times( 1 ) ).setConnectionProperties( connectionProperties, connectionProperties.getNetworkTimeout() ); + } + + + @Test + public void setAutoCommit_False_Sync() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + connectionProperties.setAutoCommit( false ); + + assertFalse( connectionProperties.isAutoCommit() ); + verify( PRISM_INTERFACE_CLIENT, times( 1 ) ).setConnectionProperties( connectionProperties, connectionProperties.getNetworkTimeout() ); + } + + + @Test + public void setAutoCommit_True_Sync() throws SQLException { + PolyphenyConnectionProperties connectionProperties = new PolyphenyConnectionProperties( connectionString, PRISM_INTERFACE_CLIENT ); + connectionProperties.setAutoCommit( true ); + + assertTrue( connectionProperties.isAutoCommit() ); + verify( PRISM_INTERFACE_CLIENT, times( 1 ) ).setConnectionProperties( connectionProperties, connectionProperties.getNetworkTimeout() ); + } + +} diff --git a/src/test/java/org/polypheny/jdbc/properties/PropertyUtilsTest.java b/src/test/java/org/polypheny/jdbc/properties/PropertyUtilsTest.java new file mode 100644 index 00000000..7d5773f9 --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/properties/PropertyUtilsTest.java @@ -0,0 +1,244 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 
(the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.properties; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.sql.Connection; +import java.sql.ResultSet; +import org.junit.jupiter.api.Test; + +public class PropertyUtilsTest { + + @Test + public void testDefaultTransactionIsolation() { + final int expected = Connection.TRANSACTION_READ_COMMITTED; + assertEquals( expected, PropertyUtils.getDEFAULT_TRANSACTION_ISOLATION() ); + } + + + @Test + public void testDefaultNetworkTimeout() { + final int expected = 0; + assertEquals( expected, PropertyUtils.getDEFAULT_NETWORK_TIMEOUT() ); + } + + + @Test + public void testDefaultQueryTimeoutSeconds() { + final int expected = 0; + assertEquals( expected, PropertyUtils.getDEFAULT_QUERY_TIMEOUT_SECONDS() ); + } + + + @Test + public void testDefaultFetchSize() { + final int expected = 100; + assertEquals( expected, PropertyUtils.getDEFAULT_FETCH_SIZE() ); + } + + + @Test + public void testDefaultFetchDirection() { + final int expected = ResultSet.FETCH_FORWARD; + assertEquals( expected, PropertyUtils.getDEFAULT_FETCH_DIRECTION() ); + } + + + @Test + public void testDefaultResultSetType() { + final int expected = ResultSet.TYPE_FORWARD_ONLY; + assertEquals( expected, PropertyUtils.getDEFAULT_RESULTSET_TYPE() ); + } + + + @Test + public void testDefaultResultSetConcurrency() { + final int expected = ResultSet.CONCUR_READ_ONLY; + assertEquals( expected, PropertyUtils.getDEFAULT_RESULTSET_CONCURRENCY() ); + } + + + 
@Test + public void testDefaultMaxFieldSize() { + final int expected = 0; + assertEquals( expected, PropertyUtils.getDEFAULT_MAX_FIELD_SIZE() ); + } + + + @Test + public void testDefaultLargeMaxRows() { + final long expected = 0L; + assertEquals( expected, PropertyUtils.getDEFAULT_LARGE_MAX_ROWS() ); + } + + + @Test + public void testDefaultDoingEscapeProcessing() { + final boolean expected = true; + assertEquals( expected, PropertyUtils.isDEFAULT_DOING_ESCAPE_PROCESSING() ); + } + + + @Test + public void testDefaultStatementPoolable() { + final boolean expected = false; + assertEquals( expected, PropertyUtils.isDEFAULT_STATEMENT_POOLABLE() ); + } + + + @Test + public void testDefaultPreparedStatementPoolable() { + final boolean expected = false; + assertEquals( expected, PropertyUtils.isDEFAULT_PREPARED_STATEMENT_POOLABLE() ); + } + + + @Test + public void testDefaultCallableStatementPoolable() { + final boolean expected = false; + assertEquals( expected, PropertyUtils.isDEFAULT_CALLABLE_STATEMENT_POOLABLE() ); + } + + + @Test + public void testDefaultAutocommit() { + final boolean expected = true; + assertEquals( expected, PropertyUtils.isDEFAULT_AUTOCOMMIT() ); + } + + + @Test + public void testDefaultReadOnly() { + final boolean expected = false; + assertEquals( expected, PropertyUtils.isDEFAULT_READ_ONLY() ); + } + + + @Test + public void defaultHostSetToLocalHost() { + final String expected = "localhost"; + assertEquals( expected, PropertyUtils.getDEFAULT_HOST() ); + + } + + + @Test + public void defaultPortIsCorrect() { + final int expected = 20590; + assertEquals( expected, PropertyUtils.getDEFAULT_PORT() ); + } + + + @Test + public void testDefaultResultSetHoldability() { + final int expected = ResultSet.CLOSE_CURSORS_AT_COMMIT; + assertEquals( expected, PropertyUtils.getDEFAULT_RESULTSET_HOLDABILITY() ); + } + + + @Test + public void testDefaultHost() { + final String expected = "localhost"; + assertEquals( expected, PropertyUtils.getDEFAULT_HOST() ); + } 
+ + + @Test + public void testDefaultPort() { + final int expected = 20590; + assertEquals( expected, PropertyUtils.getDEFAULT_PORT() ); + } + + + @Test + public void testSqlLanguageName() { + final String expected = "sql"; + assertEquals( expected, PropertyUtils.getSQL_LANGUAGE_NAME() ); + } + + + @Test + public void testUsernameKey() { + final String expected = "user"; + assertEquals( expected, PropertyUtils.getUSERNAME_KEY() ); + } + + + @Test + public void testPasswordKey() { + final String expected = "password"; + assertEquals( expected, PropertyUtils.getPASSWORD_KEY() ); + } + + + @Test + public void testNamespaceKey() { + final String expected = "namespace"; + assertEquals( expected, PropertyUtils.getNAMESPACE_KEY() ); + } + + + @Test + public void testAutocommitKey() { + final String expected = "autocommit"; + assertEquals( expected, PropertyUtils.getAUTOCOMMIT_KEY() ); + } + + + @Test + public void testReadOnlyKey() { + final String expected = "readonly"; + assertEquals( expected, PropertyUtils.getREAD_ONLY_KEY() ); + } + + + @Test + public void testResultSetHoldabilityKey() { + final String expected = "holdability"; + assertEquals( expected, PropertyUtils.getRESULT_SET_HOLDABILITY_KEY() ); + } + + + @Test + public void testNetworkTimeoutKey() { + final String expected = "nwtimeout"; + assertEquals( expected, PropertyUtils.getNETWORK_TIMEOUT_KEY() ); + } + + + @Test + public void testTransactionIsolationKey() { + final String expected = "isolation"; + assertEquals( expected, PropertyUtils.getTRANSACTION_ISOLATION_KEY() ); + } + + + @Test + public void testTimezoneKey() { + final String expected = "timezone"; + assertEquals( expected, PropertyUtils.getTIMEZONE_KEY() ); + } + + + @Test + public void testStrictModeKey() { + final String expected = "strict"; + assertEquals( expected, PropertyUtils.getSTRICT_MODE_KEY() ); + } + +} diff --git a/src/test/java/org/polypheny/jdbc/types/TypedValueTest.java b/src/test/java/org/polypheny/jdbc/types/TypedValueTest.java 
new file mode 100644 index 00000000..f37e9bd3 --- /dev/null +++ b/src/test/java/org/polypheny/jdbc/types/TypedValueTest.java @@ -0,0 +1,957 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.jdbc.types; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.io.StringReader; +import java.math.BigDecimal; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.TimeZone; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.polypheny.jdbc.PrismInterfaceServiceException; +import 
org.polypheny.jdbc.properties.DriverProperties; +import org.polypheny.prism.ProtoValue; +import org.polypheny.prism.ProtoValue.ValueCase; + +public class TypedValueTest { + + + @Test + public void fromNCharacterStreamWithReader() throws SQLException { + String string = "test"; + Reader reader = new StringReader( string ); + TypedValue typedValue = TypedValue.fromNCharacterStream( reader ); + assertNotNull( typedValue ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( string, typedValue.asString() ); + } + + + @Test + public void fromNStringWithValidString() throws SQLException { + String value = "Hello World"; + TypedValue typedValue = TypedValue.fromNString( value ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( value, typedValue.asString() ); + } + + + @Test + public void fromNStringWithNullString() { + String value = null; + TypedValue typedValue = TypedValue.fromNString( value ); + assertEquals( ValueCase.NULL, typedValue.getValueCase() ); + } + + + @Test + public void fromRowIdWithNullRowId() { + assertThrows( SQLFeatureNotSupportedException.class, () -> TypedValue.fromRowId( null ) ); + } + + + @Test + public void fromRowIdWithValidRowId() { + RowId rowId = Mockito.mock( RowId.class ); + assertThrows( SQLFeatureNotSupportedException.class, () -> TypedValue.fromRowId( rowId ) ); + } + + + @Test + public void fromUrlWithNullUrl() { + assertThrows( SQLFeatureNotSupportedException.class, () -> TypedValue.fromUrl( null ) ); + } + + + @Test + public void fromUrlWithValidUrl() throws MalformedURLException { + URL url = new URL( "https://example.com" ); + assertThrows( SQLFeatureNotSupportedException.class, () -> TypedValue.fromUrl( url ) ); + } + + + @Test + public void fromArrayWithGivenValue() throws SQLException { + Array value = Mockito.mock( Array.class ); + TypedValue typedValue = TypedValue.fromArray( value ); + + assertEquals( ValueCase.LIST, typedValue.getValueCase() ); + assertEquals( value, 
typedValue.asArray() ); + } + + + @Test + public void fromClobWithClobValue() throws SQLException { + String content = "This is awesome!"; + Clob clob = new PolyClob( content ); + TypedValue typedValue = TypedValue.fromClob( clob ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( content, typedValue.asString() ); + } + + + @Test + public void fromNull() { + TypedValue typedValue = TypedValue.fromNull(); + assertNotNull( typedValue ); + assertEquals( ValueCase.NULL, typedValue.getValueCase() ); + assertTrue( typedValue.isNull() ); + } + + + @Test + public void fromBlobWithBlobValue() throws SQLException { + byte[] data = { 2, 34, 5, 7 }; + Blob blob = new PolyBlob( data ); + TypedValue typedValue = TypedValue.fromBlob( blob ); + assertEquals( ValueCase.FILE, typedValue.getValueCase() ); + assertEquals( blob, typedValue.asBlob() ); + } + + + @Test + public void fromBlobWithBlobValueAsByteThrows() throws SQLException { + byte[] data = { 2, 34, 5, 7 }; + Blob blob = new PolyBlob( data ); + TypedValue typedValue = TypedValue.fromBlob( blob ); + assertEquals( ValueCase.FILE, typedValue.getValueCase() ); + assertThrows( PrismInterfaceServiceException.class, typedValue::asBytes ); + } + + + @Test + public void fromCharacterStreamWithLength1() throws SQLException { + Reader reader = new StringReader( "Hello World" ); + int length = 11; + + TypedValue typedValue = TypedValue.fromCharacterStream( reader, length ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( "Hello World", typedValue.asString() ); + } + + + @Test + public void fromCharacterStreamWithLength2() throws SQLException { + Reader reader = new StringReader( "Hello World" ); + long length = 11; + + TypedValue typedValue = TypedValue.fromCharacterStream( reader, length ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( "Hello World", typedValue.asString() ); + 
} + + + @Test + public void fromCharacterStreamWithValidStream() throws SQLException { + Reader reader = new StringReader( "Hello World" ); + + TypedValue typedValue = TypedValue.fromCharacterStream( reader, 11 ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( "Hello World", typedValue.asString() ); + } + + + @Test + public void fromCharacterStreamThrowsSQLException() throws SQLException { + String inputString = "Test"; + InputStream inputStream = new ByteArrayInputStream( inputString.getBytes( StandardCharsets.UTF_8 ) ); + Reader reader = new InputStreamReader( inputStream ); + TypedValue typedValue = TypedValue.fromCharacterStream( reader ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( inputString, typedValue.asString() ); + } + + + @Test + public void fromBinaryStreamWithValidStream2() throws SQLException { + InputStream stream = new ByteArrayInputStream( new byte[]{ 1, 2, 3, 4, 5 } ); + TypedValue typedValue = TypedValue.fromBinaryStream( stream ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.BINARY, typedValue.getValueCase() ); + assertArrayEquals( new byte[]{ 1, 2, 3, 4, 5 }, typedValue.asBytes() ); + } + + + @Test + public void fromBinaryStreamWithValidStream1() throws SQLException { + InputStream stream = new ByteArrayInputStream( "Hello World".getBytes( StandardCharsets.UTF_8 ) ); + TypedValue typedValue = TypedValue.fromBinaryStream( stream ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.BINARY, typedValue.getValueCase() ); + assertArrayEquals( "Hello World".getBytes( StandardCharsets.UTF_8 ), typedValue.asBytes() ); + } + + + @Test + public void fromBinaryStreamWithValidStream3() throws SQLException { + InputStream stream = new ByteArrayInputStream( "Hello World".getBytes( StandardCharsets.UTF_8 ) ); + TypedValue typedValue = TypedValue.fromBinaryStream( stream, 11 ); + + assertNotNull( typedValue ); + assertEquals( 
ValueCase.BINARY, typedValue.getValueCase() ); + assertArrayEquals( "Hello World".getBytes( StandardCharsets.UTF_8 ), typedValue.asBytes() ); + } + + + @Test + public void fromBinaryStreamWithValidStream4() throws SQLException { + InputStream stream = new ByteArrayInputStream( "Hello World".getBytes( StandardCharsets.UTF_8 ) ); + TypedValue typedValue = TypedValue.fromBinaryStream( stream, 11L ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.BINARY, typedValue.getValueCase() ); + assertArrayEquals( "Hello World".getBytes( StandardCharsets.UTF_8 ), typedValue.asBytes() ); + } + + + @Test() + public void fromBinaryStreamWhenStreamIsInvalidThenThrowIOException() { + assertThrows( NullPointerException.class, () -> TypedValue.fromBinaryStream( null, 0 ) ); + } + + + @Test + public void fromUnicodeStreamWithValidStream() throws SQLException { + InputStream stream = new ByteArrayInputStream( "Hello World".getBytes( StandardCharsets.UTF_8 ) ); + TypedValue typedValue = TypedValue.fromUnicodeStream( stream, 0 ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( "Hello World", typedValue.asString() ); + } + + + @Test() + public void fromUnicodeStreamWithInvalidStreamThrowsIOException() { + assertThrows( NullPointerException.class, () -> TypedValue.fromUnicodeStream( null, 0 ) ); + } + + + @Test() + public void fromAsciiStreamWithInvalidInputStream() { + assertThrows( NullPointerException.class, () -> TypedValue.fromAsciiStream( null, 0 ) ); + } + + + @Test + public void fromAsciiStreamWithValidInputStreamAndLength() throws SQLException { + String inputString = "Hello, World!"; + InputStream inputStream = new ByteArrayInputStream( inputString.getBytes( StandardCharsets.US_ASCII ) ); + + TypedValue typedValue = TypedValue.fromAsciiStream( inputStream, inputString.length() ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( inputString, 
typedValue.asString() ); + } + + + @Test + public void fromAsciiStreamWithLength() throws SQLException { + String input = "Hello, World!"; + InputStream inputStream = new ByteArrayInputStream( input.getBytes( StandardCharsets.US_ASCII ) ); + + TypedValue typedValue = TypedValue.fromAsciiStream( inputStream, input.length() ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( input, typedValue.asString() ); + } + + + @Test + public void fromAsciiStreamWithValidStream() throws SQLException { + InputStream stream = new ByteArrayInputStream( "Hello World".getBytes( StandardCharsets.US_ASCII ) ); + TypedValue typedValue = TypedValue.fromAsciiStream( stream ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( "Hello World", typedValue.asString() ); + } + + + @Test() + public void fromTimeWithNullCalendarThrowsException() { + assertThrows( NullPointerException.class, () -> TypedValue.fromTime( new Time( 10, 30, 0 ), null ) ); + } + + + @Test + public void timeZoneTest() { + Time input = new Time( 123456 ); + Time input2 = new Time( input.getTime() ); + assertEquals( input, input2 ); + } + + + @Test + public void stringTrimmingTest() throws SQLException { + TypedValue value = TypedValue.fromString( "123456789" ); + TypedValue trimmed = value.getTrimmed( 4 ); + assertEquals( "1234", trimmed.asString() ); + } + + + @Test + public void binaryTrimmingTest() throws SQLException { + byte[] data = { 1, 2, 3, 4, 5, 6, 7, 8, 9 }; + byte[] expected = { 1, 2, 3, 4 }; + TypedValue value = TypedValue.fromBytes( data ); + TypedValue trimmed = value.getTrimmed( 4 ); + assertArrayEquals( expected, trimmed.asBytes() ); + } + + + @Test + public void asBytesReturnsProperValue() throws SQLException { + byte[] data = { 1, 2, 3, 4, 5, 6, 7, 8, 9 }; + TypedValue value = TypedValue.fromBytes( data ); + assertArrayEquals( data, value.asBytes() ); + } + + + @Test + public void 
fromTimeWithValidTimeAndCalendar() throws SQLException { + Time time = new Time( 12, 30, 0 ); + Calendar calendar = Calendar.getInstance(); + calendar.set( Calendar.YEAR, 2022 ); + calendar.set( Calendar.MONTH, Calendar.JANUARY ); + calendar.set( Calendar.DAY_OF_MONTH, 1 ); + calendar.setTimeZone( TimeZone.getTimeZone( "UTC" ) ); + + TypedValue typedValue = TypedValue.fromTime( time, calendar ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.TIME, typedValue.getValueCase() ); + assertEquals( time, typedValue.asTime() ); + } + + + @Test() + public void fromTimeWithNullTimeAndCalendar() { + Calendar calendar = Calendar.getInstance(); + calendar.set( Calendar.YEAR, 2022 ); + calendar.set( Calendar.MONTH, Calendar.JANUARY ); + calendar.set( Calendar.DAY_OF_MONTH, 1 ); + + assertThrows( NullPointerException.class, () -> TypedValue.fromTime( null, calendar ) ); + } + + + @Test + public void fromTimeWithNullTimeValue() throws SQLException { + TypedValue typedValue = TypedValue.fromTime( null ); + + assertNotNull( typedValue ); + assertTrue( typedValue.isNull() ); + assertEquals( ValueCase.NULL, typedValue.getValueCase() ); + assertNull( typedValue.asTime() ); + } + + + @Test + public void fromTimeWithValidTimeValue() throws SQLException { + Time time = new Time( 12, 30, 0 ); + TypedValue typedValue = TypedValue.fromTime( time ); + + assertNotNull( typedValue ); + assertEquals( ValueCase.TIME, typedValue.getValueCase() ); + assertEquals( time, typedValue.asTime() ); + } + + + @Test + public void asTimeWithValidValue() throws SQLException { + Time time = new Time( 12, 30, 0 ); + TypedValue typedValue = TypedValue.fromTime( time ); + assertEquals( time, typedValue.asTime() ); + } + + + @Test() + public void fromDateWhenNullCalendarProvidedThenThrowException() { + assertThrows( NullPointerException.class, () -> TypedValue.fromDate( new Date( 2022, 1, 1 ), null ) ); + } + + + @Test + public void fromDateWhenValidDateAndCalendarProvided() throws SQLException { + 
Date date = new Date( 2021, Calendar.JANUARY, 1 ); + Calendar calendar = Calendar.getInstance(); + calendar.set( 2022, Calendar.JANUARY, 1 ); + calendar.setTimeZone( TimeZone.getTimeZone( "UTC" ) ); + + TypedValue typedValue = TypedValue.fromDate( date, calendar ); + + assertEquals( ValueCase.DATE, typedValue.getValueCase() ); + assertEquals( date, typedValue.asDate() ); + } + + + @Test() + public void fromDateWithNullDateAndCalendarProvided() { + Calendar calendar = Calendar.getInstance(); + calendar.set( 2022, Calendar.JANUARY, 1 ); + + assertThrows( NullPointerException.class, () -> TypedValue.fromDate( null, calendar ) ); + } + + + @Test + public void fromDateWithNullDate() throws SQLException { + TypedValue typedValue = TypedValue.fromDate( null ); + + assertNotNull( typedValue ); + assertTrue( typedValue.isNull() ); + assertEquals( ValueCase.NULL, typedValue.getValueCase() ); + assertNull( typedValue.asDate() ); + } + + + @Test + public void fromDateWithValidDate() throws SQLException { + Date date = Date.valueOf( "2022-01-01" ); + TypedValue typedValue = TypedValue.fromDate( date ); + assertEquals( ValueCase.DATE, typedValue.getValueCase() ); + assertEquals( date, typedValue.asDate() ); + } + + + @Test + public void fromObjectWithValidDate() throws SQLException { + Date date = Date.valueOf( "2022-01-01" ); + TypedValue typedValue = TypedValue.fromObject( date ); + assertEquals( ValueCase.DATE, typedValue.getValueCase() ); + assertEquals( date, typedValue.asDate() ); + } + + + @Test + public void fromBytesWithByteArray() throws SQLException { + byte[] bytes = { 1, 2, 3, 4, 5 }; + TypedValue typedValue = TypedValue.fromBytes( bytes ); + assertEquals( ValueCase.BINARY, typedValue.getValueCase() ); + assertEquals( bytes, typedValue.asBytes() ); + } + + + @Test + public void fromStringWithEmptyString() throws SQLException { + TypedValue typedValue = TypedValue.fromString( "" ); + + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( "", 
typedValue.asString() ); + assertFalse( typedValue.isNull() ); + } + + + @Test + public void fromStringWithValidString() throws SQLException { + TypedValue typedValue = TypedValue.fromString( "12345" ); + assertEquals( ValueCase.STRING, typedValue.getValueCase() ); + assertEquals( "12345", typedValue.asString() ); + } + + + @Test + public void fromStringWithNullString() throws SQLException { + TypedValue typedValue = TypedValue.fromString( null ); + + assertTrue( typedValue.isNull() ); + assertEquals( ValueCase.NULL, typedValue.getValueCase() ); + assertNull( typedValue.asString() ); + } + + + @Test + public void fromBigDecimalWithValidInput() throws SQLException { + BigDecimal value = new BigDecimal( "10.5" ); + TypedValue typedValue = TypedValue.fromBigDecimal( value ); + + assertEquals( ValueCase.BIG_DECIMAL, typedValue.getValueCase() ); + assertEquals( value, typedValue.asBigDecimal() ); + } + + + @Test + public void fromBigDecimalWithNullInput() throws SQLException { + TypedValue typedValue = TypedValue.fromBigDecimal( null ); + assertTrue( typedValue.isNull() ); + assertEquals( ValueCase.NULL, typedValue.getValueCase() ); + assertNull( typedValue.asBigDecimal() ); + } + + + @Test + public void fromDoubleWithValidInput() throws SQLException { + TypedValue typedValue = TypedValue.fromDouble( 3.14 ); + + assertEquals( ValueCase.DOUBLE, typedValue.getValueCase() ); + assertEquals( 3.14, typedValue.asDouble() ); + } + + + @Test + public void fromDoubleWithNegativeInput() throws SQLException { + TypedValue typedValue = TypedValue.fromDouble( -10.5 ); + assertEquals( ValueCase.DOUBLE, typedValue.getValueCase() ); + assertEquals( -10.5, typedValue.asDouble() ); + } + + + @Test + public void fromDoubleWithZeroInput() throws SQLException { + TypedValue typedValue = TypedValue.fromDouble( 0.0 ); + + assertEquals( ValueCase.DOUBLE, typedValue.getValueCase() ); + assertEquals( 0.0, typedValue.asDouble() ); + } + + + @Test + public void fromFloatWithValidFloatValue() 
throws SQLException { + TypedValue typedValue = TypedValue.fromFloat( 3.14f ); + + assertEquals( ValueCase.FLOAT, typedValue.getValueCase() ); + assertEquals( 3.14f, typedValue.asFloat() ); + } + + + @Test + public void fromLongWithValidInput() throws SQLException { + TypedValue typedValue = TypedValue.fromLong( 1234567890L ); + + assertEquals( ValueCase.LONG, typedValue.getValueCase() ); + assertEquals( 1234567890L, typedValue.asLong() ); + } + + + @Test + public void fromIntWithValidInteger() throws SQLException { + TypedValue typedValue = TypedValue.fromInteger( 10 ); + + assertEquals( ValueCase.INTEGER, typedValue.getValueCase() ); + assertEquals( 10, typedValue.asInt() ); + assertFalse( typedValue.isNull() ); + } + + + @Test + public void fromShortWithValidShortValue() throws SQLException { + TypedValue typedValue = TypedValue.fromShort( (short) 10 ); + + assertEquals( ValueCase.INTEGER, typedValue.getValueCase() ); + assertEquals( (short) 10, typedValue.asShort() ); + assertFalse( typedValue.isNull() ); + assertFalse( typedValue.isNull() ); + } + + + @Test + public void fromByteWithValidByteValue() throws SQLException { + TypedValue typedValue = TypedValue.fromByte( (byte) 10 ); + + assertEquals( ValueCase.INTEGER, typedValue.getValueCase() ); + assertEquals( (byte) 10, typedValue.asByte() ); + } + + + @Test + public void fromBooleanWithFalseValue() throws SQLException { + TypedValue typedValue = TypedValue.fromBoolean( false ); + + assertEquals( ValueCase.BOOLEAN, typedValue.getValueCase() ); + assertFalse( typedValue.asBoolean() ); + } + + + @Test + public void fromBooleanWithTrueValue() throws SQLException { + TypedValue typedValue = TypedValue.fromBoolean( true ); + + assertEquals( ValueCase.BOOLEAN, typedValue.getValueCase() ); + assertTrue( typedValue.asBoolean() ); + } + + + @Test + public void fromBooleanWithValidBooleanValue() throws SQLException { + TypedValue typedValue = TypedValue.fromBoolean( true ); + + assertEquals( ValueCase.BOOLEAN, 
typedValue.getValueCase() ); + assertTrue( typedValue.asBoolean() ); + } + + + @Test + public void fromPolyIntervalMonths() throws SQLException { + PolyInterval interval = new PolyInterval( 23, 0 ); + TypedValue value = TypedValue.fromInterval( interval ); + assertFalse( value.isNull() ); + assertEquals( ValueCase.INTERVAL, value.getValueCase() ); + assertEquals( interval, value.asInterval() ); + } + + + @Test + public void fromPolyIntervalMillis() throws SQLException { + PolyInterval interval = new PolyInterval( 0, 23 ); + TypedValue value = TypedValue.fromInterval( interval ); + assertFalse( value.isNull() ); + assertEquals( ValueCase.INTERVAL, value.getValueCase() ); + assertEquals( interval, value.asInterval() ); + } + + + @Test + public void fromPolyIntervalNull() throws SQLException { + TypedValue value = TypedValue.fromInterval( null ); + assertTrue( value.isNull() ); + assertEquals( ValueCase.NULL, value.getValueCase() ); + assertNull( value.asInterval() ); + } + + + @Test + public void fromPolyDocument() throws SQLException { + PolyDocument document = new PolyDocument(); + document.put( "firstValue", TypedValue.fromBoolean( true ) ); + document.put( "secondValue", TypedValue.fromDouble( 12.345 ) ); + document.put( "thirdValue", TypedValue.fromInterval( new PolyInterval( 69, 0 ) ) ); + + TypedValue value = TypedValue.fromDocument( document ); + assertFalse( value.isNull() ); + assertEquals( ValueCase.DOCUMENT, value.getValueCase() ); + assertEquals( document, value.asDocument() ); + } + + + @Test + public void fromPolyDocumentNull() throws SQLException { + TypedValue value = TypedValue.fromDocument( null ); + assertTrue( value.isNull() ); + assertEquals( ValueCase.NULL, value.getValueCase() ); + assertNull( value.asDocument() ); + } + + + @Test + void booleanTest() throws SQLException { + boolean value = true; + TypedValue typedValue1 = TypedValue.fromBoolean( true ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.BOOLEAN, 
protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value, typedValue2.asBoolean() ); + } + + + @Test + void integerTest() throws SQLException { + int value = 1234; + TypedValue typedValue1 = TypedValue.fromInteger( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.INTEGER, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value, typedValue2.asInt() ); + } + + + @Test + void longTest() throws SQLException { + long value = 1234L; + TypedValue typedValue1 = TypedValue.fromLong( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.LONG, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value, typedValue2.asLong() ); + } + + + @Test + void binaryTest() throws SQLException { + byte[] value = new byte[]{ 1, 2, 3, 4 }; + TypedValue typedValue1 = TypedValue.fromBytes( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.BINARY, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertArrayEquals( value, typedValue2.asBytes() ); + } + + + @Test + void binaryTestAsObject() throws SQLException { + byte[] value = new byte[]{ 1, 2, 3, 4 }; + TypedValue typedValue1 = TypedValue.fromBytes( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.BINARY, protoValue.getValueCase() ); + + assertArrayEquals( value, (byte[]) new TypedValue( protoValue ).asObject() ); + } + + + @Test + void dateTest() throws SQLException { + Date value = new Date( 49852800000L ); + TypedValue typedValue1 = TypedValue.fromDate( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.DATE, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value, typedValue2.asDate() ); + } + + + @Test + 
void doubleTest() throws SQLException { + double value = 1.234; + TypedValue typedValue1 = TypedValue.fromDouble( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.DOUBLE, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value, typedValue2.asDouble() ); + } + + + @Test + void floatTest() throws SQLException { + float value = 1.234f; + TypedValue typedValue1 = TypedValue.fromFloat( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.FLOAT, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value, typedValue2.asFloat() ); + } + + + @Test + void nullTest() throws SQLException { + TypedValue typedValue1 = TypedValue.fromNull(); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.NULL, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertNull( typedValue2.asObject() ); + } + + + @Test + void stringTest() throws SQLException { + String value = "a string"; + TypedValue typedValue1 = TypedValue.fromString( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.STRING, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value, typedValue2.asString() ); + } + + + @Test + void timeTest() throws SQLException { + Time value = new Time( 234975L ); + TypedValue typedValue1 = TypedValue.fromTime( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.TIME, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + long millis = typedValue2.asTime().getTime(); + millis -= DriverProperties.getDEFAULT_TIMEZONE().getOffset( millis ); + assertEquals( 234975L, millis ); + } + + + @Test + void timestampTest() throws SQLException { + Timestamp value = new Timestamp( 47285720L ); + TypedValue 
typedValue1 = TypedValue.fromTimestamp( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.TIMESTAMP, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + long millis = typedValue2.asTimestamp().getTime(); + millis -= DriverProperties.getDEFAULT_TIMEZONE().getOffset( millis ); + assertEquals( 47285720L, millis ); + } + + + @Test + void bigDecimalTest() throws SQLException { + BigDecimal value = new BigDecimal( "3457980.32453" ); + TypedValue typedValue1 = TypedValue.fromBigDecimal( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.BIG_DECIMAL, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value, typedValue2.asBigDecimal() ); + } + + + @Test + void listTest() throws SQLException { + ArrayList values = new ArrayList<>(); + values.add( TypedValue.fromInteger( 1 ) ); + values.add( TypedValue.fromInteger( 2 ) ); + values.add( TypedValue.fromInteger( 3 ) ); + Array value = new PolyArray( "INTEGER", values ); + + TypedValue typedValue1 = TypedValue.fromArray( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.LIST, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertArrayEquals( (Object[]) value.getArray(), (Object[]) typedValue2.asArray().getArray() ); + } + + + @Test + void intervalTest() throws SQLException { + PolyInterval value = new PolyInterval( 32, 0 ); + TypedValue typedValue1 = TypedValue.fromInterval( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.INTERVAL, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value, typedValue2.asInterval() ); + } + + + @Test + void intervalTestAsObject() throws SQLException { + PolyInterval value = new PolyInterval( 32, 0 ); + TypedValue typedValue1 = TypedValue.fromInterval( value ); + 
ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.INTERVAL, protoValue.getValueCase() ); + + assertEquals( value, new TypedValue( protoValue ).asObject() ); + } + + + @Test + void documentTest() throws SQLException { + PolyDocument value = new PolyDocument(); + value.put( "firstValue", TypedValue.fromBoolean( true ) ); + value.put( "secondValue", TypedValue.fromDouble( 12.345 ) ); + value.put( "thirdValue", TypedValue.fromInterval( new PolyInterval( 69, 0 ) ) ); + + TypedValue typedValue1 = TypedValue.fromDocument( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.DOCUMENT, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value.get( "firstValue" ).asBoolean(), typedValue2.asDocument().get( "firstValue" ).asBoolean() ); + assertEquals( value.get( "secondValue" ).asDouble(), typedValue2.asDocument().get( "secondValue" ).asDouble() ); + assertEquals( value.get( "thirdValue" ).asInterval(), typedValue2.asDocument().get( "thirdValue" ).asInterval() ); + } + + + @Test + void documentTestAsObject() throws SQLException { + PolyDocument value = new PolyDocument(); + value.put( "firstValue", TypedValue.fromBoolean( true ) ); + value.put( "secondValue", TypedValue.fromDouble( 12.345 ) ); + value.put( "thirdValue", TypedValue.fromInterval( new PolyInterval( 69, 0 ) ) ); + + TypedValue typedValue1 = TypedValue.fromDocument( value ); + ProtoValue protoValue = typedValue1.serialize(); + + PolyDocument document = (PolyDocument) new TypedValue( protoValue ).asObject(); + assertEquals( value.get( "firstValue" ).asBoolean(), document.get( "firstValue" ).asBoolean() ); + assertEquals( value.get( "secondValue" ).asDouble(), document.get( "secondValue" ).asDouble() ); + assertEquals( value.get( "thirdValue" ).asInterval(), document.get( "thirdValue" ).asInterval() ); + } + + + @Test + void fileTest() throws SQLException { + Blob value = new PolyBlob( new byte[]{ 1, 2, 
3, 4, 5 } ); + TypedValue typedValue1 = TypedValue.fromBlob( value ); + ProtoValue protoValue = typedValue1.serialize(); + + assertEquals( ValueCase.FILE, protoValue.getValueCase() ); + + TypedValue typedValue2 = new TypedValue( protoValue ); + assertArrayEquals( value.getBytes( 1, 5 ), typedValue2.asBlob().getBytes( 1, 5 ) ); + } + + + @Test + void getLengthTest() throws SQLException { + String value = "12345678"; + TypedValue typedValue1 = TypedValue.fromString( value ); + ProtoValue protoValue = typedValue1.serialize(); + TypedValue typedValue2 = new TypedValue( protoValue ); + assertEquals( value.length(), typedValue2.getLength() ); + } + +}