diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 00000000000..74f5825021a
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,36 @@
+{
+ "name": "Java",
+
+ "image": "mcr.microsoft.com/devcontainers/java:0-17",
+
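+    // Note: "version": "none" keeps the JDK that ships with the base image (17); the feature below
+    // is only used to add Maven. docker-in-docker lets the container build and run Docker/compose setups.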
+ "features": {
+ "ghcr.io/devcontainers/features/java:1": {
+ "version": "none",
+ "installMaven": "true",
+ "installGradle": "false"
+ },
+ "ghcr.io/devcontainers/features/docker-in-docker:2": {}
+ },
+
+ // Use 'forwardPorts' to make a list of ports inside the container available locally.
+ // "forwardPorts": [],
+
+ // Use 'postCreateCommand' to run commands after the container is created.
+ // "postCreateCommand": "java -version",
+
+ "customizations": {
+ "vscode": {
+      "extensions": [
+ "vscjava.vscode-java-pack",
+ "vscjava.vscode-maven",
+ "vscjava.vscode-java-debug",
+ "EditorConfig.EditorConfig",
+ "ms-azuretools.vscode-docker",
+ "antfu.vite",
+ "ms-kubernetes-tools.vscode-kubernetes-tools",
+ "github.vscode-pull-request-github"
+ ]
+ }
+ }
+
+}
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000000..4e23fc5467f
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,286 @@
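+# Keys prefixed with "ij_" are IntelliJ IDEA extensions to EditorConfig; other editors ignore them.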
+[*]
+charset = utf-8
+end_of_line = lf
+indent_size = 4
+indent_style = space
+insert_final_newline = true
+max_line_length = 120
+tab_width = 4
+ij_continuation_indent_size = 8
+ij_formatter_off_tag = @formatter:off
+ij_formatter_on_tag = @formatter:on
+ij_formatter_tags_enabled = true
+ij_smart_tabs = false
+ij_visual_guides = none
+ij_wrap_on_typing = false
+trim_trailing_whitespace = true
+
+[*.java]
+indent_size = 2
+ij_continuation_indent_size = 4
+ij_java_align_consecutive_assignments = false
+ij_java_align_consecutive_variable_declarations = false
+ij_java_align_group_field_declarations = false
+ij_java_align_multiline_annotation_parameters = false
+ij_java_align_multiline_array_initializer_expression = false
+ij_java_align_multiline_assignment = false
+ij_java_align_multiline_binary_operation = false
+ij_java_align_multiline_chained_methods = false
+ij_java_align_multiline_extends_list = false
+ij_java_align_multiline_for = true
+ij_java_align_multiline_method_parentheses = false
+ij_java_align_multiline_parameters = true
+ij_java_align_multiline_parameters_in_calls = false
+ij_java_align_multiline_parenthesized_expression = false
+ij_java_align_multiline_records = true
+ij_java_align_multiline_resources = true
+ij_java_align_multiline_ternary_operation = false
+ij_java_align_multiline_text_blocks = false
+ij_java_align_multiline_throws_list = false
+ij_java_align_subsequent_simple_methods = false
+ij_java_align_throws_keyword = false
+ij_java_align_types_in_multi_catch = true
+ij_java_annotation_parameter_wrap = off
+ij_java_array_initializer_new_line_after_left_brace = false
+ij_java_array_initializer_right_brace_on_new_line = false
+ij_java_array_initializer_wrap = normal
+ij_java_assert_statement_colon_on_next_line = false
+ij_java_assert_statement_wrap = normal
+ij_java_assignment_wrap = normal
+ij_java_binary_operation_sign_on_next_line = false
+ij_java_binary_operation_wrap = normal
+ij_java_blank_lines_after_anonymous_class_header = 0
+ij_java_blank_lines_after_class_header = 0
+ij_java_blank_lines_after_imports = 1
+ij_java_blank_lines_after_package = 1
+ij_java_blank_lines_around_class = 1
+ij_java_blank_lines_around_field = 0
+ij_java_blank_lines_around_field_in_interface = 0
+ij_java_blank_lines_around_initializer = 1
+ij_java_blank_lines_around_method = 1
+ij_java_blank_lines_around_method_in_interface = 1
+ij_java_blank_lines_before_class_end = 0
+ij_java_blank_lines_before_imports = 1
+ij_java_blank_lines_before_method_body = 0
+ij_java_blank_lines_before_package = 1
+ij_java_block_brace_style = end_of_line
+ij_java_block_comment_add_space = false
+ij_java_block_comment_at_first_column = true
+ij_java_builder_methods = none
+ij_java_call_parameters_new_line_after_left_paren = false
+ij_java_call_parameters_right_paren_on_new_line = false
+ij_java_call_parameters_wrap = normal
+ij_java_case_statement_on_separate_line = true
+ij_java_catch_on_new_line = false
+ij_java_class_annotation_wrap = split_into_lines
+ij_java_class_brace_style = end_of_line
+ij_java_class_count_to_use_import_on_demand = 999
+ij_java_class_names_in_javadoc = 1
+ij_java_do_not_indent_top_level_class_members = false
+ij_java_do_not_wrap_after_single_annotation = false
+ij_java_do_not_wrap_after_single_annotation_in_parameter = false
+ij_java_do_while_brace_force = always
+ij_java_doc_add_blank_line_after_description = true
+ij_java_doc_add_blank_line_after_param_comments = false
+ij_java_doc_add_blank_line_after_return = false
+ij_java_doc_add_p_tag_on_empty_lines = true
+ij_java_doc_align_exception_comments = true
+ij_java_doc_align_param_comments = true
+ij_java_doc_do_not_wrap_if_one_line = false
+ij_java_doc_enable_formatting = true
+ij_java_doc_enable_leading_asterisks = true
+ij_java_doc_indent_on_continuation = false
+ij_java_doc_keep_empty_lines = true
+ij_java_doc_keep_empty_parameter_tag = true
+ij_java_doc_keep_empty_return_tag = true
+ij_java_doc_keep_empty_throws_tag = true
+ij_java_doc_keep_invalid_tags = true
+ij_java_doc_param_description_on_new_line = false
+ij_java_doc_preserve_line_breaks = false
+ij_java_doc_use_throws_not_exception_tag = true
+ij_java_else_on_new_line = false
+ij_java_entity_dd_suffix = EJB
+ij_java_entity_eb_suffix = Bean
+ij_java_entity_hi_suffix = Home
+ij_java_entity_lhi_prefix = Local
+ij_java_entity_lhi_suffix = Home
+ij_java_entity_li_prefix = Local
+ij_java_entity_pk_class = java.lang.String
+ij_java_entity_vo_suffix = VO
+ij_java_enum_constants_wrap = normal
+ij_java_extends_keyword_wrap = normal
+ij_java_extends_list_wrap = normal
+ij_java_field_annotation_wrap = split_into_lines
+ij_java_finally_on_new_line = false
+ij_java_for_brace_force = always
+ij_java_for_statement_new_line_after_left_paren = false
+ij_java_for_statement_right_paren_on_new_line = false
+ij_java_for_statement_wrap = normal
+ij_java_generate_final_locals = false
+ij_java_generate_final_parameters = false
+ij_java_if_brace_force = always
+ij_java_imports_layout = $*,|,*
+ij_java_indent_case_from_switch = true
+ij_java_insert_inner_class_imports = false
+ij_java_insert_override_annotation = true
+ij_java_keep_blank_lines_before_right_brace = 2
+ij_java_keep_blank_lines_between_package_declaration_and_header = 2
+ij_java_keep_blank_lines_in_code = 2
+ij_java_keep_blank_lines_in_declarations = 2
+ij_java_keep_builder_methods_indents = false
+ij_java_keep_control_statement_in_one_line = true
+ij_java_keep_first_column_comment = true
+ij_java_keep_indents_on_empty_lines = false
+ij_java_keep_line_breaks = true
+ij_java_keep_multiple_expressions_in_one_line = false
+ij_java_keep_simple_blocks_in_one_line = false
+ij_java_keep_simple_classes_in_one_line = false
+ij_java_keep_simple_lambdas_in_one_line = false
+ij_java_keep_simple_methods_in_one_line = false
+ij_java_label_indent_absolute = false
+ij_java_label_indent_size = 0
+ij_java_lambda_brace_style = end_of_line
+ij_java_layout_static_imports_separately = true
+ij_java_line_comment_add_space = false
+ij_java_line_comment_add_space_on_reformat = false
+ij_java_line_comment_at_first_column = true
+ij_java_message_dd_suffix = EJB
+ij_java_message_eb_suffix = Bean
+ij_java_method_annotation_wrap = split_into_lines
+ij_java_method_brace_style = end_of_line
+ij_java_method_call_chain_wrap = normal
+ij_java_method_parameters_new_line_after_left_paren = false
+ij_java_method_parameters_right_paren_on_new_line = false
+ij_java_method_parameters_wrap = normal
+ij_java_modifier_list_wrap = false
+ij_java_multi_catch_types_wrap = normal
+ij_java_names_count_to_use_import_on_demand = 999
+ij_java_new_line_after_lparen_in_annotation = false
+ij_java_new_line_after_lparen_in_record_header = false
+ij_java_parameter_annotation_wrap = normal
+ij_java_parentheses_expression_new_line_after_left_paren = false
+ij_java_parentheses_expression_right_paren_on_new_line = false
+ij_java_place_assignment_sign_on_next_line = false
+ij_java_prefer_longer_names = true
+ij_java_prefer_parameters_wrap = false
+ij_java_record_components_wrap = normal
+ij_java_repeat_synchronized = true
+ij_java_replace_instanceof_and_cast = false
+ij_java_replace_null_check = true
+ij_java_replace_sum_lambda_with_method_ref = true
+ij_java_resource_list_new_line_after_left_paren = false
+ij_java_resource_list_right_paren_on_new_line = false
+ij_java_resource_list_wrap = normal
+ij_java_rparen_on_new_line_in_annotation = false
+ij_java_rparen_on_new_line_in_record_header = false
+ij_java_session_dd_suffix = EJB
+ij_java_session_eb_suffix = Bean
+ij_java_session_hi_suffix = Home
+ij_java_session_lhi_prefix = Local
+ij_java_session_lhi_suffix = Home
+ij_java_session_li_prefix = Local
+ij_java_session_si_suffix = Service
+ij_java_space_after_closing_angle_bracket_in_type_argument = false
+ij_java_space_after_colon = true
+ij_java_space_after_comma = true
+ij_java_space_after_comma_in_type_arguments = true
+ij_java_space_after_for_semicolon = true
+ij_java_space_after_quest = true
+ij_java_space_after_type_cast = true
+ij_java_space_before_annotation_array_initializer_left_brace = false
+ij_java_space_before_annotation_parameter_list = false
+ij_java_space_before_array_initializer_left_brace = true
+ij_java_space_before_catch_keyword = true
+ij_java_space_before_catch_left_brace = true
+ij_java_space_before_catch_parentheses = true
+ij_java_space_before_class_left_brace = true
+ij_java_space_before_colon = true
+ij_java_space_before_colon_in_foreach = true
+ij_java_space_before_comma = false
+ij_java_space_before_do_left_brace = true
+ij_java_space_before_else_keyword = true
+ij_java_space_before_else_left_brace = true
+ij_java_space_before_finally_keyword = true
+ij_java_space_before_finally_left_brace = true
+ij_java_space_before_for_left_brace = true
+ij_java_space_before_for_parentheses = true
+ij_java_space_before_for_semicolon = false
+ij_java_space_before_if_left_brace = true
+ij_java_space_before_if_parentheses = true
+ij_java_space_before_method_call_parentheses = false
+ij_java_space_before_method_left_brace = true
+ij_java_space_before_method_parentheses = false
+ij_java_space_before_opening_angle_bracket_in_type_parameter = false
+ij_java_space_before_quest = true
+ij_java_space_before_switch_left_brace = true
+ij_java_space_before_switch_parentheses = true
+ij_java_space_before_synchronized_left_brace = true
+ij_java_space_before_synchronized_parentheses = true
+ij_java_space_before_try_left_brace = true
+ij_java_space_before_try_parentheses = true
+ij_java_space_before_type_parameter_list = false
+ij_java_space_before_while_keyword = true
+ij_java_space_before_while_left_brace = true
+ij_java_space_before_while_parentheses = true
+ij_java_space_inside_one_line_enum_braces = false
+ij_java_space_within_empty_array_initializer_braces = false
+ij_java_space_within_empty_method_call_parentheses = false
+ij_java_space_within_empty_method_parentheses = false
+ij_java_spaces_around_additive_operators = true
+ij_java_spaces_around_annotation_eq = true
+ij_java_spaces_around_assignment_operators = true
+ij_java_spaces_around_bitwise_operators = true
+ij_java_spaces_around_equality_operators = true
+ij_java_spaces_around_lambda_arrow = true
+ij_java_spaces_around_logical_operators = true
+ij_java_spaces_around_method_ref_dbl_colon = false
+ij_java_spaces_around_multiplicative_operators = true
+ij_java_spaces_around_relational_operators = true
+ij_java_spaces_around_shift_operators = true
+ij_java_spaces_around_type_bounds_in_type_parameters = true
+ij_java_spaces_around_unary_operator = false
+ij_java_spaces_within_angle_brackets = false
+ij_java_spaces_within_annotation_parentheses = false
+ij_java_spaces_within_array_initializer_braces = false
+ij_java_spaces_within_braces = false
+ij_java_spaces_within_brackets = false
+ij_java_spaces_within_cast_parentheses = false
+ij_java_spaces_within_catch_parentheses = false
+ij_java_spaces_within_for_parentheses = false
+ij_java_spaces_within_if_parentheses = false
+ij_java_spaces_within_method_call_parentheses = false
+ij_java_spaces_within_method_parentheses = false
+ij_java_spaces_within_parentheses = false
+ij_java_spaces_within_record_header = false
+ij_java_spaces_within_switch_parentheses = false
+ij_java_spaces_within_synchronized_parentheses = false
+ij_java_spaces_within_try_parentheses = false
+ij_java_spaces_within_while_parentheses = false
+ij_java_special_else_if_treatment = true
+ij_java_subclass_name_suffix = Impl
+ij_java_ternary_operation_signs_on_next_line = false
+ij_java_ternary_operation_wrap = normal
+ij_java_test_name_suffix = Test
+ij_java_throws_keyword_wrap = normal
+ij_java_throws_list_wrap = normal
+ij_java_use_external_annotations = false
+ij_java_use_fq_class_names = false
+ij_java_use_relative_indents = false
+ij_java_use_single_class_imports = true
+ij_java_variable_annotation_wrap = normal
+ij_java_visibility = public
+ij_java_while_brace_force = always
+ij_java_while_on_new_line = false
+ij_java_wrap_comments = false
+ij_java_wrap_first_method_in_call_chain = false
+ij_java_wrap_long_lines = false
+
+[*.md]
+insert_final_newline = false
+trim_trailing_whitespace = false
+
+[*.yaml]
+indent_size = 2
+
+[*.yml]
+indent_size = 2
+
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 6a65f014c3f..cd94e7a297b 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -14,5 +14,5 @@
# TESTS
/kafka-ui-e2e-checks/ @provectus/kafka-qa
-# HELM CHARTS
-/charts/ @provectus/kafka-devops
+# INFRA
+/.github/workflows/ @provectus/kafka-devops
diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml
new file mode 100644
index 00000000000..4ec791ebb9c
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug.yml
@@ -0,0 +1,92 @@
+name: "\U0001F41E Bug report"
+description: File a bug report
+labels: ["status/triage", "type/bug"]
+assignees: []
+
+body:
+ - type: markdown
+ attributes:
+ value: |
+        Hi, thanks for raising the issue! All contributions really matter.
+        Please note that we'll close the issue without further explanation if you don't
+        follow this template or don't provide the requested information.
+
+ - type: checkboxes
+ id: terms
+ attributes:
+ label: Issue submitter TODO list
+      description: By checking these boxes, you confirm you've done the essential things.
+ options:
+        - label: I've looked up my issue in the [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
+          required: true
+        - label: I've searched for already existing issues [here](https://github.com/provectus/kafka-ui/issues)
+          required: true
+        - label: I've tried running the `master`-labeled docker image and the issue still persists there
+ required: true
+ - label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
+ required: true
+
+ - type: textarea
+ attributes:
+ label: Describe the bug (actual behavior)
+      description: A clear and concise description of what the bug is. Use a list if there is more than one problem
+ validations:
+ required: true
+
+ - type: textarea
+ attributes:
+ label: Expected behavior
+ description: A clear and concise description of what you expected to happen
+ validations:
+ required: false
+
+ - type: textarea
+ attributes:
+ label: Your installation details
+ description: |
+ How do you run the app? Please provide as much info as possible:
+ 1. App version (commit hash in the top left corner of the UI)
+ 2. Helm chart version, if you use one
+ 3. Your application config. Please remove the sensitive info like passwords or API keys.
+        4. Any IaC configs
+ validations:
+ required: true
+
+ - type: textarea
+ attributes:
+ label: Steps to reproduce
+ description: |
+        Please write down the order of the actions required to reproduce the issue.
+        For advanced setups or complicated issues, we might need you to provide
+        a minimal [reproducible example](https://stackoverflow.com/help/minimal-reproducible-example).
+ validations:
+ required: true
+
+ - type: textarea
+ attributes:
+ label: Screenshots
+ description: |
+ If applicable, add screenshots to help explain your problem
+ validations:
+ required: false
+
+ - type: textarea
+ attributes:
+ label: Logs
+ description: |
+        If applicable, *upload* logs to help explain your problem
+ validations:
+ required: false
+
+ - type: textarea
+ attributes:
+ label: Additional context
+ description: |
+        Add any other context about the problem here. For example:
+        1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
+           Were they successful, or did the same issue occur? Please provide steps as well.
+        2. Related issues (if there are any).
+        3. Logs (if available).
+        4. Is there any serious end-user impact caused by this issue that might otherwise be overlooked?
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index 6a2b8abec37..00000000000
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,40 +0,0 @@
----
-name: "\U0001F41E Bug report"
-about: Create a bug report
-title: ''
-labels: status/triage, type/bug
-assignees: ''
-
----
-
-**Describe the bug**
-
-
-
-**Set up**
-
-
-
-**Steps to Reproduce**
-Steps to reproduce the behavior:
-
-1.
-
-**Expected behavior**
-
-
-**Screenshots**
-
-
-
-**Additional context**
-
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 00000000000..ab1839eb161
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,14 @@
+blank_issues_enabled: false
+contact_links:
+ - name: Report helm issue
+ url: https://github.com/provectus/kafka-ui-charts
+ about: Our helm charts are located in another repo. Please raise issues/PRs regarding charts in that repo.
+ - name: Official documentation
+ url: https://docs.kafka-ui.provectus.io/
+    about: Before reaching out for support, please refer to our documentation. Read the "FAQ" and "Common problems" sections, and try using the search there.
+ - name: Community Discord
+ url: https://discord.gg/4DWzD7pGE5
+    about: Chat with other users, get support, or ask questions.
+ - name: GitHub Discussions
+ url: https://github.com/provectus/kafka-ui/discussions
+ about: An alternative place to ask questions or to get some support.
diff --git a/.github/ISSUE_TEMPLATE/feature.yml b/.github/ISSUE_TEMPLATE/feature.yml
new file mode 100644
index 00000000000..e52c2b7ae99
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature.yml
@@ -0,0 +1,66 @@
+name: "\U0001F680 Feature request"
+description: Propose a new feature
+labels: ["status/triage", "type/feature"]
+assignees: []
+
+body:
+ - type: markdown
+ attributes:
+ value: |
+        Hi, thanks for raising the issue! All contributions really matter.
+        Please note that we'll close the issue without further explanation if you don't
+        follow this template or don't provide the requested information.
+
+ - type: checkboxes
+ id: terms
+ attributes:
+ label: Issue submitter TODO list
+      description: By checking these boxes, you confirm you've done the essential things.
+ options:
+        - label: I've searched for already existing issues [here](https://github.com/provectus/kafka-ui/issues)
+ required: true
+ - label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there
+ required: true
+
+ - type: textarea
+ attributes:
+ label: Is your proposal related to a problem?
+ description: |
+ Provide a clear and concise description of what the problem is.
+ For example, "I'm always frustrated when..."
+ validations:
+ required: false
+
+ - type: textarea
+ attributes:
+ label: Describe the feature you're interested in
+ description: |
+ Provide a clear and concise description of what you want to happen.
+ validations:
+ required: true
+
+ - type: textarea
+ attributes:
+ label: Describe alternatives you've considered
+ description: |
+ Let us know about other solutions you've tried or researched.
+ validations:
+ required: false
+
+ - type: input
+ attributes:
+ label: Version you're running
+ description: |
+        Please provide the app version you're currently running
+        (the commit hash shown in the top left corner of the UI).
+ validations:
+ required: true
+
+ - type: textarea
+ attributes:
+ label: Additional context
+ description: |
+ Is there anything else you can add about the proposal?
+ You might want to link to related issues here, if you haven't already.
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index 68bcf80782f..00000000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,35 +0,0 @@
----
-name: "\U0001F680 Feature request"
-about: Propose a new feature
-title: ''
-labels: status/triage, type/feature
-assignees: ''
-
----
-
-### Is your proposal related to a problem?
-
-
-
-### Describe the solution you'd like
-
-
-
-### Describe alternatives you've considered
-
-
-
-### Additional context
-
-
-
diff --git a/.github/ISSUE_TEMPLATE/k8s_whine.md b/.github/ISSUE_TEMPLATE/k8s_whine.md
deleted file mode 100644
index 1d767005eb0..00000000000
--- a/.github/ISSUE_TEMPLATE/k8s_whine.md
+++ /dev/null
@@ -1,40 +0,0 @@
----
-name: "⎈ K8s/Helm problem report"
-about: Report a problem with k8s/helm charts/etc
-title: ''
-labels: scope/k8s, status/triage
-assignees: azatsafin, 5hin0bi
-
----
-
-**Describe the bug**
-
-
-
-**Set up**
-
-
-
-**Steps to Reproduce**
-Steps to reproduce the behavior:
-
-1.
-
-**Expected behavior**
-
-
-**Screenshots**
-
-
-
-**Additional context**
-
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 75ea103c31e..7e8552962ab 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -8,8 +8,6 @@ updates:
timezone: Europe/Moscow
reviewers:
- "Haarolean"
- assignees:
- - "Haarolean"
labels:
- "scope/backend"
- "type/dependencies"
@@ -99,8 +97,6 @@ updates:
timezone: Europe/Moscow
reviewers:
- "Haarolean"
- assignees:
- - "Haarolean"
labels:
- "scope/infrastructure"
- "type/dependencies"
diff --git a/.github/release_drafter.yaml b/.github/release_drafter.yaml
index 421697f2e70..36795355407 100644
--- a/.github/release_drafter.yaml
+++ b/.github/release_drafter.yaml
@@ -9,21 +9,33 @@ template: |
exclude-labels:
- 'scope/infrastructure'
- 'scope/QA'
+ - 'scope/AQA'
- 'type/dependencies'
- 'type/chore'
- 'type/documentation'
- 'type/refactoring'
categories:
+ - title: '🚩 Breaking Changes'
+ labels:
+ - 'impact/changelog'
+
- title: '⚙️Features'
labels:
- 'type/feature'
+
- title: '🪛Enhancements'
labels:
- 'type/enhancement'
+
- title: '🔨Bug Fixes'
labels:
- 'type/bug'
+
+ - title: 'Security'
+ labels:
+ - 'type/security'
+
- title: '⎈ Helm/K8S Changes'
labels:
- 'scope/k8s'
diff --git a/.github/workflows/aws_publisher.yaml b/.github/workflows/aws_publisher.yaml
index 39468d4dde3..5ce2b587fb9 100644
--- a/.github/workflows/aws_publisher.yaml
+++ b/.github/workflows/aws_publisher.yaml
@@ -1,4 +1,4 @@
-name: AWS Marketplace Publisher
+name: "Infra: Release: AWS Marketplace Publisher"
on:
workflow_dispatch:
inputs:
@@ -10,6 +10,11 @@ on:
description: 'Version of KafkaUI'
required: true
default: '0.3.2'
+ PublishOnMarketplace:
+        description: 'If set to true, a request to update the AWS Server product version will be raised'
+ required: true
+ default: false
+ type: boolean
jobs:
build-ami:
@@ -19,14 +24,14 @@ jobs:
- name: Clone infra repo
run: |
echo "Cloning repo..."
- git clone https://kafka-ui-infra:${{ secrets.KAFKA_UI_INFRA_TOKEN }}@gitlab.provectus.com/provectus-internals/kafka-ui-infra.git --branch ${{ github.event.inputs.KafkaUIInfraBranch }}
+ git clone https://infra-tech:${{ secrets.INFRA_USER_ACCESS_TOKEN }}@github.com/provectus/kafka-ui-infra.git --branch ${{ github.event.inputs.KafkaUIInfraBranch }}
echo "Cd to packer DIR..."
cd kafka-ui-infra/ami
echo "WORK_DIR=$(pwd)" >> $GITHUB_ENV
echo "Packer will be triggered in this dir $WORK_DIR"
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v3
with:
aws-access-key-id: ${{ secrets.AWS_AMI_PUBLISH_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_AMI_PUBLISH_KEY_SECRET }}
@@ -46,6 +51,27 @@ jobs:
with:
command: build
arguments: "-color=false -on-error=abort -var=kafka_ui_release_version=${{ github.event.inputs.KafkaUIReleaseVersion }}"
- target: kafka-ui-infra/ami/kafka-ui.pkr.hcl
+ target: kafka-ui.pkr.hcl
+ working_directory: ${{ env.WORK_DIR }}
env:
PACKER_LOG: 1
+
+ # add fresh AMI to AWS Marketplace
+ - name: Publish Artifact at Marketplace
+ if: ${{ github.event.inputs.PublishOnMarketplace == 'true' }}
+ env:
+ PRODUCT_ID: ${{ secrets.AWS_SERVER_PRODUCT_ID }}
+ RELEASE_VERSION: "${{ github.event.inputs.KafkaUIReleaseVersion }}"
+ RELEASE_NOTES: "https://github.com/provectus/kafka-ui/releases/tag/v${{ github.event.inputs.KafkaUIReleaseVersion }}"
+ MP_ROLE_ARN: ${{ secrets.AWS_MARKETPLACE_AMI_ACCESS_ROLE }} # https://docs.aws.amazon.com/marketplace/latest/userguide/ami-single-ami-products.html#single-ami-marketplace-ami-access
+ AMI_OS_VERSION: "amzn2-ami-kernel-5.10-hvm-*-x86_64-gp2"
+ run: |
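+          # extract the ID of the AMI just built by Packer from its manifest, then submit
+          # an AWS Marketplace changeset for the server product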
+ set -x
+ pwd
+ ls -la kafka-ui-infra/ami
+ echo $WORK_DIR/manifest.json
+ export AMI_ID=$(jq -r '.builds[-1].artifact_id' kafka-ui-infra/ami/manifest.json | cut -d ":" -f2)
+ /bin/bash kafka-ui-infra/aws-marketplace/prepare_changeset.sh > changeset.json
+ aws marketplace-catalog start-change-set \
+ --catalog "AWSMarketplace" \
+ --change-set "$(cat changeset.json)"
diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
index aa9237618b5..7f62772832a 100644
--- a/.github/workflows/backend.yml
+++ b/.github/workflows/backend.yml
@@ -1,4 +1,4 @@
-name: backend
+name: "Backend: PR/master build & test"
on:
push:
branches:
@@ -8,6 +8,9 @@ on:
paths:
- "kafka-ui-api/**"
- "pom.xml"
+permissions:
+ checks: write
+ pull-requests: write
jobs:
build-and-test:
runs-on: ubuntu-latest
@@ -16,17 +19,12 @@ jobs:
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- - name: Cache local Maven repository
- uses: actions/cache@v3
- with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-
- - name: Set up JDK 1.13
- uses: actions/setup-java@v1
+ - name: Set up JDK
+ uses: actions/setup-java@v3
with:
- java-version: 1.13
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
- name: Cache SonarCloud packages
uses: actions/cache@v3
with:
@@ -34,23 +32,25 @@ jobs:
key: ${{ runner.os }}-sonar
restore-keys: ${{ runner.os }}-sonar
- name: Build and analyze pull request target
- if: ${{ github.event_name == 'pull_request_target' }}
+ if: ${{ github.event_name == 'pull_request' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN_BACKEND }}
+ HEAD_REF: ${{ github.head_ref }}
+ BASE_REF: ${{ github.base_ref }}
run: |
- mvn versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
- mvn -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar \
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
+ ./mvnw -B -V -ntp verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar \
-Dsonar.projectKey=com.provectus:kafka-ui_backend \
-Dsonar.pullrequest.key=${{ github.event.pull_request.number }} \
- -Dsonar.pullrequest.branch=${{ github.head_ref }} \
- -Dsonar.pullrequest.base=${{ github.base_ref }}
+ -Dsonar.pullrequest.branch=$HEAD_REF \
+ -Dsonar.pullrequest.base=$BASE_REF
- name: Build and analyze push master
if: ${{ github.event_name == 'push' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN_BACKEND }}
run: |
- mvn versions:set -DnewVersion=$GITHUB_SHA
- mvn -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar \
+ ./mvnw -B -ntp versions:set -DnewVersion=$GITHUB_SHA
+ ./mvnw -B -V -ntp verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar \
-Dsonar.projectKey=com.provectus:kafka-ui_backend
diff --git a/.github/workflows/block_merge.yml b/.github/workflows/block_merge.yml
index ce98cc7e3a2..c689d45b0d7 100644
--- a/.github/workflows/block_merge.yml
+++ b/.github/workflows/block_merge.yml
@@ -1,4 +1,4 @@
-name: Pull Request Labels
+name: "Infra: PR block merge"
on:
pull_request:
types: [opened, labeled, unlabeled, synchronize]
@@ -6,7 +6,7 @@ jobs:
block_merge:
runs-on: ubuntu-latest
steps:
- - uses: mheap/github-action-required-labels@v1
+ - uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 0
diff --git a/.github/workflows/branch-deploy.yml b/.github/workflows/branch-deploy.yml
index 7d93385c307..2aa76126299 100644
--- a/.github/workflows/branch-deploy.yml
+++ b/.github/workflows/branch-deploy.yml
@@ -1,4 +1,4 @@
-name: DeployFromBranch
+name: "Infra: Feature Testing: Init env"
on:
workflow_dispatch:
@@ -10,40 +10,33 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
- name: get branch name
id: extract_branch
run: |
- hub pr checkout ${{ github.event.pull_request.number }}
- branch_name=$(hub branch | grep "*" | sed -e 's/^\*//')
- echo $branch_name
- echo ::set-output name=branch::${branch_name}
- tag=$(echo $branch_name | sed 's/\//-/g' | sed 's/\./-/g' | sed 's/\_/-/g' | sed -e 's/\(.*\)/\L\1/' | cut -c1-32 | sed -E 's/(^[^a-z0-9])*([a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*)([^a-z0-9]*)/\2/')
- echo ::set-output name=tag::${tag}
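+          # ::set-output is deprecated; step outputs are now written to the $GITHUB_OUTPUT file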
+ tag='pr${{ github.event.pull_request.number }}'
+ echo "tag=${tag}" >> $GITHUB_OUTPUT
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- - name: Cache local Maven repository
- uses: actions/cache@v3
- with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-
- - name: Set up JDK 1.13
- uses: actions/setup-java@v1
+ - name: Set up JDK
+ uses: actions/setup-java@v3
with:
- java-version: 1.13
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
- name: Build
id: build
run: |
- mvn versions:set -DnewVersion=$GITHUB_SHA
- mvn clean package -Pprod -DskipTests
- export VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
- echo "::set-output name=version::${VERSION}"
+ ./mvnw -B -ntp versions:set -DnewVersion=$GITHUB_SHA
+ ./mvnw -B -V -ntp clean package -Pprod -DskipTests
+ export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
+ echo "version=${VERSION}" >> $GITHUB_OUTPUT
- name: Set up QEMU
- uses: docker/setup-qemu-action@v1
+ uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
id: buildx
- uses: docker/setup-buildx-action@v1
+ uses: docker/setup-buildx-action@v2
- name: Cache Docker layers
uses: actions/cache@v3
with:
@@ -52,7 +45,7 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v3
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -62,7 +55,7 @@ jobs:
uses: aws-actions/amazon-ecr-login@v1
- name: Build and push
id: docker_build_and_push
- uses: docker/build-push-action@v2
+ uses: docker/build-push-action@v4
with:
builder: ${{ steps.buildx.outputs.name }}
context: kafka-ui-api
@@ -80,29 +73,33 @@ jobs:
steps:
- name: clone
run: |
- git clone https://kafka-ui-infra:${{ secrets.KAFKA_UI_INFRA_TOKEN }}@gitlab.provectus.com/provectus-internals/kafka-ui-infra.git
+ git clone https://infra-tech:${{ secrets.INFRA_USER_ACCESS_TOKEN }}@github.com/provectus/kafka-ui-infra.git --branch envs
- name: create deployment
run: |
cd kafka-ui-infra/aws-infrastructure4eks/argocd/scripts
echo "Branch:${{ needs.build.outputs.tag }}"
./kafka-ui-deployment-from-branch.sh ${{ needs.build.outputs.tag }} ${{ github.event.label.name }} ${{ secrets.FEATURE_TESTING_UI_PASSWORD }}
- git config --global user.email "kafka-ui-infra@provectus.com"
- git config --global user.name "kafka-ui-infra"
+ git config --global user.email "infra-tech@provectus.com"
+ git config --global user.name "infra-tech"
git add ../kafka-ui-from-branch/
git commit -m "added env:${{ needs.build.outputs.deploy }}" && git push || true
- - name: make comment with private deployment link
+ - name: update status check for private deployment
if: ${{ github.event.label.name == 'status/feature_testing' }}
- uses: peter-evans/create-or-update-comment@v2
+ uses: Sibz/github-status-action@v1.1.6
with:
- issue-number: ${{ github.event.pull_request.number }}
- body: |
- Custom deployment will be available at http://${{ needs.build.outputs.tag }}.internal.kafka-ui.provectus.io
+ authToken: ${{secrets.GITHUB_TOKEN}}
+ context: "Click Details button to open custom deployment page"
+ state: "success"
+ sha: ${{ github.event.pull_request.head.sha || github.sha }}
+ target_url: "http://${{ needs.build.outputs.tag }}.internal.kafka-ui.provectus.io"
- - name: make comment with public deployment link
+ - name: update status check for public deployment
if: ${{ github.event.label.name == 'status/feature_testing_public' }}
- uses: peter-evans/create-or-update-comment@v2
+ uses: Sibz/github-status-action@v1.1.6
with:
- issue-number: ${{ github.event.pull_request.number }}
- body: |
- Custom deployment will be available at http://${{ needs.build.outputs.tag }}.kafka-ui.provectus.io in 5 minutes
+ authToken: ${{secrets.GITHUB_TOKEN}}
+ context: "Click Details button to open custom deployment page"
+ state: "success"
+ sha: ${{ github.event.pull_request.head.sha || github.sha }}
+          target_url: "http://${{ needs.build.outputs.tag }}.kafka-ui.provectus.io"
diff --git a/.github/workflows/branch-remove.yml b/.github/workflows/branch-remove.yml
index 37b24239d52..d32e1d4edb6 100644
--- a/.github/workflows/branch-remove.yml
+++ b/.github/workflows/branch-remove.yml
@@ -1,40 +1,22 @@
-name: RemoveCustomDeployment
+name: "Infra: Feature Testing: Destroy env"
on:
workflow_dispatch:
pull_request:
types: ['unlabeled', 'closed']
jobs:
remove:
- if: ${{ github.event.label.name == 'status/feature_testing' || github.event.label.name == 'status/feature_testing_public' }}
runs-on: ubuntu-latest
+ if: ${{ (github.event.label.name == 'status/feature_testing' || github.event.label.name == 'status/feature_testing_public') || (github.event.action == 'closed' && (contains(github.event.pull_request.labels.*.name, 'status/feature_testing') || contains(github.event.pull_request.labels.*.name, 'status/feature_testing_public'))) }}
steps:
- uses: actions/checkout@v3
- - name: get branch name
- id: extract_branch
- run: |
- hub pr checkout ${{ github.event.pull_request.number }}
- branch_name=$(hub branch | grep "*" | sed -e 's/^\*//')
- echo $branch_name
- echo ::set-output name=branch::${branch_name}
- tag=$(echo $branch_name | sed 's/\//-/g' | sed 's/\./-/g' | sed 's/\_/-/g' | cut -c1-32)
- echo ::set-output name=tag::${tag}
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: clone
run: |
- git clone https://kafka-ui-infra:${{ secrets.KAFKA_UI_INFRA_TOKEN }}@gitlab.provectus.com/provectus-internals/kafka-ui-infra.git
+ git clone https://infra-tech:${{ secrets.INFRA_USER_ACCESS_TOKEN }}@github.com/provectus/kafka-ui-infra.git --branch envs
- name: remove env
run: |
cd kafka-ui-infra/aws-infrastructure4eks/argocd/scripts
- echo "Branch:${{ steps.extract_branch.outputs.tag }}"
- ./delete-env.sh ${{ steps.extract_branch.outputs.tag }}
- git config --global user.email "kafka-ui-infra@provectus.com"
- git config --global user.name "kafka-ui-infra"
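+          # '|| true' tolerates an already-deleted env so closed PRs and re-runs don't fail the job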
+ ./delete-env.sh pr${{ github.event.pull_request.number }} || true
+ git config --global user.email "infra-tech@provectus.com"
+ git config --global user.name "infra-tech"
git add ../kafka-ui-from-branch/
git commit -m "removed env:${{ needs.build.outputs.deploy }}" && git push || true
- - name: make comment with deployment link
- uses: peter-evans/create-or-update-comment@v2
- with:
- issue-number: ${{ github.event.pull_request.number }}
- body: |
- Custom deployment removed
diff --git a/.github/workflows/build-public-image.yml b/.github/workflows/build-public-image.yml
new file mode 100644
index 00000000000..5f6c46e25eb
--- /dev/null
+++ b/.github/workflows/build-public-image.yml
@@ -0,0 +1,74 @@
+name: "Infra: Image Testing: Deploy"
+on:
+ workflow_dispatch:
+ pull_request:
+ types: ['labeled']
+jobs:
+ build:
+ if: ${{ github.event.label.name == 'status/image_testing' }}
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
+ - name: get branch name
+ id: extract_branch
+ run: |
+ tag='${{ github.event.pull_request.number }}'
+ echo "tag=${tag}" >> $GITHUB_OUTPUT
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build
+ id: build
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=$GITHUB_SHA
+ ./mvnw -B -V -ntp clean package -Pprod -DskipTests
+ export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
+ echo "version=${VERSION}" >> $GITHUB_OUTPUT
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+ - name: Set up Docker Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v2
+ - name: Cache Docker layers
+ uses: actions/cache@v3
+ with:
+ path: /tmp/.buildx-cache
+ key: ${{ runner.os }}-buildx-${{ github.sha }}
+ restore-keys: |
+ ${{ runner.os }}-buildx-
+ - name: Configure AWS credentials for Kafka-UI account
+ uses: aws-actions/configure-aws-credentials@v3
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: us-east-1
+ - name: Login to Amazon ECR
+ id: login-ecr
+ uses: aws-actions/amazon-ecr-login@v1
+ with:
+ registry-type: 'public'
+ - name: Build and push
+ id: docker_build_and_push
+ uses: docker/build-push-action@v4
+ with:
+ builder: ${{ steps.buildx.outputs.name }}
+ context: kafka-ui-api
+ push: true
+ tags: public.ecr.aws/provectus/kafka-ui-custom-build:${{ steps.extract_branch.outputs.tag }}
+ build-args: |
+ JAR_FILE=kafka-ui-api-${{ steps.build.outputs.version }}.jar
+ cache-from: type=local,src=/tmp/.buildx-cache
+ cache-to: type=local,dest=/tmp/.buildx-cache
+ - name: make comment with private deployment link
+ uses: peter-evans/create-or-update-comment@v3
+ with:
+ issue-number: ${{ github.event.pull_request.number }}
+ body: |
+ Image published at public.ecr.aws/provectus/kafka-ui-custom-build:${{ steps.extract_branch.outputs.tag }}
+ outputs:
+ tag: ${{ steps.extract_branch.outputs.tag }}
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 8eae96d5fb6..c50da89ae86 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -20,6 +20,8 @@ on:
paths:
- 'kafka-ui-contract/**'
- 'kafka-ui-react-app/**'
+ - 'kafka-ui-api/**'
+ - 'kafka-ui-serde-api/**'
schedule:
- cron: '39 15 * * 6'
@@ -31,7 +33,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- language: [ 'javascript' ]
+ language: [ 'javascript', 'java' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
@@ -42,7 +44,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
- uses: github/codeql-action/init@v1
+ uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -50,10 +52,17 @@ jobs:
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
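+      # CodeQL's autobuild step below compiles the Java modules,
+      # so a JDK matching the project's toolchain (17) must be set up first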
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
- uses: github/codeql-action/autobuild@v1
+ uses: github/codeql-action/autobuild@v2
# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -67,4 +76,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v1
+ uses: github/codeql-action/analyze@v2
diff --git a/.github/workflows/cve.yaml b/.github/workflows/cve.yaml
index ab6df3a049d..4b38fa2465c 100644
--- a/.github/workflows/cve.yaml
+++ b/.github/workflows/cve.yaml
@@ -10,32 +10,26 @@ jobs:
steps:
- uses: actions/checkout@v3
- - name: Cache local Maven repository
- uses: actions/cache@v3
- with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-
-
- - name: Set up JDK 1.13
- uses: actions/setup-java@v1
+ - name: Set up JDK
+ uses: actions/setup-java@v3
with:
- java-version: 1.13
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
- name: Build project
id: build
run: |
- mvn versions:set -DnewVersion=$GITHUB_SHA
- mvn clean package -DskipTests
- export VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
- echo "::set-output name=version::${VERSION}"
+ ./mvnw -B -ntp versions:set -DnewVersion=$GITHUB_SHA
+ ./mvnw -B -V -ntp clean package -DskipTests
+ export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
+ echo "version=${VERSION}" >> $GITHUB_OUTPUT
- name: Set up QEMU
- uses: docker/setup-qemu-action@v1
+ uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
+ uses: docker/setup-buildx-action@v2
- name: Cache Docker layers
uses: actions/cache@v3
@@ -46,7 +40,7 @@ jobs:
${{ runner.os }}-buildx-
- name: Build docker image
- uses: docker/build-push-action@v2
+ uses: docker/build-push-action@v4
with:
builder: ${{ steps.buildx.outputs.name }}
context: kafka-ui-api
@@ -61,7 +55,7 @@ jobs:
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Run CVE checks
- uses: aquasecurity/trivy-action@0.3.0
+ uses: aquasecurity/trivy-action@0.12.0
with:
image-ref: "provectuslabs/kafka-ui:${{ steps.build.outputs.version }}"
format: "table"
diff --git a/.github/workflows/delete-public-image.yml b/.github/workflows/delete-public-image.yml
new file mode 100644
index 00000000000..45b4e8f7f37
--- /dev/null
+++ b/.github/workflows/delete-public-image.yml
@@ -0,0 +1,34 @@
+name: "Infra: Image Testing: Delete"
+on:
+ workflow_dispatch:
+ pull_request:
+ types: ['unlabeled', 'closed']
+jobs:
+ remove:
+    if: ${{ github.event.label.name == 'status/image_testing' || ( github.event.action == 'closed' && (contains(github.event.pull_request.labels.*.name, 'status/image_testing'))) }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: get branch name
+ id: extract_branch
+ run: |
+ tag='${{ github.event.pull_request.number }}'
+ echo "tag=${tag}" >> $GITHUB_OUTPUT
+ - name: Configure AWS credentials for Kafka-UI account
+ uses: aws-actions/configure-aws-credentials@v3
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: us-east-1
+ - name: Login to Amazon ECR
+ id: login-ecr
+ uses: aws-actions/amazon-ecr-login@v1
+ with:
+ registry-type: 'public'
+ - name: Remove from ECR
+ id: remove_from_ecr
+ run: |
+ aws ecr-public batch-delete-image \
+ --repository-name kafka-ui-custom-build \
+ --image-ids imageTag=${{ steps.extract_branch.outputs.tag }} \
+ --region us-east-1
diff --git a/.github/workflows/documentation.yaml b/.github/workflows/documentation.yaml
index 3c1211b7b36..a0726c204a3 100644
--- a/.github/workflows/documentation.yaml
+++ b/.github/workflows/documentation.yaml
@@ -1,4 +1,4 @@
-name: Documentation
+name: "Infra: Docs: URL linter"
on:
pull_request:
types:
@@ -15,9 +15,9 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Check URLs in files
- uses: urlstechie/urlchecker-action@0.2.31
+ uses: urlstechie/urlchecker-action@0.0.34
with:
exclude_patterns: localhost,127.0.,192.168.
- exclude_urls: https://api.server,https://graph.microsoft.com/User.Read,https://dev-a63ggcut.auth0.com/
+ exclude_urls: https://api.server,https://graph.microsoft.com/User.Read,https://dev-a63ggcut.auth0.com/,http://main-schema-registry:8081,http://schema-registry:8081,http://another-yet-schema-registry:8081,http://another-schema-registry:8081
print_all: false
file_types: .md
diff --git a/.github/workflows/e2e-automation.yml b/.github/workflows/e2e-automation.yml
new file mode 100644
index 00000000000..b3bb2f266fc
--- /dev/null
+++ b/.github/workflows/e2e-automation.yml
@@ -0,0 +1,88 @@
+name: "E2E: Automation suite"
+on:
+ workflow_dispatch:
+ inputs:
+ test_suite:
+ description: 'Select test suite to run'
+ default: 'regression'
+ required: true
+ type: choice
+ options:
+ - regression
+ - sanity
+ - smoke
+ qase_token:
+ description: 'Set Qase token to enable integration'
+ required: false
+ type: string
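+      # passed to Maven as -DQASEIO_API_TOKEN in the test step; leaving it empty runs the suite without Qase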
+
+jobs:
+ build-and-test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.sha }}
+ - name: Configure AWS credentials
+ uses: aws-actions/configure-aws-credentials@v3
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: eu-central-1
+ - name: Set up environment
+ id: set_env_values
+ run: |
+ cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
+ - name: Pull with Docker
+ id: pull_chrome
+ run: |
+ docker pull selenoid/vnc_chrome:103.0
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build with Maven
+ id: build_app
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ - name: Compose with Docker
+ id: compose_app
+      # use the following command until #819 is fixed
+ run: |
+ docker-compose -f kafka-ui-e2e-checks/docker/selenoid-git.yaml up -d
+ docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
+ - name: Run test suite
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ github.event.inputs.qase_token }} -Dsurefire.suiteXmlFiles='src/test/resources/${{ github.event.inputs.test_suite }}.xml' -Dsuite=${{ github.event.inputs.test_suite }} -f 'kafka-ui-e2e-checks' test -Pprod
+ - name: Generate Allure report
+ uses: simple-elf/allure-report-action@master
+ if: always()
+ id: allure-report
+ with:
+ allure_results: ./kafka-ui-e2e-checks/allure-results
+ gh_pages: allure-results
+ allure_report: allure-report
+ subfolder: allure-results
+ report_url: "http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com"
+      - name: Deploy report to Amazon S3
+        uses: jakejarvis/s3-sync-action@master
+ if: always()
+ env:
+ AWS_S3_BUCKET: 'kafkaui-allure-reports'
+ AWS_REGION: 'eu-central-1'
+ SOURCE_DIR: 'allure-history/allure-results'
+      - name: Post the link to Allure report
+ if: always()
+ uses: Sibz/github-status-action@v1.1.6
+ with:
+ authToken: ${{secrets.GITHUB_TOKEN}}
+ context: "Click Details button to open Allure report"
+ state: "success"
+ sha: ${{ github.sha }}
+ target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
+ - name: Dump Docker logs on failure
+ if: failure()
+ uses: jwalton/gh-docker-logs@v2.2.1
diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml
index 574aa922adf..e62cd724a8f 100644
--- a/.github/workflows/e2e-checks.yaml
+++ b/.github/workflows/e2e-checks.yaml
@@ -1,13 +1,15 @@
-name: e2e-checks
+name: "E2E: PR healthcheck"
on:
pull_request_target:
- types: ["opened", "edited", "reopened", "synchronize"]
+ types: [ "opened", "edited", "reopened", "synchronize" ]
paths:
- "kafka-ui-api/**"
- "kafka-ui-contract/**"
- "kafka-ui-react-app/**"
- "kafka-ui-e2e-checks/**"
- "pom.xml"
+permissions:
+ statuses: write
jobs:
build-and-test:
runs-on: ubuntu-latest
@@ -15,39 +17,41 @@ jobs:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- - name: Cache local Maven repository
- uses: actions/cache@v3
+ - name: Configure AWS credentials
+ uses: aws-actions/configure-aws-credentials@v3
with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-
- - name: Set the values
+ aws-access-key-id: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+ aws-region: eu-central-1
+ - name: Set up environment
id: set_env_values
run: |
cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
- - name: pull docker
- id: pull_selenoid
+ - name: Pull with Docker
+ id: pull_chrome
run: |
- docker pull selenoid/vnc:chrome_86.0
- - name: Set up JDK 1.13
- uses: actions/setup-java@v1
+ docker pull selenoid/vnc_chrome:103.0
+ - name: Set up JDK
+ uses: actions/setup-java@v3
with:
- java-version: 1.13
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
- name: Build with Maven
id: build_app
run: |
- mvn versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
- mvn clean package -DskipTests ${{ github.event.inputs.extraMavenOptions }}
- - name: compose app
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ - name: Compose with Docker
id: compose_app
# use the following command until #819 will be fixed
run: |
- docker-compose -f ./documentation/compose/kafka-ui-connectors.yaml up -d
- - name: e2e run
+ docker-compose -f kafka-ui-e2e-checks/docker/selenoid-git.yaml up -d
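+          # start the app under test and block until its actuator health endpoint answers, so tests don't race the startup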
+ docker-compose -f ./documentation/compose/e2e-tests.yaml up -d && until [ "$(docker exec kafka-ui wget --spider --server-response http://localhost:8080/actuator/health 2>&1 | grep -c 'HTTP/1.1 200 OK')" == "1" ]; do echo "Waiting for kafka-ui ..." && sleep 1; done
+ - name: Run test suite
run: |
- mvn versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
- mvn -pl '!kafka-ui-api' test -Pprod
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
+ ./mvnw -B -V -ntp -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -f 'kafka-ui-e2e-checks' test -Pprod
- name: Generate allure report
uses: simple-elf/allure-report-action@master
if: always()
@@ -57,23 +61,22 @@ jobs:
gh_pages: allure-results
allure_report: allure-report
subfolder: allure-results
- - name: Deploy allure report to Github Pages
+ report_url: "http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com"
+      - name: Deploy report to Amazon S3
+        uses: jakejarvis/s3-sync-action@master
if: always()
- uses: peaceiris/actions-gh-pages@v3
- with:
- github_token: ${{ secrets.GITHUB_TOKEN }}
- publish_dir: allure-history
- publish_branch: gh-pages
- destination_dir: ./allure
- - name: Post the link to allure report
+ env:
+ AWS_S3_BUCKET: 'kafkaui-allure-reports'
+ AWS_REGION: 'eu-central-1'
+ SOURCE_DIR: 'allure-history/allure-results'
+      - name: Post the link to Allure report
if: always()
uses: Sibz/github-status-action@v1.1.6
with:
authToken: ${{secrets.GITHUB_TOKEN}}
- context: "Test report"
+ context: "Click Details button to open Allure report"
state: "success"
sha: ${{ github.event.pull_request.head.sha || github.sha }}
- target_url: https://${{ github.repository_owner }}.github.io/kafka-ui/allure/allure-results/${{ github.run_number }}
+ target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
- name: Dump docker logs on failure
if: failure()
- uses: jwalton/gh-docker-logs@v2.2.0
+ uses: jwalton/gh-docker-logs@v2.2.1
diff --git a/.github/workflows/e2e-manual.yml b/.github/workflows/e2e-manual.yml
new file mode 100644
index 00000000000..31cd3bdf642
--- /dev/null
+++ b/.github/workflows/e2e-manual.yml
@@ -0,0 +1,43 @@
+name: "E2E: Manual suite"
+on:
+ workflow_dispatch:
+ inputs:
+ test_suite:
+ description: 'Select test suite to run'
+ default: 'manual'
+ required: true
+ type: choice
+ options:
+ - manual
+ - qase
+ qase_token:
+ description: 'Set Qase token to enable integration'
+ required: true
+ type: string
+
+jobs:
+ build-and-test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.sha }}
+ - name: Set up environment
+ id: set_env_values
+ run: |
+ cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build with Maven
+ id: build_app
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ - name: Run test suite
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ github.event.inputs.qase_token }} -Dsurefire.suiteXmlFiles='src/test/resources/${{ github.event.inputs.test_suite }}.xml' -Dsuite=${{ github.event.inputs.test_suite }} -f 'kafka-ui-e2e-checks' test -Pprod
diff --git a/.github/workflows/e2e-weekly.yml b/.github/workflows/e2e-weekly.yml
new file mode 100644
index 00000000000..439d8037649
--- /dev/null
+++ b/.github/workflows/e2e-weekly.yml
@@ -0,0 +1,75 @@
+name: "E2E: Weekly suite"
+on:
+ schedule:
+ - cron: '0 1 * * 1'
+
+jobs:
+ build-and-test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.sha }}
+ - name: Configure AWS credentials
+ uses: aws-actions/configure-aws-credentials@v3
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: eu-central-1
+ - name: Set up environment
+ id: set_env_values
+ run: |
+ cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
+ - name: Pull with Docker
+ id: pull_chrome
+ run: |
+ docker pull selenoid/vnc_chrome:103.0
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build with Maven
+ id: build_app
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ - name: Compose with Docker
+ id: compose_app
+      # use the following command until #819 is fixed
+ run: |
+ docker-compose -f kafka-ui-e2e-checks/docker/selenoid-git.yaml up -d
+ docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
+ - name: Run test suite
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -Dsurefire.suiteXmlFiles='src/test/resources/sanity.xml' -Dsuite=weekly -f 'kafka-ui-e2e-checks' test -Pprod
+ - name: Generate Allure report
+ uses: simple-elf/allure-report-action@master
+ if: always()
+ id: allure-report
+ with:
+ allure_results: ./kafka-ui-e2e-checks/allure-results
+ gh_pages: allure-results
+ allure_report: allure-report
+ subfolder: allure-results
+ report_url: "http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com"
+      - name: Deploy report to Amazon S3
+        uses: jakejarvis/s3-sync-action@master
+ if: always()
+ env:
+ AWS_S3_BUCKET: 'kafkaui-allure-reports'
+ AWS_REGION: 'eu-central-1'
+ SOURCE_DIR: 'allure-history/allure-results'
+      - name: Post the link to Allure report
+ if: always()
+ uses: Sibz/github-status-action@v1.1.6
+ with:
+ authToken: ${{secrets.GITHUB_TOKEN}}
+ context: "Click Details button to open Allure report"
+ state: "success"
+ sha: ${{ github.sha }}
+ target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
+ - name: Dump Docker logs on failure
+ if: failure()
+ uses: jwalton/gh-docker-logs@v2.2.1
diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml
index ee13432afa9..9d7300448c9 100644
--- a/.github/workflows/frontend.yaml
+++ b/.github/workflows/frontend.yaml
@@ -1,4 +1,4 @@
-name: frontend
+name: "Frontend: PR/master build & test"
on:
push:
branches:
@@ -8,6 +8,9 @@ on:
paths:
- "kafka-ui-contract/**"
- "kafka-ui-react-app/**"
+permissions:
+ checks: write
+ pull-requests: write
jobs:
build-and-test:
env:
@@ -20,35 +23,33 @@ jobs:
# Disabling shallow clone is recommended for improving relevancy of reporting
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- - name: Use Node.js
- uses: actions/setup-node@v3.1.1
+ - uses: pnpm/action-setup@v2.4.0
with:
- node-version: "14"
- - name: Cache node dependency
- uses: actions/cache@v3
+ version: 8.6.12
+ - name: Install node
+ uses: actions/setup-node@v3.8.1
with:
- path: kafka-ui-react-app/node_modules
- key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
- restore-keys: |
- ${{ runner.os }}-node-
+ node-version: "18.17.1"
+ cache: "pnpm"
+ cache-dependency-path: "./kafka-ui-react-app/pnpm-lock.yaml"
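+ # setup-node caches the pnpm store, keyed by the pnpm lockfile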
- name: Install Node dependencies
run: |
cd kafka-ui-react-app/
- npm install
+ pnpm install --frozen-lockfile
- name: Generate sources
run: |
cd kafka-ui-react-app/
- npm run gen:sources
+ pnpm gen:sources
- name: Linter
run: |
cd kafka-ui-react-app/
- npm run lint:CI
+ pnpm lint:CI
- name: Tests
run: |
cd kafka-ui-react-app/
- npm run test:CI
+ pnpm test:CI
- name: SonarCloud Scan
- uses: workshur/sonarcloud-github-action@improved_basedir
+ uses: sonarsource/sonarcloud-github-action@master
with:
projectBaseDir: ./kafka-ui-react-app
args: -Dsonar.pullrequest.key=${{ github.event.pull_request.number }} -Dsonar.pullrequest.branch=${{ github.head_ref }} -Dsonar.pullrequest.base=${{ github.base_ref }}
diff --git a/.github/workflows/helm.yaml b/.github/workflows/helm.yaml
deleted file mode 100644
index 664a15e8a73..00000000000
--- a/.github/workflows/helm.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
-name: Helm
-on:
- pull_request:
- types: [ 'opened', 'edited', 'reopened', 'synchronize' ]
- paths:
- - "charts/**"
-
- schedule:
- # * is a special character in YAML so you have to quote this string
- - cron: '0 8 * * 3'
-
-jobs:
- build-and-test:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - name: Helm tool installer
- uses: Azure/setup-helm@v1
- - name: Setup Kubeval
- uses: lra/setup-kubeval@v1.0.1
- - name: Run kubeval
- shell: bash
- run: |
- sed -i "s@enabled: false@enabled: true@g" charts/kafka-ui/values.yaml
- K8S_VERSIONS=$(git ls-remote --refs --tags https://github.com/kubernetes/kubernetes.git | cut -d/ -f3 | grep -e '^v1\.[0-9]\{2\}\.[0]\{1,2\}$' | grep -v -e '^v1\.1[0-8]\{1\}' | cut -c2-)
- echo "NEXT K8S VERSIONS ARE GOING TO BE TESTED: $K8S_VERSIONS"
- echo ""
- for version in $K8S_VERSIONS
- do
- echo $version;
- helm template charts/kafka-ui -f charts/kafka-ui/values.yaml | kubeval --additional-schema-locations https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master --strict -v $version;
- done
diff --git a/.github/workflows/master.yaml b/.github/workflows/master.yaml
index 1d1aa1f130b..d751e500210 100644
--- a/.github/workflows/master.yaml
+++ b/.github/workflows/master.yaml
@@ -1,4 +1,4 @@
-name: Master
+name: "Master: Build & deploy"
on:
workflow_dispatch:
push:
@@ -9,37 +9,33 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
-
- - name: Cache local Maven repository
- uses: actions/cache@v3
with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-
+ ref: ${{ github.event.pull_request.head.sha }}
- - name: Set up JDK 1.13
- uses: actions/setup-java@v1
+ - name: Set up JDK
+ uses: actions/setup-java@v3
with:
- java-version: 1.13
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
- name: Build
id: build
run: |
- mvn versions:set -DnewVersion=$GITHUB_SHA
- mvn clean package -Pprod -DskipTests
- export VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
- echo "::set-output name=version::${VERSION}"
+ ./mvnw -B -ntp versions:set -DnewVersion=$GITHUB_SHA
+ ./mvnw -V -B -ntp clean package -Pprod -DskipTests
+ export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
+ echo "version=${VERSION}" >> $GITHUB_OUTPUT
#################
# #
# Docker images #
# #
#################
- name: Set up QEMU
- uses: docker/setup-qemu-action@v1
+ uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
+ uses: docker/setup-buildx-action@v2
- name: Cache Docker layers
uses: actions/cache@v3
@@ -50,18 +46,19 @@ jobs:
${{ runner.os }}-buildx-
- name: Login to DockerHub
- uses: docker/login-action@v1
+ uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build_and_push
- uses: docker/build-push-action@v2
+ uses: docker/build-push-action@v4
with:
builder: ${{ steps.buildx.outputs.name }}
context: kafka-ui-api
platforms: linux/amd64,linux/arm64
+ provenance: false
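+ # provenance attestations (on by default since Buildx 0.10) are disabled to keep a plain multi-arch manifest list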
push: true
tags: |
provectuslabs/kafka-ui:${{ steps.build.outputs.version }}
@@ -77,11 +74,11 @@ jobs:
#################################
- name: update-master-deployment
run: |
- git clone https://kafka-ui-infra:${{ secrets.KAFKA_UI_INFRA_TOKEN }}@gitlab.provectus.com/provectus-internals/kafka-ui-infra.git --branch master
+ git clone https://infra-tech:${{ secrets.INFRA_USER_ACCESS_TOKEN }}@github.com/provectus/kafka-ui-infra.git --branch master
cd kafka-ui-infra/aws-infrastructure4eks/argocd/scripts
echo "Image digest is:${{ steps.docker_build_and_push.outputs.digest }}"
./kafka-ui-update-master-digest.sh ${{ steps.docker_build_and_push.outputs.digest }}
- git config --global user.email "kafka-ui-infra@provectus.com"
- git config --global user.name "kafka-ui-infra"
+ git config --global user.email "infra-tech@provectus.com"
+ git config --global user.name "infra-tech"
git add ../kafka-ui/*
git commit -m "updated master image digest: ${{ steps.docker_build_and_push.outputs.digest }}" && git push
diff --git a/.github/workflows/pr-checks.yaml b/.github/workflows/pr-checks.yaml
index 74ff75b833d..ce7dd17ae40 100644
--- a/.github/workflows/pr-checks.yaml
+++ b/.github/workflows/pr-checks.yaml
@@ -1,13 +1,14 @@
-name: "PR Checklist checked"
+name: "PR: Checklist linter"
on:
pull_request_target:
types: [opened, edited, synchronize, reopened]
-
+permissions:
+ checks: write
jobs:
task-check:
runs-on: ubuntu-latest
steps:
- - uses: kentaro-m/task-completed-checker-action@v0.1.0
+ - uses: kentaro-m/task-completed-checker-action@v0.1.2
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
- uses: dekinderfiets/pr-description-enforcer@0.0.1
diff --git a/.github/workflows/release-serde-api.yaml b/.github/workflows/release-serde-api.yaml
new file mode 100644
index 00000000000..44e5babc7e9
--- /dev/null
+++ b/.github/workflows/release-serde-api.yaml
@@ -0,0 +1,30 @@
+name: "Infra: Release: Serde API"
+on: workflow_dispatch
+
+jobs:
+ release-serde-api:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+
+ - run: |
+ git config user.name github-actions
+ git config user.email github-actions@github.com
+
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: "17"
+ distribution: "zulu"
+ cache: "maven"
+
+ - id: install-secret-key
+ name: Install GPG secret key
+ run: |
+ echo -e "${{ secrets.GPG_PRIVATE_KEY }}" | gpg --batch --import
+
+ - name: Publish to Maven Central
+ run: |
+ mvn source:jar javadoc:jar package gpg:sign -Dgpg.passphrase=${{ secrets.GPG_PASSPHRASE }} -Dserver.username=${{ secrets.NEXUS_USERNAME }} -Dserver.password=${{ secrets.NEXUS_PASSWORD }} nexus-staging:deploy -pl kafka-ui-serde-api -s settings.xml
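+ # signs the artifacts with the imported GPG key and stages kafka-ui-serde-api to Maven Central via the nexus-staging plugin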
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 7a21d3e804d..4c2837f1af0 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -1,4 +1,4 @@
-name: Release
+name: "Infra: Release"
on:
release:
types: [published]
@@ -12,34 +12,29 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 0
+ ref: ${{ github.event.pull_request.head.sha }}
- run: |
git config user.name github-actions
git config user.email github-actions@github.com
- - name: Cache local Maven repository
- uses: actions/cache@v3
- with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-maven-
-
- - name: Set up JDK 1.13
- uses: actions/setup-java@v1
+ - name: Set up JDK
+ uses: actions/setup-java@v3
with:
- java-version: 1.13
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
- name: Build with Maven
id: build
run: |
- mvn versions:set -DnewVersion=${{ github.event.release.tag_name }}
- mvn clean package -Pprod -DskipTests
- export VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
- echo ::set-output name=version::${VERSION}
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.release.tag_name }}
+ ./mvnw -B -V -ntp clean package -Pprod -DskipTests
+ export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
+ echo "version=${VERSION}" >> $GITHUB_OUTPUT
- name: Upload files to a GitHub release
- uses: svenstaro/upload-release-action@2.2.1
+ uses: svenstaro/upload-release-action@2.7.0
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: kafka-ui-api/target/kafka-ui-api-${{ steps.build.outputs.version }}.jar
@@ -56,10 +51,10 @@ jobs:
# #
#################
- name: Set up QEMU
- uses: docker/setup-qemu-action@v1
+ uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
+ uses: docker/setup-buildx-action@v2
- name: Cache Docker layers
uses: actions/cache@v3
@@ -70,18 +65,19 @@ jobs:
${{ runner.os }}-buildx-
- name: Login to DockerHub
- uses: docker/login-action@v1
+ uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build_and_push
- uses: docker/build-push-action@v2
+ uses: docker/build-push-action@v4
with:
builder: ${{ steps.buildx.outputs.name }}
context: kafka-ui-api
platforms: linux/amd64,linux/arm64
+ provenance: false
push: true
tags: |
provectuslabs/kafka-ui:${{ steps.build.outputs.version }}
@@ -95,30 +91,10 @@ jobs:
runs-on: ubuntu-latest
needs: release
steps:
- - uses: actions/checkout@v3
+ - name: Repository Dispatch
+ uses: peter-evans/repository-dispatch@v2
with:
- fetch-depth: 1
-
- - run: |
- git config user.name github-actions
- git config user.email github-actions@github.com
-
- - uses: azure/setup-helm@v1
-
- - name: update chart version
- run: |
- export version=${{needs.release.outputs.version}}
- sed -i "s/version:.*/version: ${version}/" charts/kafka-ui/Chart.yaml
- sed -i "s/appVersion:.*/appVersion: ${version}/" charts/kafka-ui/Chart.yaml
-
- - name: add chart
- run: |
- export VERSION=${{needs.release.outputs.version}}
- MSG=$(helm package --app-version ${VERSION} charts/kafka-ui)
- git fetch origin
- git stash
- git checkout -b gh-pages origin/gh-pages
- helm repo index .
- git add -f ${MSG##*/} index.yaml
- git commit -m "release ${VERSION}"
- git push
+ token: ${{ secrets.CHARTS_ACTIONS_TOKEN }}
+ repository: provectus/kafka-ui-charts
+ event-type: prepare-helm-release
+ client-payload: '{"appversion": "${{ needs.release.outputs.version }}"}'
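+ # the charts repo is expected to pick up the 'prepare-helm-release' event and cut a chart release for this appversion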
diff --git a/.github/workflows/release_drafter.yml b/.github/workflows/release_drafter.yml
index 742254b942e..d313edac3c4 100644
--- a/.github/workflows/release_drafter.yml
+++ b/.github/workflows/release_drafter.yml
@@ -1,19 +1,34 @@
-name: Release Drafter
+name: "Infra: Release Drafter run"
on:
push:
- # branches to consider in the event; optional, defaults to all
branches:
- master
workflow_dispatch:
+ inputs:
+ version:
+ description: 'Release version'
+ required: false
+ branch:
+ description: 'Target branch'
+ required: false
+ default: 'master'
+
+permissions:
+ contents: read
jobs:
update_release_draft:
runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ pull-requests: write
steps:
- uses: release-drafter/release-drafter@v5
with:
config-name: release_drafter.yaml
disable-autolabeler: true
+ version: ${{ github.event.inputs.version }}
+ commitish: ${{ github.event.inputs.branch }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/separate_env_public_create.yml b/.github/workflows/separate_env_public_create.yml
index eb59e55f7ad..cac2d444ecd 100644
--- a/.github/workflows/separate_env_public_create.yml
+++ b/.github/workflows/separate_env_public_create.yml
@@ -1,4 +1,4 @@
-name: Separate environment create
+name: "Infra: Feature Testing Public: Init env"
on:
workflow_dispatch:
inputs:
@@ -8,19 +8,82 @@ on:
default: 'demo'
jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
+ - name: Generate image tag
+ id: extract_branch
+ run: |
+ tag="${{ github.event.inputs.ENV_NAME }}-$(date '+%F-%H-%M-%S')"
+ echo "tag=${tag}" >> $GITHUB_OUTPUT
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build
+ id: build
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=$GITHUB_SHA
+ ./mvnw -B -V -ntp clean package -Pprod -DskipTests
+ export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
+ echo "version=${VERSION}" >> $GITHUB_OUTPUT
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+ - name: Set up Docker Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v2
+ - name: Cache Docker layers
+ uses: actions/cache@v3
+ with:
+ path: /tmp/.buildx-cache
+ key: ${{ runner.os }}-buildx-${{ github.sha }}
+ restore-keys: |
+ ${{ runner.os }}-buildx-
+ - name: Configure AWS credentials for Kafka-UI account
+ uses: aws-actions/configure-aws-credentials@v3
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: eu-central-1
+ - name: Login to Amazon ECR
+ id: login-ecr
+ uses: aws-actions/amazon-ecr-login@v1
+ - name: Build and push
+ id: docker_build_and_push
+ uses: docker/build-push-action@v4
+ with:
+ builder: ${{ steps.buildx.outputs.name }}
+ context: kafka-ui-api
+ push: true
+ tags: 297478128798.dkr.ecr.eu-central-1.amazonaws.com/kafka-ui:${{ steps.extract_branch.outputs.tag }}
+ build-args: |
+ JAR_FILE=kafka-ui-api-${{ steps.build.outputs.version }}.jar
+ cache-from: type=local,src=/tmp/.buildx-cache
+ cache-to: type=local,dest=/tmp/.buildx-cache
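+ # Docker layer cache round-trips through /tmp/.buildx-cache, persisted between runs by actions/cache above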
+ outputs:
+ tag: ${{ steps.extract_branch.outputs.tag }}
+
separate-env-create:
runs-on: ubuntu-latest
+ needs: build
steps:
- name: clone
run: |
- git clone https://kafka-ui-infra:${{ secrets.KAFKA_UI_INFRA_TOKEN }}@gitlab.provectus.com/provectus-internals/kafka-ui-infra.git
+ git clone https://infra-tech:${{ secrets.INFRA_USER_ACCESS_TOKEN }}@github.com/provectus/kafka-ui-infra.git --branch envs
- name: separate env create
run: |
cd kafka-ui-infra/aws-infrastructure4eks/argocd/scripts
- bash separate_env_create.sh ${{ github.event.inputs.ENV_NAME }} ${{ secrets.FEATURE_TESTING_UI_PASSWORD }}
- git config --global user.email "kafka-ui-infra@provectus.com"
- git config --global user.name "kafka-ui-infra"
+ bash separate_env_create.sh ${{ github.event.inputs.ENV_NAME }} ${{ secrets.FEATURE_TESTING_UI_PASSWORD }} ${{ needs.build.outputs.tag }}
+ git config --global user.email "infra-tech@provectus.com"
+ git config --global user.name "infra-tech"
git add -A
git commit -m "separate env added: ${{ github.event.inputs.ENV_NAME }}" && git push || true
diff --git a/.github/workflows/separate_env_public_remove.yml b/.github/workflows/separate_env_public_remove.yml
index 19084801377..145be002c9f 100644
--- a/.github/workflows/separate_env_public_remove.yml
+++ b/.github/workflows/separate_env_public_remove.yml
@@ -1,4 +1,4 @@
-name: Separate environment remove
+name: "Infra: Feature Testing Public: Destroy env"
on:
workflow_dispatch:
inputs:
@@ -13,12 +13,12 @@ jobs:
steps:
- name: clone
run: |
- git clone https://kafka-ui-infra:${{ secrets.KAFKA_UI_INFRA_TOKEN }}@gitlab.provectus.com/provectus-internals/kafka-ui-infra.git
+ git clone https://infra-tech:${{ secrets.INFRA_USER_ACCESS_TOKEN }}@github.com/provectus/kafka-ui-infra.git --branch envs
- name: separate environment remove
run: |
cd kafka-ui-infra/aws-infrastructure4eks/argocd/scripts
bash separate_env_remove.sh ${{ github.event.inputs.ENV_NAME }}
- git config --global user.email "kafka-ui-infra@provectus.com"
- git config --global user.name "kafka-ui-infra"
+ git config --global user.email "infra-tech@provectus.com"
+ git config --global user.name "infra-tech"
git add -A
git commit -m "separate env removed: ${{ github.event.inputs.ENV_NAME }}" && git push || true
diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml
index 5e9ac844fb0..cb9870c5208 100644
--- a/.github/workflows/stale.yaml
+++ b/.github/workflows/stale.yaml
@@ -1,4 +1,4 @@
-name: 'Close stale issues'
+name: 'Infra: Close stale issues'
on:
schedule:
- cron: '30 1 * * *'
@@ -7,7 +7,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- - uses: actions/stale@v5
+ - uses: actions/stale@v8
with:
days-before-issue-stale: 7
days-before-issue-close: 3
diff --git a/.github/workflows/terraform-deploy.yml b/.github/workflows/terraform-deploy.yml
index db2f651e038..e42d52b11a1 100644
--- a/.github/workflows/terraform-deploy.yml
+++ b/.github/workflows/terraform-deploy.yml
@@ -1,4 +1,4 @@
-name: terraform_deploy
+name: "Infra: Terraform deploy"
on:
workflow_dispatch:
inputs:
@@ -26,18 +26,14 @@ jobs:
echo "Terraform will be triggered in this dir $TF_DIR"
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v3
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-central-1
- name: Terraform Install
- uses: hashicorp/setup-terraform@v1
-
- - name: Terraform format
- id: fmt
- run: cd $TF_DIR && terraform fmt -check
+ uses: hashicorp/setup-terraform@v2
- name: Terraform init
id: init
diff --git a/.github/workflows/triage_issues.yml b/.github/workflows/triage_issues.yml
index 8009a2a64bd..344ba5c118d 100644
--- a/.github/workflows/triage_issues.yml
+++ b/.github/workflows/triage_issues.yml
@@ -1,4 +1,4 @@
-name: Add triage label to new issues
+name: "Infra: Triage: Apply triage label for issues"
on:
issues:
types:
diff --git a/.github/workflows/triage_prs.yml b/.github/workflows/triage_prs.yml
index 90d76936036..6906cd8a8a5 100644
--- a/.github/workflows/triage_prs.yml
+++ b/.github/workflows/triage_prs.yml
@@ -1,4 +1,4 @@
-name: Add triage label to new PRs
+name: "Infra: Triage: Apply triage label for PRs"
on:
pull_request:
types:
diff --git a/.github/workflows/welcome-first-time-contributors.yml b/.github/workflows/welcome-first-time-contributors.yml
index b0258c9235b..1ac861055cc 100644
--- a/.github/workflows/welcome-first-time-contributors.yml
+++ b/.github/workflows/welcome-first-time-contributors.yml
@@ -7,7 +7,9 @@ on:
issues:
types:
- opened
-
+permissions:
+ issues: write
+ pull-requests: write
jobs:
welcome:
runs-on: ubuntu-latest
diff --git a/.github/workflows/workflow_linter.yaml b/.github/workflows/workflow_linter.yaml
index b4af45d57ef..df9983a5301 100644
--- a/.github/workflows/workflow_linter.yaml
+++ b/.github/workflows/workflow_linter.yaml
@@ -1,4 +1,4 @@
-name: "Workflow linter"
+name: "Infra: Workflow linter"
on:
pull_request:
types:
diff --git a/.gitignore b/.gitignore
index 55b770349f8..a12e7753760 100644
--- a/.gitignore
+++ b/.gitignore
@@ -31,6 +31,9 @@ build/
.vscode/
/kafka-ui-api/app/node
+### SDKMAN ###
+.sdkmanrc
+
.DS_Store
*.code-workspace
diff --git a/.mvn/wrapper/MavenWrapperDownloader.java b/.mvn/wrapper/MavenWrapperDownloader.java
deleted file mode 100644
index e76d1f3241d..00000000000
--- a/.mvn/wrapper/MavenWrapperDownloader.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright 2007-present the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.net.*;
-import java.io.*;
-import java.nio.channels.*;
-import java.util.Properties;
-
-public class MavenWrapperDownloader {
-
- private static final String WRAPPER_VERSION = "0.5.6";
- /**
- * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
- */
- private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
- + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
-
- /**
- * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
- * use instead of the default one.
- */
- private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
- ".mvn/wrapper/maven-wrapper.properties";
-
- /**
- * Path where the maven-wrapper.jar will be saved to.
- */
- private static final String MAVEN_WRAPPER_JAR_PATH =
- ".mvn/wrapper/maven-wrapper.jar";
-
- /**
- * Name of the property which should be used to override the default download url for the wrapper.
- */
- private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
-
- public static void main(String args[]) {
- System.out.println("- Downloader started");
- File baseDirectory = new File(args[0]);
- System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
-
- // If the maven-wrapper.properties exists, read it and check if it contains a custom
- // wrapperUrl parameter.
- File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
- String url = DEFAULT_DOWNLOAD_URL;
- if(mavenWrapperPropertyFile.exists()) {
- FileInputStream mavenWrapperPropertyFileInputStream = null;
- try {
- mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
- Properties mavenWrapperProperties = new Properties();
- mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
- url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
- } catch (IOException e) {
- System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
- } finally {
- try {
- if(mavenWrapperPropertyFileInputStream != null) {
- mavenWrapperPropertyFileInputStream.close();
- }
- } catch (IOException e) {
- // Ignore ...
- }
- }
- }
- System.out.println("- Downloading from: " + url);
-
- File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
- if(!outputFile.getParentFile().exists()) {
- if(!outputFile.getParentFile().mkdirs()) {
- System.out.println(
- "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
- }
- }
- System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
- try {
- downloadFileFromURL(url, outputFile);
- System.out.println("Done");
- System.exit(0);
- } catch (Throwable e) {
- System.out.println("- Error downloading");
- e.printStackTrace();
- System.exit(1);
- }
- }
-
- private static void downloadFileFromURL(String urlString, File destination) throws Exception {
- if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
- String username = System.getenv("MVNW_USERNAME");
- char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
- Authenticator.setDefault(new Authenticator() {
- @Override
- protected PasswordAuthentication getPasswordAuthentication() {
- return new PasswordAuthentication(username, password);
- }
- });
- }
- URL website = new URL(urlString);
- ReadableByteChannel rbc;
- rbc = Channels.newChannel(website.openStream());
- FileOutputStream fos = new FileOutputStream(destination);
- fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
- fos.close();
- rbc.close();
- }
-
-}
diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar
index 2cc7d4a55c0..bf82ff01c6c 100644
Binary files a/.mvn/wrapper/maven-wrapper.jar and b/.mvn/wrapper/maven-wrapper.jar differ
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
index 642d572ce90..dc3affce3dd 100644
--- a/.mvn/wrapper/maven-wrapper.properties
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -1,2 +1,18 @@
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
-wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.6/apache-maven-3.8.6-bin.zip
+wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar
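+# maven-wrapper 3.1.1 downloads Maven and the wrapper jar itself, making the old MavenWrapperDownloader.java (removed in this change) unnecessary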
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index c01826ba493..ab17417cf97 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,3 +1,5 @@
+This guide is an exact copy of the same document located [in our official docs](https://docs.kafka-ui.provectus.io/development/contributing). If there are any differences between the documents, the one located in our official docs should prevail.
+
This guide aims to walk you through the process of working on issues and Pull Requests (PRs).
Bear in mind that you will not be able to complete some steps on your own if you do not have a “write” permission. Feel free to reach out to the maintainers to help you unlock these activities.
@@ -20,7 +22,7 @@ You also need to consider labels. You can sort the issues by scope labels, such
## Grabbing the issue
There is a bunch of criteria that make an issue feasible for development.
-The implementation of any features and/or their enhancements should be reasonable, must be backed by justified requirements (demanded by the community, [roadmap](documentation/project/ROADMAP.md) plans, etc.). The final decision is left for the maintainers' discretion.
+The implementation of any features and/or their enhancements should be reasonable and must be backed by justified requirements (demanded by the community, [roadmap](https://docs.kafka-ui.provectus.io/project/roadmap) plans, etc.). The final decision is left to the maintainers' discretion.
All bugs should be confirmed as such (i.e. the behavior is unintended).
@@ -39,7 +41,7 @@ To keep the status of the issue clear to everyone, please keep the card's status
## Setting up a local development environment
-Please refer to [this guide](documentation/project/contributing/README.md).
+Please refer to [this guide](https://docs.kafka-ui.provectus.io/development/contributing).
# Pull Requests
@@ -78,6 +80,7 @@ When creating a PR please do the following:
4. If the PR does not close any of the issues, the PR itself might need to have a milestone set. Reach out to the maintainers to consult.
5. Assign the PR to yourself. A PR assignee is someone whose goal is to get the PR merged.
6. Add reviewers. As a rule, reviewers' suggestions are pretty good; please use them.
+7. Upon merging the PR, please use a meaningful commit message; the task name should be fine in this case.
### Pull Request checklist
diff --git a/README.md b/README.md
index 765239512ac..f6a16c862c3 100644
--- a/README.md
+++ b/README.md
@@ -1,21 +1,37 @@
 UI for Apache Kafka
------------------
#### Versatile, fast and lightweight web UI for managing Apache Kafka® clusters. Built by developers, for developers.
+
[](https://github.com/provectus/kafka-ui/blob/master/LICENSE)

[](https://github.com/provectus/kafka-ui/releases)
[](https://discord.gg/4DWzD7pGE5)
+[](https://hub.docker.com/r/provectuslabs/kafka-ui)
-### DISCLAIMER
-UI for Apache Kafka is a free, open-source tool that is curated by Provectus, and is built and supported by the open-source community. The tool will remain free and open-source in the future. Provectus does not plan to add any paid features or subscription plans so that everyone can have a better experience observing their data. UI for Apache Kafka is a part of the [Provectus NextGen Data Platform](https://provectus.com/nextgen-data-platform/). Check it out for more details!
+
+ DOCS •
+ QUICK START •
+ COMMUNITY DISCORD
+
+ AWS Marketplace •
+ ProductHunt
+
+
+
+
+
-#### UI for Apache Kafka is a free, open-source web UI to monitor and manage Apache Kafka clusters.
+#### UI for Apache Kafka is a free, open-source web UI to monitor and manage Apache Kafka clusters.
-UI for Apache Kafka is a simple tool that makes your data flows observable, helps find and troubleshoot issues faster and deliver optimal performance. Its lightweight dashboard makes it easy to track key metrics of your Kafka clusters - Brokers, Topics, Partitions, Production, and Consumption.
+UI for Apache Kafka is a simple tool that makes your data flows observable, helps find and troubleshoot issues faster and deliver optimal performance. Its lightweight dashboard makes it easy to track key metrics of your Kafka clusters - Brokers, Topics, Partitions, Production, and Consumption.
-Set up UI for Apache Kafka with just a couple of easy commands to visualize your Kafka data in a comprehensible way. You can run the tool locally or in
-the cloud.
+### DISCLAIMER
+UI for Apache Kafka is a free tool built and supported by the open-source community. Curated by Provectus, it will remain free and open-source; no paid features or subscription plans will be added in the future.
+Looking for the help of Kafka experts? Provectus can help you design, build, deploy, and manage Apache Kafka clusters and streaming applications. Discover [Professional Services for Apache Kafka](https://provectus.com/professional-services-apache-kafka/) to unlock the full potential of Kafka in your enterprise!
+
+Set up UI for Apache Kafka with just a couple of easy commands to visualize your Kafka data in a comprehensible way. You can run the tool locally or in
+the cloud.

@@ -27,21 +43,24 @@ the cloud.
* **View Consumer Groups** — view per-partition parked offsets, combined and per-partition lag
* **Browse Messages** — browse messages with JSON, plain text, and Avro encoding
* **Dynamic Topic Configuration** — create and configure new topics with dynamic configuration
-* **Configurable Authentification** — secure your installation with optional Github/Gitlab/Google OAuth 2.0
-
+* **Configurable Authentication** — [secure](https://docs.kafka-ui.provectus.io/configuration/authentication) your installation with optional GitHub/GitLab/Google OAuth 2.0
+* **Custom serialization/deserialization plugins** - [use](https://docs.kafka-ui.provectus.io/configuration/serialization-serde) a ready-to-go serde for your data like AWS Glue or Smile, or code your own!
+* **Role based access control** - [manage permissions](https://docs.kafka-ui.provectus.io/configuration/rbac-role-based-access-control) to access the UI with granular precision
+* **Data masking** - [obfuscate](https://docs.kafka-ui.provectus.io/configuration/data-masking) sensitive data in topic messages
+
# The Interface
UI for Apache Kafka wraps major functions of Apache Kafka with an intuitive user interface.

## Topics
-UI for Apache Kafka makes it easy for you to create topics in your browser by several clicks,
+UI for Apache Kafka makes it easy for you to create topics in your browser in just a few clicks,
pasting your own parameters, and viewing topics in the list.

It's possible to jump from connectors view to corresponding topics and from a topic to consumers (back and forth) for more convenient navigation.
-connectors, overview topic settings.
+connectors, overview topic settings.

@@ -55,123 +74,68 @@ There are 3 supported types of schemas: Avro®, JSON Schema, and Protobuf schema

-Before producing avro-encoded messages, you have to add an avro schema for the topic in Schema Registry. Now all these steps are easy to do
+Before producing Avro/Protobuf-encoded messages, you have to add a schema for the topic in Schema Registry. All these steps are easy to do
with a few clicks in a user-friendly interface.

# Getting Started
-To run UI for Apache Kafka, you can use a pre-built Docker image or build it locally.
+To run UI for Apache Kafka, you can use either a pre-built Docker image or build it yourself (as a Docker image or a jar file).
-## Configuration
+## Quick start (Demo run)
-We have plenty of [docker-compose files](documentation/compose/DOCKER_COMPOSE.md) as examples. They're built for various configuration stacks.
-
-# Guides
-
-- [SSO configuration](documentation/guides/SSO.md)
-- [AWS IAM configuration](documentation/guides/AWS_IAM.md)
-- [Docker-compose files](documentation/compose/DOCKER_COMPOSE.md)
-- [Connection to a secure broker](documentation/compose/SECURE_BROKER.md)
-
-### Configuration File
-Example of how to configure clusters in the [application-local.yml](https://github.com/provectus/kafka-ui/blob/master/kafka-ui-api/src/main/resources/application-local.yml) configuration file:
+```
+docker run -it -p 8080:8080 -e DYNAMIC_CONFIG_ENABLED=true provectuslabs/kafka-ui
+```
+Then access the web UI at [http://localhost:8080](http://localhost:8080).
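+
+`DYNAMIC_CONFIG_ENABLED` lets you add and edit cluster connections right from the web UI, so no config file is needed for a demo run.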
-```sh
-kafka:
- clusters:
- -
- name: local
- bootstrapServers: localhost:29091
- schemaRegistry: http://localhost:8085
- schemaRegistryAuth:
- username: username
- password: password
-# schemaNameTemplate: "%s-value"
- jmxPort: 9997
- -
-```
+This command is sufficient to try things out. Once you're done, you can proceed with a [persistent installation](https://docs.kafka-ui.provectus.io/quick-start/persistent-start).
-* `name`: cluster name
-* `bootstrapServers`: where to connect
-* `schemaRegistry`: schemaRegistry's address
-* `schemaRegistryAuth.username`: schemaRegistry's basic authentication username
-* `schemaRegistryAuth.password`: schemaRegistry's basic authentication password
-* `schemaNameTemplate`: how keys are saved to schemaRegistry
-* `jmxPort`: open jmxPosrts of a broker
-* `readOnly`: enable read only mode
+## Persistent installation
-Configure as many clusters as you need by adding their configs below separated with `-`.
+```
+services:
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8080:8080
+ environment:
+ DYNAMIC_CONFIG_ENABLED: 'true'
+ volumes:
+ - ~/kui/config.yml:/etc/kafkaui/dynamic_config.yaml
+```
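+
+A minimal way to bring it up (assuming the snippet above is saved as `docker-compose.yml` and a config file exists at `~/kui/config.yml`):
+
+```sh
+docker compose up -d
+```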
-## Running a Docker Image
-The official Docker image for UI for Apache Kafka is hosted here: [hub.docker.com/r/provectuslabs/kafka-ui](https://hub.docker.com/r/provectuslabs/kafka-ui).
+Please refer to our [configuration](https://docs.kafka-ui.provectus.io/configuration/quick-start) page to proceed with further app configuration.
-Launch Docker container in the background:
-```sh
+## Some useful configuration-related links
-docker run -p 8080:8080 \
- -e KAFKA_CLUSTERS_0_NAME=local \
- -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 \
- -d provectuslabs/kafka-ui:latest
+[Web UI Cluster Configuration Wizard](https://docs.kafka-ui.provectus.io/configuration/configuration-wizard)
-```
-Then access the web UI at [http://localhost:8080](http://localhost:8080).
-Further configuration with environment variables - [see environment variables](#env_variables)
-
-### Docker Compose
+[Configuration file explanation](https://docs.kafka-ui.provectus.io/configuration/configuration-file)
-If you prefer to use `docker-compose` please refer to the [documentation](docker-compose.md).
+[Docker Compose examples](https://docs.kafka-ui.provectus.io/configuration/compose-examples)
+[Misc configuration properties](https://docs.kafka-ui.provectus.io/configuration/misc-configuration-properties)
-## Building With Docker
+## Helm charts
-### Prerequisites
+[Quick start](https://docs.kafka-ui.provectus.io/configuration/helm-charts/quick-start)
-Check [software-required.md](documentation/project/contributing/software-required.md)
+## Building from sources
-### Building
+[Quick start with building](https://docs.kafka-ui.provectus.io/development/building/prerequisites)
-Check [building.md](documentation/project/contributing/building.md)
+## Liveness and readiness probes
+The liveness and readiness endpoint is at `/actuator/health`.
+The info endpoint (build info) is located at `/actuator/info`.
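+
+For example, a readiness check can be as simple as (assuming the default port 8080):
+
+```sh
+curl -f http://localhost:8080/actuator/health
+```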
-### Running
+# Configuration options
-Check [running.md](documentation/project/contributing/running.md)
+All of the environment variables and config properties can be found [here](https://docs.kafka-ui.provectus.io/configuration/misc-configuration-properties).
-## Liveliness and readiness probes
-Liveliness and readiness endpoint is at `/actuator/health`.
-Info endpoint (build info) is located at `/actuator/info`.
+# Contributing
-## Environment Variables
-
-Alternatively, each variable of the .yml file can be set with an environment variable.
-For example, if you want to use an environment variable to set the `name` parameter, you can write it like this: `KAFKA_CLUSTERS_2_NAME`
-
-|Name |Description
-|-----------------------|-------------------------------
-|`SERVER_SERVLET_CONTEXT_PATH` | URI basePath
-|`LOGGING_LEVEL_ROOT` | Setting log level (trace, debug, info, warn, error). Default: info
-|`LOGGING_LEVEL_COM_PROVECTUS` |Setting log level (trace, debug, info, warn, error). Default: debug
-|`SERVER_PORT` |Port for the embedded server. Default: `8080`
-|`KAFKA_ADMIN-CLIENT-TIMEOUT` | Kafka API timeout in ms. Default: `30000`
-|`KAFKA_CLUSTERS_0_NAME` | Cluster name
-|`KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS` |Address where to connect
-|`KAFKA_CLUSTERS_0_KSQLDBSERVER` | KSQL DB server address
-|`KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL` |Security protocol to connect to the brokers. For SSL connection use "SSL", for plaintext connection don't set this environment variable
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRY` |SchemaRegistry's address
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_USERNAME` |SchemaRegistry's basic authentication username
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD` |SchemaRegistry's basic authentication password
-|`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE` |How keys are saved to schemaRegistry
-|`KAFKA_CLUSTERS_0_JMXPORT` |Open jmxPosrts of a broker
-|`KAFKA_CLUSTERS_0_READONLY` |Enable read-only mode. Default: false
-|`KAFKA_CLUSTERS_0_DISABLELOGDIRSCOLLECTION` |Disable collecting segments information. It should be true for confluent cloud. Default: false
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME` |Given name for the Kafka Connect cluster
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS` |Address of the Kafka Connect service endpoint
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_USERNAME`| Kafka Connect cluster's basic authentication username
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_PASSWORD`| Kafka Connect cluster's basic authentication password
-|`KAFKA_CLUSTERS_0_JMXSSL` |Enable SSL for JMX? `true` or `false`. For advanced setup, see `kafka-ui-jmx-secured.yml`
-|`KAFKA_CLUSTERS_0_JMXUSERNAME` |Username for JMX authentication
-|`KAFKA_CLUSTERS_0_JMXPASSWORD` |Password for JMX authentication
-|`TOPIC_RECREATE_DELAY_SECONDS` |Time delay between topic deletion and topic creation attempts for topic recreate functionality. Default: 1
-|`TOPIC_RECREATE_MAXRETRIES` |Number of attempts of topic creation after topic deletion for topic recreate functionality. Default: 15
+Please refer to the [contributing guide](https://docs.kafka-ui.provectus.io/development/contributing); we'll guide you from there.
diff --git a/SECURITY.md b/SECURITY.md
index 26e7552b42b..318166dd606 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -6,7 +6,10 @@ Following versions of the project are currently being supported with security up
| Version | Supported |
| ------- | ------------------ |
-| 0.4.x | :white_check_mark: |
+| 0.7.x | :white_check_mark: |
+| 0.6.x | :x: |
+| 0.5.x | :x: |
+| 0.4.x | :x: |
| 0.3.x | :x: |
| 0.2.x | :x: |
| 0.1.x | :x: |
diff --git a/charts/kafka-ui/.helmignore b/charts/kafka-ui/.helmignore
deleted file mode 100644
index 7a93969f5a0..00000000000
--- a/charts/kafka-ui/.helmignore
+++ /dev/null
@@ -1,25 +0,0 @@
-# Patterns to ignore when building packages.
-# This supports shell glob matching, relative path matching, and
-# negation (prefixed with !). Only one pattern per line.
-.DS_Store
-# Common VCS dirs
-.git/
-.gitignore
-.bzr/
-.bzrignore
-.hg/
-.hgignore
-.svn/
-# Common backup files
-*.swp
-*.bak
-*.tmp
-*.orig
-*~
-# Various IDEs
-.project
-.idea/
-*.tmproj
-.vscode/
-example/
-README.md
diff --git a/charts/kafka-ui/Chart.yaml b/charts/kafka-ui/Chart.yaml
deleted file mode 100644
index f1be768f337..00000000000
--- a/charts/kafka-ui/Chart.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-apiVersion: v2
-name: kafka-ui
-description: A Helm chart for kafka-UI
-type: application
-version: 0.0.3
-appVersion: latest
-icon: https://github.com/provectus/kafka-ui/raw/master/images/kafka-ui-logo.png
diff --git a/charts/kafka-ui/README.md b/charts/kafka-ui/README.md
deleted file mode 100644
index eac7aa04fd2..00000000000
--- a/charts/kafka-ui/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# Kafka-UI Helm Chart
-
-## Configuration
-
-Most of the Helm charts parameters are common, follow table describe unique parameters related to application configuration.
-
-### Kafka-UI parameters
-
-| Parameter| Description| Default|
-|---|---|---|
-| `existingConfigMap`| Name of the existing ConfigMap with Kafka-UI environment variables | `nil`|
-| `existingSecret`| Name of the existing Secret with Kafka-UI environment variables| `nil`|
-| `envs.secret`| Set of the sensitive environment variables to pass to Kafka-UI | `{}`|
-| `envs.config`| Set of the environment variables to pass to Kafka-UI | `{}`|
-| `networkPolicy.enabled` | Enable network policies | `false`|
-| `networkPolicy.egressRules.customRules` | Custom network egress policy rules | `[]`|
-| `networkPolicy.ingressRules.customRules` | Custom network ingress policy rules | `[]`|
-| `podLabels` | Extra labels for Kafka-UI pod | `{}`|
-
-## Example
-
-To install Kafka-UI need to execute follow:
-``` bash
-helm repo add kafka-ui https://provectus.github.io/kafka-ui
-helm install kafka-ui kafka-ui/kafka-ui --set envs.config.KAFKA_CLUSTERS_0_NAME=local --set envs.config.KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
-```
-To connect to Kafka-UI web application need to execute:
-``` bash
-kubectl port-forward svc/kafka-ui 8080:80
-```
-Open the `http://127.0.0.1:8080` on the browser to access Kafka-UI.
diff --git a/charts/kafka-ui/index.yaml b/charts/kafka-ui/index.yaml
deleted file mode 100644
index 872807193db..00000000000
--- a/charts/kafka-ui/index.yaml
+++ /dev/null
@@ -1,3 +0,0 @@
-apiVersion: v1
-entries: {}
-generated: "2021-11-11T12:26:08.479581+03:00"
diff --git a/charts/kafka-ui/templates/NOTES.txt b/charts/kafka-ui/templates/NOTES.txt
deleted file mode 100644
index 94e8d394344..00000000000
--- a/charts/kafka-ui/templates/NOTES.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-1. Get the application URL by running these commands:
-{{- if .Values.ingress.enabled }}
-{{- range $host := .Values.ingress.hosts }}
- {{- range .paths }}
- http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ . }}
- {{- end }}
-{{- end }}
-{{- else if contains "NodePort" .Values.service.type }}
- export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "kafka-ui.fullname" . }})
- export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
- echo http://$NODE_IP:$NODE_PORT
-{{- else if contains "LoadBalancer" .Values.service.type }}
- NOTE: It may take a few minutes for the LoadBalancer IP to be available.
- You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "kafka-ui.fullname" . }}'
- export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "kafka-ui.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
- echo http://$SERVICE_IP:{{ .Values.service.port }}
-{{- else if contains "ClusterIP" .Values.service.type }}
- export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "kafka-ui.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
- echo "Visit http://127.0.0.1:8080 to use your application"
- kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:8080
-{{- end }}
diff --git a/charts/kafka-ui/templates/_helpers.tpl b/charts/kafka-ui/templates/_helpers.tpl
deleted file mode 100644
index 076c4886f80..00000000000
--- a/charts/kafka-ui/templates/_helpers.tpl
+++ /dev/null
@@ -1,63 +0,0 @@
-{{/* vim: set filetype=mustache: */}}
-{{/*
-Expand the name of the chart.
-*/}}
-{{- define "kafka-ui.name" -}}
-{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
-{{- end }}
-
-{{/*
-Create a default fully qualified app name.
-We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
-If release name contains chart name it will be used as a full name.
-*/}}
-{{- define "kafka-ui.fullname" -}}
-{{- if .Values.fullnameOverride }}
-{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
-{{- else }}
-{{- $name := default .Chart.Name .Values.nameOverride }}
-{{- if contains $name .Release.Name }}
-{{- .Release.Name | trunc 63 | trimSuffix "-" }}
-{{- else }}
-{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
-{{- end }}
-{{- end }}
-{{- end }}
-
-{{/*
-Create chart name and version as used by the chart label.
-*/}}
-{{- define "kafka-ui.chart" -}}
-{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
-{{- end }}
-
-{{/*
-Common labels
-*/}}
-{{- define "kafka-ui.labels" -}}
-helm.sh/chart: {{ include "kafka-ui.chart" . }}
-{{ include "kafka-ui.selectorLabels" . }}
-{{- if .Chart.AppVersion }}
-app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
-{{- end }}
-app.kubernetes.io/managed-by: {{ .Release.Service }}
-{{- end }}
-
-{{/*
-Selector labels
-*/}}
-{{- define "kafka-ui.selectorLabels" -}}
-app.kubernetes.io/name: {{ include "kafka-ui.name" . }}
-app.kubernetes.io/instance: {{ .Release.Name }}
-{{- end }}
-
-{{/*
-Create the name of the service account to use
-*/}}
-{{- define "kafka-ui.serviceAccountName" -}}
-{{- if .Values.serviceAccount.create }}
-{{- default (include "kafka-ui.fullname" .) .Values.serviceAccount.name }}
-{{- else }}
-{{- default "default" .Values.serviceAccount.name }}
-{{- end }}
-{{- end }}
diff --git a/charts/kafka-ui/templates/configmap.yaml b/charts/kafka-ui/templates/configmap.yaml
deleted file mode 100644
index c802e575222..00000000000
--- a/charts/kafka-ui/templates/configmap.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-apiVersion: v1
-kind: ConfigMap
-metadata:
- name: {{ include "kafka-ui.fullname" . }}
- labels:
- {{- include "kafka-ui.labels" . | nindent 4 }}
-data:
- {{- toYaml .Values.envs.config | nindent 2 }}
\ No newline at end of file
diff --git a/charts/kafka-ui/templates/deployment.yaml b/charts/kafka-ui/templates/deployment.yaml
deleted file mode 100644
index 630d93c8329..00000000000
--- a/charts/kafka-ui/templates/deployment.yaml
+++ /dev/null
@@ -1,103 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- name: {{ include "kafka-ui.fullname" . }}
- labels:
- {{- include "kafka-ui.labels" . | nindent 4 }}
-spec:
-{{- if not .Values.autoscaling.enabled }}
- replicas: {{ .Values.replicaCount }}
-{{- end }}
- selector:
- matchLabels:
- {{- include "kafka-ui.selectorLabels" . | nindent 6 }}
- template:
- metadata:
- annotations:
- {{- with .Values.podAnnotations }}
- {{- toYaml . | nindent 8 }}
- {{- end }}
- checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }}
- checksum/secret: {{ include (print $.Template.BasePath "/secret.yaml") . | sha256sum }}
- labels:
- {{- include "kafka-ui.selectorLabels" . | nindent 8 }}
- {{- if .Values.podLabels }}
- {{- toYaml .Values.podLabels | nindent 8 }}
- {{- end }}
- spec:
- {{- with .Values.imagePullSecrets }}
- imagePullSecrets:
- {{- toYaml . | nindent 8 }}
- {{- end }}
- {{- with .Values.initContainers }}
- initContainers:
- {{- toYaml . | nindent 8 }}
- {{- end }}
- serviceAccountName: {{ include "kafka-ui.serviceAccountName" . }}
- securityContext:
- {{- toYaml .Values.podSecurityContext | nindent 8 }}
- containers:
- - name: {{ .Chart.Name }}
- securityContext:
- {{- toYaml .Values.securityContext | nindent 12 }}
- image: "{{ .Values.image.registry }}/{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
- imagePullPolicy: {{ .Values.image.pullPolicy }}
- {{- with .Values.env }}
- env:
- {{- toYaml . | nindent 12 }}
- {{- end }}
- envFrom:
- {{- if .Values.existingConfigMap }}
- - configMapRef:
- name: {{ .Values.existingConfigMap }}
- {{- end }}
- - configMapRef:
- name: {{ include "kafka-ui.fullname" . }}
- {{- if .Values.existingSecret }}
- - secretRef:
- name: {{ .Values.existingSecret }}
- {{- end }}
- - secretRef:
- name: {{ include "kafka-ui.fullname" . }}
- ports:
- - name: http
- containerPort: 8080
- protocol: TCP
- livenessProbe:
- httpGet:
- {{- $contextPath := .Values.envs.config.SERVER_SERVLET_CONTEXT_PATH | default "" | printf "%s/actuator/health" | urlParse }}
- path: {{ get $contextPath "path" }}
- port: http
- initialDelaySeconds: 60
- periodSeconds: 30
- timeoutSeconds: 10
- readinessProbe:
- httpGet:
- {{- $contextPath := .Values.envs.config.SERVER_SERVLET_CONTEXT_PATH | default "" | printf "%s/actuator/health" | urlParse }}
- path: {{ get $contextPath "path" }}
- port: http
- initialDelaySeconds: 60
- periodSeconds: 30
- timeoutSeconds: 10
- resources:
- {{- toYaml .Values.resources | nindent 12 }}
- {{- with .Values.volumeMounts }}
- volumeMounts:
- {{- toYaml . | nindent 12 }}
- {{- end }}
- {{- with .Values.volumes }}
- volumes:
- {{- toYaml . | nindent 8 }}
- {{- end }}
- {{- with .Values.nodeSelector }}
- nodeSelector:
- {{- toYaml . | nindent 8 }}
- {{- end }}
- {{- with .Values.affinity }}
- affinity:
- {{- toYaml . | nindent 8 }}
- {{- end }}
- {{- with .Values.tolerations }}
- tolerations:
- {{- toYaml . | nindent 8 }}
- {{- end }}
diff --git a/charts/kafka-ui/templates/hpa.yaml b/charts/kafka-ui/templates/hpa.yaml
deleted file mode 100644
index 1509ef3f010..00000000000
--- a/charts/kafka-ui/templates/hpa.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-{{- if .Values.autoscaling.enabled }}
-apiVersion: autoscaling/v2beta1
-kind: HorizontalPodAutoscaler
-metadata:
- name: {{ include "kafka-ui.fullname" . }}
- labels:
- {{- include "kafka-ui.labels" . | nindent 4 }}
-spec:
- scaleTargetRef:
- apiVersion: apps/v1
- kind: Deployment
- name: {{ include "kafka-ui.fullname" . }}
- minReplicas: {{ .Values.autoscaling.minReplicas }}
- maxReplicas: {{ .Values.autoscaling.maxReplicas }}
- metrics:
- {{- if .Values.autoscaling.targetCPUUtilizationPercentage }}
- - type: Resource
- resource:
- name: cpu
- targetAverageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}
- {{- end }}
- {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }}
- - type: Resource
- resource:
- name: memory
- targetAverageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }}
- {{- end }}
-{{- end }}
diff --git a/charts/kafka-ui/templates/ingress.yaml b/charts/kafka-ui/templates/ingress.yaml
deleted file mode 100644
index 8631ea0130c..00000000000
--- a/charts/kafka-ui/templates/ingress.yaml
+++ /dev/null
@@ -1,87 +0,0 @@
-{{- if .Values.ingress.enabled -}}
-{{- $fullName := include "kafka-ui.fullname" . -}}
-{{- $svcPort := .Values.service.port -}}
-{{- if $.Capabilities.APIVersions.Has "networking.k8s.io/v1" }}
-apiVersion: networking.k8s.io/v1
-{{- else if $.Capabilities.APIVersions.Has "networking.k8s.io/v1beta1" }}
-apiVersion: networking.k8s.io/v1beta1
-{{- else }}
-apiVersion: extensions/v1beta1
-{{- end }}
-kind: Ingress
-metadata:
- name: {{ $fullName }}
- labels:
- {{- include "kafka-ui.labels" . | nindent 4 }}
- {{- with .Values.ingress.annotations }}
- annotations:
- {{- toYaml . | nindent 4 }}
- {{- end }}
-spec:
- {{- if .Values.ingress.tls.enabled }}
- tls:
- - hosts:
- - {{ tpl .Values.ingress.host . }}
- secretName: {{ .Values.ingress.tls.secretName }}
- {{- end }}
- {{- if .Values.ingress.ingressClassName }}
- ingressClassName: {{ .Values.ingress.ingressClassName }}
- {{- end }}
- rules:
- - http:
- paths:
-{{- if $.Capabilities.APIVersions.Has "networking.k8s.io/v1" -}}
- {{- range .Values.ingress.precedingPaths }}
- - path: {{ .path }}
- pathType: Prefix
- backend:
- service:
- name: {{ .serviceName }}
- port:
- number: {{ .servicePort }}
- {{- end }}
- - backend:
- service:
- name: {{ $fullName }}
- port:
- number: {{ $svcPort }}
- pathType: Prefix
-{{- if .Values.ingress.path }}
- path: {{ .Values.ingress.path }}
-{{- end }}
- {{- range .Values.ingress.succeedingPaths }}
- - path: {{ .path }}
- pathType: Prefix
- backend:
- service:
- name: {{ .serviceName }}
- port:
- number: {{ .servicePort }}
- {{- end }}
-{{- if tpl .Values.ingress.host . }}
- host: {{tpl .Values.ingress.host . }}
-{{- end }}
-{{- else -}}
- {{- range .Values.ingress.precedingPaths }}
- - path: {{ .path }}
- backend:
- serviceName: {{ .serviceName }}
- servicePort: {{ .servicePort }}
- {{- end }}
- - backend:
- serviceName: {{ $fullName }}
- servicePort: {{ $svcPort }}
-{{- if .Values.ingress.path }}
- path: {{ .Values.ingress.path }}
-{{- end }}
- {{- range .Values.ingress.succeedingPaths }}
- - path: {{ .path }}
- backend:
- serviceName: {{ .serviceName }}
- servicePort: {{ .servicePort }}
- {{- end }}
-{{- if tpl .Values.ingress.host . }}
- host: {{ tpl .Values.ingress.host . }}
-{{- end }}
-{{- end }}
-{{- end }}
diff --git a/charts/kafka-ui/templates/networkpolicy-egress.yaml b/charts/kafka-ui/templates/networkpolicy-egress.yaml
deleted file mode 100644
index 4f582802712..00000000000
--- a/charts/kafka-ui/templates/networkpolicy-egress.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
-{{- if and .Values.networkPolicy.enabled .Values.networkPolicy.egressRules.customRules }}
-apiVersion: networking.k8s.io/v1
-kind: NetworkPolicy
-metadata:
- name: {{ printf "%s-egress" (include "kafka-ui.fullname" .) }}
- labels:
- {{- include "kafka-ui.labels" . | nindent 4 }}
-spec:
- podSelector:
- matchLabels:
- {{- include "kafka-ui.selectorLabels" . | nindent 6 }}
- policyTypes:
- - Egress
- egress:
- {{- if .Values.networkPolicy.egressRules.customRules }}
- {{- toYaml .Values.networkPolicy.egressRules.customRules | nindent 4 }}
- {{- end }}
-{{- end }}
diff --git a/charts/kafka-ui/templates/networkpolicy-ingress.yaml b/charts/kafka-ui/templates/networkpolicy-ingress.yaml
deleted file mode 100644
index 74988676b52..00000000000
--- a/charts/kafka-ui/templates/networkpolicy-ingress.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
-{{- if and .Values.networkPolicy.enabled .Values.networkPolicy.ingressRules.customRules }}
-apiVersion: networking.k8s.io/v1
-kind: NetworkPolicy
-metadata:
- name: {{ printf "%s-ingress" (include "kafka-ui.fullname" .) }}
- labels:
- {{- include "kafka-ui.labels" . | nindent 4 }}
-spec:
- podSelector:
- matchLabels:
- {{- include "kafka-ui.selectorLabels" . | nindent 6 }}
- policyTypes:
- - Ingress
- ingress:
- {{- if .Values.networkPolicy.ingressRules.customRules }}
- {{- toYaml .Values.networkPolicy.ingressRules.customRules | nindent 4 }}
- {{- end }}
-{{- end }}
diff --git a/charts/kafka-ui/templates/secret.yaml b/charts/kafka-ui/templates/secret.yaml
deleted file mode 100644
index a2ebf0fdba8..00000000000
--- a/charts/kafka-ui/templates/secret.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-apiVersion: v1
-kind: Secret
-metadata:
- name: {{ include "kafka-ui.fullname" . }}
- labels:
- {{- include "kafka-ui.labels" . | nindent 4 }}
-type: Opaque
-data:
- {{- toYaml .Values.envs.secret | nindent 2 }}
\ No newline at end of file
diff --git a/charts/kafka-ui/templates/service.yaml b/charts/kafka-ui/templates/service.yaml
deleted file mode 100644
index 5801135c4c7..00000000000
--- a/charts/kafka-ui/templates/service.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-apiVersion: v1
-kind: Service
-metadata:
- name: {{ include "kafka-ui.fullname" . }}
- labels:
- {{- include "kafka-ui.labels" . | nindent 4 }}
-{{- if .Values.service.annotations }}
- annotations:
-{{ toYaml .Values.service.annotations | nindent 4 }}
-{{- end }}
-spec:
- type: {{ .Values.service.type }}
- ports:
- - port: {{ .Values.service.port }}
- targetPort: http
- protocol: TCP
- name: http
- {{- if (and (eq .Values.service.type "NodePort") .Values.service.nodePort) }}
- nodePort: {{ .Values.service.nodePort }}
- {{- end }}
- selector:
- {{- include "kafka-ui.selectorLabels" . | nindent 4 }}
diff --git a/charts/kafka-ui/templates/serviceaccount.yaml b/charts/kafka-ui/templates/serviceaccount.yaml
deleted file mode 100644
index b89551c833e..00000000000
--- a/charts/kafka-ui/templates/serviceaccount.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-{{- if .Values.serviceAccount.create -}}
-apiVersion: v1
-kind: ServiceAccount
-metadata:
- name: {{ include "kafka-ui.serviceAccountName" . }}
- labels:
- {{- include "kafka-ui.labels" . | nindent 4 }}
- {{- with .Values.serviceAccount.annotations }}
- annotations:
- {{- toYaml . | nindent 4 }}
- {{- end }}
-{{- end }}
diff --git a/charts/kafka-ui/values.yaml b/charts/kafka-ui/values.yaml
deleted file mode 100644
index 33dcfb0b300..00000000000
--- a/charts/kafka-ui/values.yaml
+++ /dev/null
@@ -1,127 +0,0 @@
-replicaCount: 1
-
-image:
- registry: docker.io
- repository: provectuslabs/kafka-ui
- pullPolicy: IfNotPresent
- # Overrides the image tag whose default is the chart appVersion.
- tag: ""
-
-imagePullSecrets: []
-nameOverride: ""
-fullnameOverride: ""
-
-serviceAccount:
- # Specifies whether a service account should be created
- create: true
- # Annotations to add to the service account
- annotations: {}
- # The name of the service account to use.
- # If not set and create is true, a name is generated using the fullname template
- name: ""
-
-existingConfigMap: ""
-existingSecret: ""
-envs:
- secret: {}
- config: {}
-
-networkPolicy:
- enabled: false
- egressRules:
- ## Additional custom egress rules
- ## e.g:
- ## customRules:
- ## - to:
- ## - namespaceSelector:
- ## matchLabels:
- ## label: example
- customRules: []
- ingressRules:
- ## Additional custom ingress rules
- ## e.g:
- ## customRules:
- ## - from:
- ## - namespaceSelector:
- ## matchLabels:
- ## label: example
- customRules: []
-
-podAnnotations: {}
-podLabels: {}
-
-podSecurityContext: {}
- # fsGroup: 2000
-
-securityContext: {}
- # capabilities:
- # drop:
- # - ALL
- # readOnlyRootFilesystem: true
- # runAsNonRoot: true
- # runAsUser: 1000
-
-service:
- type: ClusterIP
- port: 80
- # if you want to force a specific nodePort. Must be use with service.type=NodePort
- # nodePort:
-
-# Ingress configuration
-ingress:
- # Enable ingress resource
- enabled: false
-
- # Annotations for the Ingress
- annotations: {}
-
- # ingressClassName for the Ingress
- ingressClassName: ""
-
- # The path for the Ingress
- path: ""
-
- # The hostname for the Ingress
- host: ""
-
- # configs for Ingress TLS
- tls:
- # Enable TLS termination for the Ingress
- enabled: false
- # the name of a pre-created Secret containing a TLS private key and certificate
- secretName: ""
-
- # HTTP paths to add to the Ingress before the default path
- precedingPaths: []
-
- # Http paths to add to the Ingress after the default path
- succeedingPaths: []
-
-resources: {}
- # limits:
- # cpu: 200m
- # memory: 512Mi
- # requests:
- # cpu: 200m
- # memory: 256Mi
-
-autoscaling:
- enabled: false
- minReplicas: 1
- maxReplicas: 100
- targetCPUUtilizationPercentage: 80
- # targetMemoryUtilizationPercentage: 80
-
-nodeSelector: {}
-
-tolerations: []
-
-affinity: {}
-
-env: {}
-
-initContainers: {}
-
-volumeMounts: {}
-
-volumes: {}
diff --git a/docker-compose.md b/docker-compose.md
deleted file mode 100644
index d3912c67151..00000000000
--- a/docker-compose.md
+++ /dev/null
@@ -1,45 +0,0 @@
-# Quick Start with docker-compose
-
-Environment variables documentation - [see usage](README.md#env_variables).
-We have plenty of example files with more complex configurations. Please check them out in ``docker`` directory.
-
-* Add a new service in docker-compose.yml
-
-```yaml
-version: '2'
-services:
- kafka-ui:
- image: provectuslabs/kafka-ui
- container_name: kafka-ui
- ports:
- - "8080:8080"
- restart: always
- environment:
- - KAFKA_CLUSTERS_0_NAME=local
- - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
- - KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181
-```
-
-* If you prefer UI for Apache Kafka in read only mode
-
-```yaml
-version: '2'
-services:
- kafka-ui:
- image: provectuslabs/kafka-ui
- container_name: kafka-ui
- ports:
- - "8080:8080"
- restart: always
- environment:
- - KAFKA_CLUSTERS_0_NAME=local
- - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
- - KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181
- - KAFKA_CLUSTERS_0_READONLY=true
-```
-
-* Start UI for Apache Kafka process
-
-```bash
-docker-compose up -d kafka-ui
-```
diff --git a/documentation/compose/DOCKER_COMPOSE.md b/documentation/compose/DOCKER_COMPOSE.md
index 2ea3f09c990..1ca7de1dc0b 100644
--- a/documentation/compose/DOCKER_COMPOSE.md
+++ b/documentation/compose/DOCKER_COMPOSE.md
@@ -1,12 +1,16 @@
# Descriptions of docker-compose configurations (*.yaml)
1. [kafka-ui.yaml](./kafka-ui.yaml) - Default configuration with 2 kafka clusters with two nodes of Schema Registry, one kafka-connect and a few dummy topics.
-2. [kafka-clusters-only.yaml](./kafka-clusters-only.yaml) - A configuration for development purposes, everything besides `kafka-ui` itself (to be run locally).
-3. [kafka-ui-ssl.yml](./kafka-ssl.yml) - Connect to Kafka via TLS/SSL
-4. [kafka-cluster-sr-auth.yaml](./kafka-cluster-sr-auth.yaml) - Schema registry with authentication.
-5. [kafka-ui-auth-context.yaml](./kafka-ui-auth-context.yaml) - Basic (username/password) authentication with custom path (URL) (issue 861).
-6. [kafka-ui-connectors.yaml](./kafka-ui-connectors.yaml) - Configuration with different connectors (github-source, s3, sink-activities, source-activities) and Ksql functionality.
-7. [kafka-ui-jmx-secured.yml](./kafka-ui-jmx-secured.yml) - Kafka’s JMX with SSL and authentication.
-8. [kafka-ui-reverse-proxy.yaml](./kafka-ui-reverse-proxy.yaml) - An example for using the app behind a proxy (like nginx).
-9. [kafka-ui-sasl.yaml](./kafka-ui-sasl.yaml) - SASL auth for Kafka.
-10. [kafka-ui-traefik-proxy.yaml](./kafka-ui-traefik-proxy.yaml) - Traefik specific proxy configuration.
+2. [kafka-ui-arm64.yaml](./kafka-ui-arm64.yaml) - Default configuration for the ARM64 (Mac M1) architecture: one ZooKeeper-less kafka cluster with one node of Schema Registry, one kafka-connect and a few dummy topics.
+3. [kafka-clusters-only.yaml](./kafka-clusters-only.yaml) - A configuration for development purposes, everything besides `kafka-ui` itself (to be run locally).
+4. [kafka-ssl.yml](./kafka-ssl.yml) - Connect to Kafka via TLS/SSL.
+5. [kafka-cluster-sr-auth.yaml](./kafka-cluster-sr-auth.yaml) - Schema registry with authentication.
+6. [kafka-ui-auth-context.yaml](./kafka-ui-auth-context.yaml) - Basic (username/password) authentication with a custom context path (URL) (issue 861).
+7. [e2e-tests.yaml](./e2e-tests.yaml) - Configuration with different connectors (github-source, s3, sink-activities, source-activities) and KSQL functionality.
+8. [kafka-ui-jmx-secured.yml](./kafka-ui-jmx-secured.yml) - Kafka’s JMX with SSL and authentication.
+9. [nginx-proxy.yaml](./nginx-proxy.yaml) - An example of running the app behind a proxy (like nginx).
+10. [kafka-ui-sasl.yaml](./kafka-ui-sasl.yaml) - SASL auth for Kafka.
+11. [traefik-proxy.yaml](./traefik-proxy.yaml) - Traefik-specific proxy configuration.
+12. [oauth-cognito.yaml](./oauth-cognito.yaml) - OAuth2 with Cognito.
+13. [kafka-ui-with-jmx-exporter.yaml](./kafka-ui-with-jmx-exporter.yaml) - A configuration with 2 kafka clusters with Prometheus JMX exporters enabled instead of plain JMX.
+14. [kafka-with-zookeeper.yaml](./kafka-with-zookeeper.yaml) - An example of using Kafka with ZooKeeper.
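+
+Any of these can be started with Docker Compose from this directory; for example, to bring up the default configuration (substitute the file you need):
+
+```bash
+docker-compose -f kafka-ui.yaml up -d
+```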
\ No newline at end of file
diff --git a/documentation/compose/auth-ldap.yaml b/documentation/compose/auth-ldap.yaml
deleted file mode 100644
index 7c25adce5dd..00000000000
--- a/documentation/compose/auth-ldap.yaml
+++ /dev/null
@@ -1,86 +0,0 @@
----
-version: '2'
-services:
-
- kafka-ui:
- container_name: kafka-ui
- image: provectuslabs/kafka-ui:latest
- ports:
- - 8080:8080
- depends_on:
- - zookeeper0
- - kafka0
- - schemaregistry0
- environment:
- KAFKA_CLUSTERS_0_NAME: local
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
- KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper0:2181
- KAFKA_CLUSTERS_0_JMXPORT: 9997
- KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry0:8085
- KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
- KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
- KAFKA_CLUSTERS_1_NAME: secondLocal
- KAFKA_CLUSTERS_1_BOOTSTRAPSERVERS: kafka1:29092
- KAFKA_CLUSTERS_1_ZOOKEEPER: zookeeper1:2181
- KAFKA_CLUSTERS_1_JMXPORT: 9998
- KAFKA_CLUSTERS_1_SCHEMAREGISTRY: http://schemaregistry1:8085
- KAFKA_CLUSTERS_1_KAFKACONNECT_0_NAME: first
- KAFKA_CLUSTERS_1_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
- AUTH_TYPE: "LDAP"
- SPRING_LDAP_URLS: "ldap://ldap:10389"
- SPRING_LDAP_DN_PATTERN: "cn={0},ou=people,dc=planetexpress,dc=com"
-# USER SEARCH FILTER INSTEAD OF DN
-# SPRING_LDAP_USERFILTER_SEARCHBASE: "dc=planetexpress,dc=com"
-# SPRING_LDAP_USERFILTER_SEARCHFILTER: "(&(uid={0})(objectClass=inetOrgPerson))"
-# LDAP ADMIN USER
-# SPRING_LDAP_ADMINUSER: "cn=admin,dc=planetexpress,dc=com"
-# SPRING_LDAP_ADMINPASSWORD: "GoodNewsEveryone"
-
-
-
- ldap:
- image: rroemhild/test-openldap:latest
- hostname: "ldap"
-
- zookeeper0:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2181:2181
-
- kafka0:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper0
- ports:
- - 9092:9092
- - 9997:9997
- environment:
- KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- JMX_PORT: 9997
- KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
-
- schemaregistry0:
- image: confluentinc/cp-schema-registry:5.5.0
- ports:
- - 8085:8085
- depends_on:
- - zookeeper0
- - kafka0
- environment:
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
- SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper0:2181
- SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
- SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
- SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
-
- SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
- SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
- SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
\ No newline at end of file
diff --git a/documentation/compose/message.json b/documentation/compose/data/message.json
similarity index 100%
rename from documentation/compose/message.json
rename to documentation/compose/data/message.json
diff --git a/documentation/compose/proxy.conf b/documentation/compose/data/proxy.conf
similarity index 100%
rename from documentation/compose/proxy.conf
rename to documentation/compose/data/proxy.conf
diff --git a/documentation/compose/e2e-tests.yaml b/documentation/compose/e2e-tests.yaml
new file mode 100644
index 00000000000..3685d48c238
--- /dev/null
+++ b/documentation/compose/e2e-tests.yaml
@@ -0,0 +1,190 @@
+---
+version: '3.5'
+services:
+
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8080:8080
+ healthcheck:
+ test: wget --no-verbose --tries=1 --spider http://localhost:8080/actuator/health
+ interval: 30s
+ timeout: 10s
+ retries: 10
+ depends_on:
+ kafka0:
+ condition: service_healthy
+ schemaregistry0:
+ condition: service_healthy
+ kafka-connect0:
+ condition: service_healthy
+ environment:
+ KAFKA_CLUSTERS_0_NAME: local
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
+ KAFKA_CLUSTERS_0_METRICS_PORT: 9997
+ KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry0:8085
+ KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
+ KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
+ KAFKA_CLUSTERS_0_KSQLDBSERVER: http://ksqldb:8088
+
+ kafka0:
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka0
+ container_name: kafka0
+ healthcheck:
+ test: unset JMX_PORT && KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9999" && kafka-broker-api-versions --bootstrap-server=localhost:9092
+ interval: 30s
+ timeout: 10s
+ retries: 10
+ ports:
+ - "9092:9092"
+ - "9997:9997"
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
+ KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
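+      # KRaft (ZooKeeper-less) mode: this single node serves as both broker and controller.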
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
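+    # The mounted update_run.sh helper is expected to prepare KRaft storage
+    # (cluster id + format) before delegating to the standard Confluent entrypoint.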
+
+ schemaregistry0:
+ image: confluentinc/cp-schema-registry:7.2.1
+ ports:
+ - 8085:8085
+ depends_on:
+ kafka0:
+ condition: service_healthy
+ healthcheck:
+ test: [ "CMD", "timeout", "1", "curl", "--silent", "--fail", "http://schemaregistry0:8085/subjects" ]
+ interval: 30s
+ timeout: 10s
+ retries: 10
+ environment:
+ SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
+ SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
+ SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
+ SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
+
+ SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
+ SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
+ SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
+
+ kafka-connect0:
+ build:
+ context: ./kafka-connect
+ args:
+ image: confluentinc/cp-kafka-connect:6.0.1
+ ports:
+ - 8083:8083
+ depends_on:
+ kafka0:
+ condition: service_healthy
+ schemaregistry0:
+ condition: service_healthy
+ healthcheck:
+ test: [ "CMD", "nc", "127.0.0.1", "8083" ]
+ interval: 30s
+ timeout: 10s
+ retries: 10
+ environment:
+ CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
+ CONNECT_GROUP_ID: compose-connect-group
+ CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
+ CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
+ CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
+ CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
+ CONNECT_STATUS_STORAGE_TOPIC: _connect_status
+ CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
+ CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+ CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
+ CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+ CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
+ CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+ CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+ CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
+ CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
+ # AWS_ACCESS_KEY_ID: ""
+ # AWS_SECRET_ACCESS_KEY: ""
+
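+  # One-shot helper: waits for the broker, pre-creates the demo topics and seeds
+  # the users topic from data/message.json, then exits.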
+ kafka-init-topics:
+ image: confluentinc/cp-kafka:7.2.1
+ volumes:
+ - ./data/message.json:/data/message.json
+ depends_on:
+ kafka0:
+ condition: service_healthy
+ command: "bash -c 'echo Waiting for Kafka to be ready... && \
+ cub kafka-ready -b kafka0:29092 1 30 && \
+ kafka-topics --create --topic users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
+ kafka-topics --create --topic messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
+ kafka-console-producer --bootstrap-server kafka0:29092 --topic users < /data/message.json'"
+
+ postgres-db:
+ build:
+ context: ./postgres
+ args:
+ image: postgres:9.6.22
+ ports:
+ - 5432:5432
+ healthcheck:
+ test: [ "CMD-SHELL", "pg_isready -U dev_user" ]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+ environment:
+ POSTGRES_USER: 'dev_user'
+ POSTGRES_PASSWORD: '12345'
+
+ create-connectors:
+ image: ellerbrock/alpine-bash-curl-ssl
+ depends_on:
+ postgres-db:
+ condition: service_healthy
+ kafka-connect0:
+ condition: service_healthy
+ volumes:
+ - ./connectors:/connectors
+ command: bash -c '/connectors/start.sh'
+
+ ksqldb:
+ image: confluentinc/ksqldb-server:0.18.0
+ healthcheck:
+ test: [ "CMD", "timeout", "1", "curl", "--silent", "--fail", "http://localhost:8088/info" ]
+ interval: 30s
+ timeout: 10s
+ retries: 10
+ depends_on:
+ kafka0:
+ condition: service_healthy
+ kafka-connect0:
+ condition: service_healthy
+ schemaregistry0:
+ condition: service_healthy
+ ports:
+ - 8088:8088
+ environment:
+ KSQL_CUB_KAFKA_TIMEOUT: 120
+ KSQL_LISTENERS: http://0.0.0.0:8088
+ KSQL_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
+ KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: "true"
+ KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: "true"
+ KSQL_KSQL_CONNECT_URL: http://kafka-connect0:8083
+ KSQL_KSQL_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
+ KSQL_KSQL_SERVICE_ID: my_ksql_1
+ KSQL_KSQL_HIDDEN_TOPICS: '^_.*'
+ KSQL_CACHE_MAX_BYTES_BUFFERING: 0
diff --git a/documentation/compose/jaas/client.properties b/documentation/compose/jaas/client.properties
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/kafka_connect.jaas b/documentation/compose/jaas/kafka_connect.jaas
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/kafka_connect.password b/documentation/compose/jaas/kafka_connect.password
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/kafka_server.conf b/documentation/compose/jaas/kafka_server.conf
index ef41c992e21..0c1fb34652a 100644
--- a/documentation/compose/jaas/kafka_server.conf
+++ b/documentation/compose/jaas/kafka_server.conf
@@ -11,4 +11,8 @@ KafkaClient {
user_admin="admin-secret";
};
-Client {};
\ No newline at end of file
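+// Used by the broker when connecting to a SASL-enabled ZooKeeper; the credentials
+// must match a user defined in zookeeper_jaas.conf.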
+Client {
+ org.apache.zookeeper.server.auth.DigestLoginModule required
+ username="zkuser"
+ password="zkuserpassword";
+};
diff --git a/documentation/compose/jaas/schema_registry.jaas b/documentation/compose/jaas/schema_registry.jaas
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/schema_registry.password b/documentation/compose/jaas/schema_registry.password
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/zookeeper_jaas.conf b/documentation/compose/jaas/zookeeper_jaas.conf
new file mode 100644
index 00000000000..2d7fd1b1c29
--- /dev/null
+++ b/documentation/compose/jaas/zookeeper_jaas.conf
@@ -0,0 +1,4 @@
+Server {
+ org.apache.zookeeper.server.auth.DigestLoginModule required
+ user_zkuser="zkuserpassword";
+};
diff --git a/documentation/compose/jmx-exporter/kafka-broker.yml b/documentation/compose/jmx-exporter/kafka-broker.yml
new file mode 100644
index 00000000000..efe0a463567
--- /dev/null
+++ b/documentation/compose/jmx-exporter/kafka-broker.yml
@@ -0,0 +1,2 @@
+rules:
+ - pattern: ".*"
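+  # Catch-all rule: exports every broker MBean. Narrow the pattern in real
+  # deployments to keep Prometheus scrapes small.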
diff --git a/documentation/compose/jmx-exporter/kafka-prepare-and-run b/documentation/compose/jmx-exporter/kafka-prepare-and-run
new file mode 100755
index 00000000000..2ccf17df505
--- /dev/null
+++ b/documentation/compose/jmx-exporter/kafka-prepare-and-run
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+JAVA_AGENT_FILE="/usr/share/jmx_exporter/jmx_prometheus_javaagent.jar"
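+# Download the agent only if it is not already present (e.g. cached on a mounted volume).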
+if [ ! -f "$JAVA_AGENT_FILE" ]
+then
+ echo "Downloading jmx_exporter javaagent"
+ curl -o $JAVA_AGENT_FILE https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.16.1/jmx_prometheus_javaagent-0.16.1.jar
+fi
+
+exec /etc/confluent/docker/run
\ No newline at end of file
diff --git a/documentation/compose/kafka-cluster-sr-auth.yaml b/documentation/compose/kafka-cluster-sr-auth.yaml
index 6dbcb12e361..09403cef27a 100644
--- a/documentation/compose/kafka-cluster-sr-auth.yaml
+++ b/documentation/compose/kafka-cluster-sr-auth.yaml
@@ -2,43 +2,44 @@
version: '2'
services:
- zookeeper1:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2182:2181
-
kafka1:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper1
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka1
+ container_name: kafka1
+ ports:
+ - "9092:9092"
+ - "9997:9997"
environment:
KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper1:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:29092,PLAINTEXT_HOST://localhost:9093
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka1:29092,PLAINTEXT_HOST://localhost:9092'
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- JMX_PORT: 9998
- KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=localhost -Dcom.sun.management.jmxremote.rmi.port=9998
- ports:
- - 9093:9093
- - 9998:9998
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka1:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka1:29092,CONTROLLER://kafka1:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
schemaregistry1:
- image: confluentinc/cp-schema-registry:5.5.0
+ image: confluentinc/cp-schema-registry:7.2.1
ports:
- 18085:8085
depends_on:
- - zookeeper1
- kafka1
volumes:
- ./jaas:/conf
environment:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:29092
- SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper1:2181
SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
SCHEMA_REGISTRY_HOST_NAME: schemaregistry1
SCHEMA_REGISTRY_LISTENERS: http://schemaregistry1:8085
@@ -54,13 +55,29 @@ services:
SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
kafka-init-topics:
- image: confluentinc/cp-kafka:5.3.1
+ image: confluentinc/cp-kafka:7.2.1
volumes:
- - ./message.json:/data/message.json
+ - ./data/message.json:/data/message.json
depends_on:
- kafka1
command: "bash -c 'echo Waiting for Kafka to be ready... && \
cub kafka-ready -b kafka1:29092 1 30 && \
- kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
- kafka-topics --create --topic second.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
- kafka-console-producer --broker-list kafka1:29092 -topic second.users < /data/message.json'"
+ kafka-topics --create --topic users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka1:29092 && \
+ kafka-topics --create --topic messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka1:29092 && \
+ kafka-console-producer --bootstrap-server kafka1:29092 --topic users < /data/message.json'"
+
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8080:8080
+ depends_on:
+ - kafka1
+ - schemaregistry1
+ environment:
+ KAFKA_CLUSTERS_0_NAME: local
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka1:29092
+ KAFKA_CLUSTERS_0_METRICS_PORT: 9997
+ KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry1:8085
+ KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_USERNAME: admin
+ KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD: letmein
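+      # admin/letmein should match the basic-auth credentials in the mounted
+      # ./jaas config (see jaas/schema_registry.password).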
diff --git a/documentation/compose/kafka-clusters-only.yaml b/documentation/compose/kafka-clusters-only.yaml
deleted file mode 100644
index 1e51dd5a4c9..00000000000
--- a/documentation/compose/kafka-clusters-only.yaml
+++ /dev/null
@@ -1,146 +0,0 @@
----
-version: '2'
-services:
-
- zookeeper0:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2181:2181
-
- kafka0:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper0
- environment:
- KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2
- JMX_PORT: 9997
- KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=localhost -Dcom.sun.management.jmxremote.rmi.port=9997
- ports:
- - 9092:9092
- - 9997:9997
-
- kafka01:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper0
- environment:
- KAFKA_BROKER_ID: 2
- KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka01:29092,PLAINTEXT_HOST://localhost:9094
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,PLAIN:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2
- JMX_PORT: 9999
- KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=localhost -Dcom.sun.management.jmxremote.rmi.port=9999
- ports:
- - 9094:9094
- - 9999:9999
-
- zookeeper1:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2182:2181
-
- kafka1:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper1
- environment:
- KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper1:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:29092,PLAINTEXT_HOST://localhost:9093
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- JMX_PORT: 9998
- KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=localhost -Dcom.sun.management.jmxremote.rmi.port=9998
- ports:
- - 9093:9093
- - 9998:9998
-
- schemaregistry0:
- image: confluentinc/cp-schema-registry:5.5.0
- depends_on:
- - zookeeper0
- - kafka0
- - kafka01
- environment:
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092,PLAINTEXT://kafka01:29092
- SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper0:2181
- SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
- SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
- SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
-
- SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
- SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
- SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
- ports:
- - 8085:8085
-
- schemaregistry1:
- image: confluentinc/cp-schema-registry:5.5.0
- ports:
- - 18085:8085
- depends_on:
- - zookeeper1
- - kafka1
- environment:
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:29092
- SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper1:2181
- SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
- SCHEMA_REGISTRY_HOST_NAME: schemaregistry1
- SCHEMA_REGISTRY_LISTENERS: http://schemaregistry1:8085
-
- SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
- SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
- SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
-
- kafka-connect0:
- image: confluentinc/cp-kafka-connect:6.0.1
- ports:
- - 8083:8083
- depends_on:
- - kafka0
- - schemaregistry0
- environment:
- CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
- CONNECT_GROUP_ID: compose-connect-group
- CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
- CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
- CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_STATUS_STORAGE_TOPIC: _connect_status
- CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
- CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
- CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
- CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
- CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
- CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
-
-
- kafka-init-topics:
- image: confluentinc/cp-kafka:5.3.1
- volumes:
- - ./message.json:/data/message.json
- depends_on:
- - kafka1
- command: "bash -c 'echo Waiting for Kafka to be ready... && \
- cub kafka-ready -b kafka1:29092 1 30 && \
- kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
- kafka-topics --create --topic second.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
- kafka-topics --create --topic first.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181 && \
- kafka-console-producer --broker-list kafka1:29092 -topic second.users < /data/message.json'"
diff --git a/documentation/compose/kafka-ssl-components.yaml b/documentation/compose/kafka-ssl-components.yaml
new file mode 100644
index 00000000000..407ce5b97a7
--- /dev/null
+++ b/documentation/compose/kafka-ssl-components.yaml
@@ -0,0 +1,178 @@
+---
+version: '3.4'
+services:
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8080:8080
+ depends_on:
+ - kafka0
+ - schemaregistry0
+ - kafka-connect0
+ - ksqldb0
+ environment:
+ KAFKA_CLUSTERS_0_NAME: local
+ KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SSL
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092 # SSL LISTENER!
+ KAFKA_CLUSTERS_0_PROPERTIES_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # DISABLE COMMON NAME VERIFICATION
+
+ KAFKA_CLUSTERS_0_SCHEMAREGISTRY: https://schemaregistry0:8085
+ KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTORELOCATION: /kafka.keystore.jks
+ KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTOREPASSWORD: "secret"
+
+ KAFKA_CLUSTERS_0_KSQLDBSERVER: https://ksqldb0:8088
+ KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTORELOCATION: /kafka.keystore.jks
+ KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTOREPASSWORD: "secret"
+
+ KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: local
+ KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: https://kafka-connect0:8083
+ KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTORELOCATION: /kafka.keystore.jks
+ KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTOREPASSWORD: "secret"
+
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION: /kafka.truststore.jks
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD: "secret"
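+      # The truststore verifies the components' certificates; the keystore supplies
+      # kafka-ui's client certificate for the components that request client auth.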
+ DYNAMIC_CONFIG_ENABLED: 'true' # not necessary for ssl, added for tests
+
+ volumes:
+ - ./ssl/kafka.truststore.jks:/kafka.truststore.jks
+ - ./ssl/kafka.keystore.jks:/kafka.keystore.jks
+
+ kafka0:
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka0
+ container_name: kafka0
+ ports:
+ - "9092:9092"
+ - "9997:9997"
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,SSL:SSL,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'SSL://kafka0:29092,PLAINTEXT_HOST://localhost:9092'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093'
+ KAFKA_LISTENERS: 'SSL://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'SSL'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SECURITY_PROTOCOL: SSL
+ KAFKA_SSL_ENABLED_MECHANISMS: PLAIN,SSL
+ KAFKA_SSL_KEYSTORE_FILENAME: kafka.keystore.jks
+ KAFKA_SSL_KEYSTORE_CREDENTIALS: creds
+ KAFKA_SSL_KEY_CREDENTIALS: creds
+ KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.truststore.jks
+ KAFKA_SSL_TRUSTSTORE_CREDENTIALS: creds
+ #KAFKA_SSL_CLIENT_AUTH: 'required'
+ KAFKA_SSL_CLIENT_AUTH: 'requested'
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # COMMON NAME VERIFICATION IS DISABLED SERVER-SIDE
+ volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ - ./ssl/creds:/etc/kafka/secrets/creds
+ - ./ssl/kafka.truststore.jks:/etc/kafka/secrets/kafka.truststore.jks
+ - ./ssl/kafka.keystore.jks:/etc/kafka/secrets/kafka.keystore.jks
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
+
+ schemaregistry0:
+ image: confluentinc/cp-schema-registry:7.2.1
+ depends_on:
+ - kafka0
+ environment:
+ SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: SSL://kafka0:29092
+ SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: SSL
+ SCHEMA_REGISTRY_KAFKASTORE_SSL_TRUSTSTORE_LOCATION: /kafka.truststore.jks
+ SCHEMA_REGISTRY_KAFKASTORE_SSL_TRUSTSTORE_PASSWORD: secret
+ SCHEMA_REGISTRY_KAFKASTORE_SSL_KEYSTORE_LOCATION: /kafka.keystore.jks
+ SCHEMA_REGISTRY_KAFKASTORE_SSL_KEYSTORE_PASSWORD: secret
+ SCHEMA_REGISTRY_KAFKASTORE_SSL_KEY_PASSWORD: secret
+ SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
+ SCHEMA_REGISTRY_LISTENERS: https://schemaregistry0:8085
+ SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: https
+
+ SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "https"
+ SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
+ SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
+ SCHEMA_REGISTRY_SSL_CLIENT_AUTHENTICATION: "REQUIRED"
+ SCHEMA_REGISTRY_SSL_TRUSTSTORE_LOCATION: /kafka.truststore.jks
+ SCHEMA_REGISTRY_SSL_TRUSTSTORE_PASSWORD: secret
+ SCHEMA_REGISTRY_SSL_KEYSTORE_LOCATION: /kafka.keystore.jks
+ SCHEMA_REGISTRY_SSL_KEYSTORE_PASSWORD: secret
+ SCHEMA_REGISTRY_SSL_KEY_PASSWORD: secret
+ ports:
+ - 8085:8085
+ volumes:
+ - ./ssl/kafka.truststore.jks:/kafka.truststore.jks
+ - ./ssl/kafka.keystore.jks:/kafka.keystore.jks
+
+ kafka-connect0:
+ image: confluentinc/cp-kafka-connect:7.2.1
+ ports:
+ - 8083:8083
+ depends_on:
+ - kafka0
+ - schemaregistry0
+ environment:
+ CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
+ CONNECT_GROUP_ID: compose-connect-group
+ CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
+ CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
+ CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
+ CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
+ CONNECT_STATUS_STORAGE_TOPIC: _connect_status
+ CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
+ CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+ CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: https://schemaregistry0:8085
+ CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+ CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: https://schemaregistry0:8085
+ CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+ CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+ CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
+ CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
+ CONNECT_SECURITY_PROTOCOL: "SSL"
+ CONNECT_SSL_KEYSTORE_LOCATION: "/kafka.keystore.jks"
+ CONNECT_SSL_KEY_PASSWORD: "secret"
+ CONNECT_SSL_KEYSTORE_PASSWORD: "secret"
+ CONNECT_SSL_TRUSTSTORE_LOCATION: "/kafka.truststore.jks"
+ CONNECT_SSL_TRUSTSTORE_PASSWORD: "secret"
+ CONNECT_SSL_CLIENT_AUTH: "requested"
+ CONNECT_REST_ADVERTISED_LISTENER: "https"
+ CONNECT_LISTENERS: "https://kafka-connect0:8083"
+ volumes:
+ - ./ssl/kafka.truststore.jks:/kafka.truststore.jks
+ - ./ssl/kafka.keystore.jks:/kafka.keystore.jks
+
+ ksqldb0:
+ image: confluentinc/ksqldb-server:0.18.0
+ depends_on:
+ - kafka0
+ - kafka-connect0
+ - schemaregistry0
+ ports:
+ - 8088:8088
+ environment:
+ KSQL_CUB_KAFKA_TIMEOUT: 120
+ KSQL_LISTENERS: https://0.0.0.0:8088
+ KSQL_BOOTSTRAP_SERVERS: SSL://kafka0:29092
+ KSQL_SECURITY_PROTOCOL: SSL
+ KSQL_SSL_TRUSTSTORE_LOCATION: /kafka.truststore.jks
+ KSQL_SSL_TRUSTSTORE_PASSWORD: secret
+ KSQL_SSL_KEYSTORE_LOCATION: /kafka.keystore.jks
+ KSQL_SSL_KEYSTORE_PASSWORD: secret
+ KSQL_SSL_KEY_PASSWORD: secret
+ KSQL_SSL_CLIENT_AUTHENTICATION: REQUIRED
+ KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: "true"
+ KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: "true"
+ KSQL_KSQL_CONNECT_URL: https://kafka-connect0:8083
+ KSQL_KSQL_SCHEMA_REGISTRY_URL: https://schemaregistry0:8085
+ KSQL_KSQL_SERVICE_ID: my_ksql_1
+ KSQL_KSQL_HIDDEN_TOPICS: '^_.*'
+ KSQL_CACHE_MAX_BYTES_BUFFERING: 0
+ volumes:
+ - ./ssl/kafka.truststore.jks:/kafka.truststore.jks
+ - ./ssl/kafka.keystore.jks:/kafka.keystore.jks
diff --git a/documentation/compose/kafka-ssl.yml b/documentation/compose/kafka-ssl.yml
index 367874fc5c7..08ff9dc4af8 100644
--- a/documentation/compose/kafka-ssl.yml
+++ b/documentation/compose/kafka-ssl.yml
@@ -1,47 +1,50 @@
---
version: '3.4'
services:
-
kafka-ui:
container_name: kafka-ui
image: provectuslabs/kafka-ui:latest
ports:
- 8080:8080
depends_on:
- - zookeeper0
- - kafka0
+ - kafka
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SSL
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092 # SSL LISTENER!
- KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper0:2181
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_LOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_PASSWORD: secret
+ KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_LOCATION: /kafka.keystore.jks
+ KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_PASSWORD: "secret"
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 # SSL LISTENER!
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION: /kafka.truststore.jks
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD: "secret"
+ KAFKA_CLUSTERS_0_PROPERTIES_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # DISABLE COMMON NAME VERIFICATION
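+      # An empty algorithm disables hostname verification; fine for the self-signed
+      # certificates under ./ssl, but not for production.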
volumes:
- ./ssl/kafka.truststore.jks:/kafka.truststore.jks
+ - ./ssl/kafka.keystore.jks:/kafka.keystore.jks
- zookeeper0:
- image: confluentinc/cp-zookeeper:6.0.1
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2181:2181
-
- kafka0:
- image: confluentinc/cp-kafka:6.0.1
- hostname: kafka0
- depends_on:
- - zookeeper0
+ kafka:
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka
+ container_name: kafka
ports:
- - '9092:9092'
+ - "9092:9092"
+ - "9997:9997"
environment:
KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,SSL:SSL,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'SSL://kafka:29092,PLAINTEXT_HOST://localhost:9092'
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- KAFKA_ADVERTISED_LISTENERS: SSL://kafka0:29092,PLAINTEXT_HOST://localhost:9092
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: SSL:SSL,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: SSL
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:29093'
+ KAFKA_LISTENERS: 'SSL://kafka:29092,CONTROLLER://kafka:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'SSL'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
KAFKA_SECURITY_PROTOCOL: SSL
KAFKA_SSL_ENABLED_MECHANISMS: PLAIN,SSL
KAFKA_SSL_KEYSTORE_FILENAME: kafka.keystore.jks
@@ -50,9 +53,11 @@ services:
KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.truststore.jks
KAFKA_SSL_TRUSTSTORE_CREDENTIALS: creds
#KAFKA_SSL_CLIENT_AUTH: 'required'
- KAFKA_SSL_CLIENT_AUTH: "requested"
+ KAFKA_SSL_CLIENT_AUTH: 'requested'
KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # COMMON NAME VERIFICATION IS DISABLED SERVER-SIDE
volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
- ./ssl/creds:/etc/kafka/secrets/creds
- ./ssl/kafka.truststore.jks:/etc/kafka/secrets/kafka.truststore.jks
- ./ssl/kafka.keystore.jks:/etc/kafka/secrets/kafka.keystore.jks
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
diff --git a/documentation/compose/kafka-ui-acl-with-zk.yaml b/documentation/compose/kafka-ui-acl-with-zk.yaml
new file mode 100644
index 00000000000..e1d70b29702
--- /dev/null
+++ b/documentation/compose/kafka-ui-acl-with-zk.yaml
@@ -0,0 +1,59 @@
+---
+version: '2'
+services:
+
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8080:8080
+ depends_on:
+ - zookeeper
+ - kafka
+ environment:
+ KAFKA_CLUSTERS_0_NAME: local
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
+ KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SASL_PLAINTEXT
+ KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM: PLAIN
+ KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";'
+
+ zookeeper:
+ image: wurstmeister/zookeeper:3.4.6
+ environment:
+ JVMFLAGS: "-Djava.security.auth.login.config=/etc/zookeeper/zookeeper_jaas.conf"
+ volumes:
+ - ./jaas/zookeeper_jaas.conf:/etc/zookeeper/zookeeper_jaas.conf
+ ports:
+ - 2181:2181
+
+ kafka:
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka
+ container_name: kafka
+ ports:
+ - "9092:9092"
+ - "9997:9997"
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092'
+ KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/jaas/kafka_server.conf"
+ KAFKA_AUTHORIZER_CLASS_NAME: "kafka.security.authorizer.AclAuthorizer"
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:29093'
+ KAFKA_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,CONTROLLER://kafka:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'SASL_PLAINTEXT'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
+ KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN'
+ KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT'
+ KAFKA_SUPER_USERS: 'User:admin'
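+      # AclAuthorizer denies by default; only the super user above bypasses ACL
+      # checks, so any additional principals need explicit ACLs.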
+ volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ - ./jaas:/etc/kafka/jaas
diff --git a/documentation/compose/kafka-ui-arm64.yaml b/documentation/compose/kafka-ui-arm64.yaml
new file mode 100644
index 00000000000..082d7cb5af0
--- /dev/null
+++ b/documentation/compose/kafka-ui-arm64.yaml
@@ -0,0 +1,106 @@
+# ARM64-compatible images for kafka can be found here
+# https://hub.docker.com/r/confluentinc/cp-kafka/tags?page=1&name=arm64
+---
+version: '2'
+services:
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8080:8080
+ depends_on:
+ - kafka0
+ - schema-registry0
+ - kafka-connect0
+ environment:
+ KAFKA_CLUSTERS_0_NAME: local
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
+ KAFKA_CLUSTERS_0_METRICS_PORT: 9997
+ KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schema-registry0:8085
+ KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
+ KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
+ DYNAMIC_CONFIG_ENABLED: 'true' # not necessary, added for tests
+ KAFKA_CLUSTERS_0_AUDIT_TOPICAUDITENABLED: 'true'
+ KAFKA_CLUSTERS_0_AUDIT_CONSOLEAUDITENABLED: 'true'
+
+ kafka0:
+ image: confluentinc/cp-kafka:7.2.1.arm64
+ hostname: kafka0
+ container_name: kafka0
+ ports:
+ - 9092:9092
+ - 9997:9997
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
+ volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
+
+ schema-registry0:
+ image: confluentinc/cp-schema-registry:7.2.1.arm64
+ ports:
+ - 8085:8085
+ depends_on:
+ - kafka0
+ environment:
+ SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
+ SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
+ SCHEMA_REGISTRY_HOST_NAME: schema-registry0
+ SCHEMA_REGISTRY_LISTENERS: http://schema-registry0:8085
+
+ SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
+ SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
+ SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
+
+ kafka-connect0:
+ image: confluentinc/cp-kafka-connect:7.2.1.arm64
+ ports:
+ - 8083:8083
+ depends_on:
+ - kafka0
+ - schema-registry0
+ environment:
+ CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
+ CONNECT_GROUP_ID: compose-connect-group
+ CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
+ CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
+ CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
+ CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
+ CONNECT_STATUS_STORAGE_TOPIC: _connect_status
+ CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
+ CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+ CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry0:8085
+ CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+ CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry0:8085
+ CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+ CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+ CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
+ CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
+
+ kafka-init-topics:
+ image: confluentinc/cp-kafka:7.2.1.arm64
+ volumes:
+ - ./data/message.json:/data/message.json
+ depends_on:
+ - kafka0
+ command: "bash -c 'echo Waiting for Kafka to be ready... && \
+ cub kafka-ready -b kafka0:29092 1 30 && \
+ kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
+ kafka-topics --create --topic second.messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
+ kafka-topics --create --topic first.messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
+ kafka-console-producer --bootstrap-server kafka0:29092 --topic second.users < /data/message.json'"
diff --git a/documentation/compose/kafka-ui-auth-context.yaml b/documentation/compose/kafka-ui-auth-context.yaml
index a3c4bee36b8..69eebbfeebb 100644
--- a/documentation/compose/kafka-ui-auth-context.yaml
+++ b/documentation/compose/kafka-ui-auth-context.yaml
@@ -8,52 +8,40 @@ services:
ports:
- 8080:8080
depends_on:
- - zookeeper0
- - kafka0
+ - kafka
environment:
KAFKA_CLUSTERS_0_NAME: local
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
- KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper0:2181
- KAFKA_CLUSTERS_0_JMXPORT: 9997
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
+ KAFKA_CLUSTERS_0_METRICS_PORT: 9997
SERVER_SERVLET_CONTEXT_PATH: /kafkaui
AUTH_TYPE: "LOGIN_FORM"
SPRING_SECURITY_USER_NAME: admin
SPRING_SECURITY_USER_PASSWORD: pass
- zookeeper0:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2181:2181
-
- kafka0:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper0
+ kafka:
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka
+ container_name: kafka
ports:
- - 9092:9092
- - 9997:9997
+ - "9092:9092"
+ - "9997:9997"
environment:
KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092'
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- JMX_PORT: 9997
- KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
-
- kafka-init-topics:
- image: confluentinc/cp-kafka:5.3.1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka:29092,CONTROLLER://kafka:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
volumes:
- - ./message.json:/data/message.json
- depends_on:
- - kafka0
- command: "bash -c 'echo Waiting for Kafka to be ready... && \
- cub kafka-ready -b kafka0:29092 1 30 && \
- kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181 && \
- kafka-topics --create --topic second.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181 && \
- kafka-topics --create --topic first.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181 && \
- kafka-console-producer --broker-list kafka0:29092 -topic second.users < /data/message.json'"
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
\ No newline at end of file
diff --git a/documentation/compose/kafka-ui-connectors-auth.yaml b/documentation/compose/kafka-ui-connectors-auth.yaml
index a7367911206..d1f31f79696 100644
--- a/documentation/compose/kafka-ui-connectors-auth.yaml
+++ b/documentation/compose/kafka-ui-connectors-auth.yaml
@@ -1,68 +1,62 @@
---
-version: '2'
+version: "2"
services:
-
kafka-ui:
container_name: kafka-ui
image: provectuslabs/kafka-ui:latest
ports:
- 8080:8080
depends_on:
- - zookeeper0
- - zookeeper1
- kafka0
- - kafka1
- schemaregistry0
- kafka-connect0
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
- KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper0:2181
- KAFKA_CLUSTERS_0_JMXPORT: 9997
+ KAFKA_CLUSTERS_0_METRICS_PORT: 9997
KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry0:8085
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
KAFKA_CLUSTERS_0_KAFKACONNECT_0_USERNAME: admin
KAFKA_CLUSTERS_0_KAFKACONNECT_0_PASSWORD: admin-secret
- KAFKA_CLUSTERS_0_KSQLDBSERVER: http://ksqldb:8088
-
- zookeeper0:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2181:2181
kafka0:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper0
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka0
+ container_name: kafka0
ports:
- - 9092:9092
- - 9997:9997
+ - "9092:9092"
+ - "9997:9997"
environment:
KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT"
+ KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
- JMX_PORT: 9997
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
+ KAFKA_PROCESS_ROLES: "broker,controller"
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: "1@kafka0:29093"
+ KAFKA_LISTENERS: "PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092"
+ KAFKA_INTER_BROKER_LISTENER_NAME: "PLAINTEXT"
+ KAFKA_CONTROLLER_LISTENER_NAMES: "CONTROLLER"
+ KAFKA_LOG_DIRS: "/tmp/kraft-combined-logs"
+ volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: 'bash -c ''if [ ! -f /tmp/update_run.sh ]; then echo "ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'''
schemaregistry0:
- image: confluentinc/cp-schema-registry:5.5.0
+ image: confluentinc/cp-schema-registry:7.2.1
ports:
- 8085:8085
depends_on:
- - zookeeper0
- kafka0
environment:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
- SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper0:2181
SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
@@ -71,12 +65,11 @@ services:
SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
-
kafka-connect0:
build:
context: ./kafka-connect
args:
- image: confluentinc/cp-kafka-connect:6.0.1
+ image: confluentinc/cp-kafka-connect:7.2.1
ports:
- 8083:8083
depends_on:
@@ -100,51 +93,22 @@ services:
CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
+ CONNECT_REST_PORT: 8083
CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
CONNECT_REST_EXTENSION_CLASSES: "org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension"
KAFKA_OPTS: "-Djava.security.auth.login.config=/conf/kafka_connect.jaas"
-# AWS_ACCESS_KEY_ID: ""
-# AWS_SECRET_ACCESS_KEY: ""
+ # AWS_ACCESS_KEY_ID: ""
+ # AWS_SECRET_ACCESS_KEY: ""
kafka-init-topics:
- image: confluentinc/cp-kafka:5.3.1
+ image: confluentinc/cp-kafka:7.2.1
volumes:
- - ./message.json:/data/message.json
- depends_on:
- - kafka1
- command: "bash -c 'echo Waiting for Kafka to be ready... && \
- cub kafka-ready -b kafka1:29092 1 30 && \
- kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
- kafka-topics --create --topic second.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
- kafka-topics --create --topic first.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181 && \
- kafka-console-producer --broker-list kafka1:29092 -topic second.users < /data/message.json'"
-
- create-connectors:
- image: ellerbrock/alpine-bash-curl-ssl
- depends_on:
- - postgres-db
- - kafka-connect0
- volumes:
- - ./connectors:/connectors
- command: bash -c '/connectors/start.sh'
-
- ksqldb:
- image: confluentinc/ksqldb-server:0.18.0
+ - ./data/message.json:/data/message.json
depends_on:
- kafka0
- - kafka-connect0
- - schemaregistry0
- ports:
- - 8088:8088
- environment:
- KSQL_CUB_KAFKA_TIMEOUT: 120
- KSQL_LISTENERS: http://0.0.0.0:8088
- KSQL_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
- KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: "true"
- KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: "true"
- KSQL_KSQL_CONNECT_URL: http://kafka-connect0:8083
- KSQL_KSQL_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- KSQL_KSQL_SERVICE_ID: my_ksql_1
- KSQL_KSQL_HIDDEN_TOPICS: '^_.*'
- KSQL_CACHE_MAX_BYTES_BUFFERING: 0
+ command: "bash -c 'echo Waiting for Kafka to be ready... && \
+ cub kafka-ready -b kafka0:29092 1 30 && \
+ kafka-topics --create --topic users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
+ kafka-topics --create --topic messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
+ kafka-console-producer --bootstrap-server kafka0:29092 --topic users < /data/message.json'"
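The `KAFKA_CLUSTERS_0_KAFKACONNECT_0_USERNAME`/`_PASSWORD` variables above are the basic-auth knobs for Connect. For reference, the same setup expressed as `application.yml` — a sketch inferred from the env names via Spring relaxed binding, not a file added by this PR:

```yaml
kafka:
  clusters:
    - name: local
      bootstrapServers: kafka0:29092
      kafkaConnect:
        - name: first
          address: http://kafka-connect0:8083
          username: admin
          password: admin-secret
```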
diff --git a/documentation/compose/kafka-ui-connectors.yaml b/documentation/compose/kafka-ui-connectors.yaml
deleted file mode 100644
index 4b3d4e42755..00000000000
--- a/documentation/compose/kafka-ui-connectors.yaml
+++ /dev/null
@@ -1,201 +0,0 @@
----
-version: '2'
-services:
-
- kafka-ui:
- container_name: kafka-ui
- image: provectuslabs/kafka-ui:latest
- ports:
- - 8080:8080
- depends_on:
- - zookeeper0
- - zookeeper1
- - kafka0
- - kafka1
- - schemaregistry0
- - kafka-connect0
- environment:
- KAFKA_CLUSTERS_0_NAME: local
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
- KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper0:2181
- KAFKA_CLUSTERS_0_JMXPORT: 9997
- KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry0:8085
- KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
- KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
- KAFKA_CLUSTERS_0_KSQLDBSERVER: http://ksqldb:8088
- KAFKA_CLUSTERS_1_NAME: secondLocal
- KAFKA_CLUSTERS_1_BOOTSTRAPSERVERS: kafka1:29092
- KAFKA_CLUSTERS_1_ZOOKEEPER: zookeeper1:2181
- KAFKA_CLUSTERS_1_JMXPORT: 9998
- KAFKA_CLUSTERS_1_SCHEMAREGISTRY: http://schemaregistry1:8085
-
- zookeeper0:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2181:2181
-
- kafka0:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper0
- ports:
- - 9092:9092
- - 9997:9997
- environment:
- KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
- JMX_PORT: 9997
- KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
-
- zookeeper1:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
-
- kafka1:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper1
- ports:
- - 9093:9093
- - 9998:9998
- environment:
- KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper1:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:29092,PLAINTEXT_HOST://localhost:9093
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
- JMX_PORT: 9998
- KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka1 -Dcom.sun.management.jmxremote.rmi.port=9998
-
- schemaregistry0:
- image: confluentinc/cp-schema-registry:5.5.0
- ports:
- - 8085:8085
- depends_on:
- - zookeeper0
- - kafka0
- environment:
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
- SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper0:2181
- SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
- SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
- SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
-
- SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
- SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
- SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
-
- schemaregistry1:
- image: confluentinc/cp-schema-registry:5.5.0
- ports:
- - 18085:8085
- depends_on:
- - zookeeper1
- - kafka1
- environment:
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:29092
- SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper1:2181
- SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
- SCHEMA_REGISTRY_HOST_NAME: schemaregistry1
- SCHEMA_REGISTRY_LISTENERS: http://schemaregistry1:8085
-
- SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
- SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
- SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
-
- kafka-connect0:
- build:
- context: ./kafka-connect
- args:
- image: confluentinc/cp-kafka-connect:6.0.1
- ports:
- - 8083:8083
- depends_on:
- - kafka0
- - schemaregistry0
- environment:
- CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
- CONNECT_GROUP_ID: compose-connect-group
- CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
- CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
- CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_STATUS_STORAGE_TOPIC: _connect_status
- CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
- CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
- CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
- CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
- CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
- CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
-# AWS_ACCESS_KEY_ID: ""
-# AWS_SECRET_ACCESS_KEY: ""
-
- kafka-init-topics:
- image: confluentinc/cp-kafka:5.3.1
- volumes:
- - ./message.json:/data/message.json
- depends_on:
- - kafka1
- command: "bash -c 'echo Waiting for Kafka to be ready... && \
- cub kafka-ready -b kafka1:29092 1 30 && \
- kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
- kafka-topics --create --topic second.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
- kafka-topics --create --topic first.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181 && \
- kafka-console-producer --broker-list kafka1:29092 -topic second.users < /data/message.json'"
-
- postgres-db:
- build:
- context: ./postgres
- args:
- image: postgres:9.6.22
- ports:
- - 5432:5432
- environment:
- POSTGRES_USER: 'dev_user'
- POSTGRES_PASSWORD: '12345'
-
- create-connectors:
- image: ellerbrock/alpine-bash-curl-ssl
- depends_on:
- - postgres-db
- - kafka-connect0
- volumes:
- - ./connectors:/connectors
- command: bash -c '/connectors/start.sh'
-
- ksqldb:
- image: confluentinc/ksqldb-server:0.18.0
- depends_on:
- - kafka0
- - kafka-connect0
- - schemaregistry0
- ports:
- - 8088:8088
- environment:
- KSQL_CUB_KAFKA_TIMEOUT: 120
- KSQL_LISTENERS: http://0.0.0.0:8088
- KSQL_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
- KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: "true"
- KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: "true"
- KSQL_KSQL_CONNECT_URL: http://kafka-connect0:8083
- KSQL_KSQL_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- KSQL_KSQL_SERVICE_ID: my_ksql_1
- KSQL_KSQL_HIDDEN_TOPICS: '^_.*'
- KSQL_CACHE_MAX_BYTES_BUFFERING: 0
\ No newline at end of file
diff --git a/documentation/compose/kafka-ui-jmx-secured.yml b/documentation/compose/kafka-ui-jmx-secured.yml
index 133a19986d5..408f388ba54 100644
--- a/documentation/compose/kafka-ui-jmx-secured.yml
+++ b/documentation/compose/kafka-ui-jmx-secured.yml
@@ -7,56 +7,48 @@ services:
image: provectuslabs/kafka-ui:latest
ports:
- 8080:8080
- - 5005:5005
depends_on:
- - zookeeper0
- kafka0
- - schemaregistry0
- - kafka-connect0
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
- KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper0:2181
KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry0:8085
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
- KAFKA_CLUSTERS_0_JMXPORT: 9997
- KAFKA_CLUSTERS_0_JMXSSL: 'true'
- KAFKA_CLUSTERS_0_JMXUSERNAME: root
- KAFKA_CLUSTERS_0_JMXPASSWORD: password
- JAVA_OPTS: >-
- -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005
- -Djavax.net.ssl.trustStore=/jmx/clienttruststore
- -Djavax.net.ssl.trustStorePassword=12345678
- -Djavax.net.ssl.keyStore=/jmx/clientkeystore
- -Djavax.net.ssl.keyStorePassword=12345678
+ KAFKA_CLUSTERS_0_METRICS_PORT: 9997
+ KAFKA_CLUSTERS_0_METRICS_USERNAME: root
+ KAFKA_CLUSTERS_0_METRICS_PASSWORD: password
+ KAFKA_CLUSTERS_0_METRICS_KEYSTORE_LOCATION: /jmx/clientkeystore
+ KAFKA_CLUSTERS_0_METRICS_KEYSTORE_PASSWORD: '12345678'
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORE_LOCATION: /jmx/clienttruststore
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORE_PASSWORD: '12345678'
volumes:
- ./jmx/clienttruststore:/jmx/clienttruststore
- ./jmx/clientkeystore:/jmx/clientkeystore
- zookeeper0:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2181:2181
-
kafka0:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper0
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka0
+ container_name: kafka0
ports:
- 9092:9092
- 9997:9997
environment:
KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092'
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- JMX_PORT: 9997
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
# CHMOD 700 FOR JMXREMOTE.* FILES
KAFKA_JMX_OPTS: >-
-Dcom.sun.management.jmxremote
@@ -72,65 +64,10 @@ services:
-Dcom.sun.management.jmxremote.access.file=/jmx/jmxremote.access
-Dcom.sun.management.jmxremote.rmi.port=9997
-Djava.rmi.server.hostname=kafka0
- -Djava.rmi.server.logCalls=true
-# -Djavax.net.debug=ssl:handshake
volumes:
- - ./jmx/serverkeystore:/jmx/serverkeystore
- - ./jmx/servertruststore:/jmx/servertruststore
- - ./jmx/jmxremote.password:/jmx/jmxremote.password
- - ./jmx/jmxremote.access:/jmx/jmxremote.access
-
- schemaregistry0:
- image: confluentinc/cp-schema-registry:5.5.0
- ports:
- - 8085:8085
- depends_on:
- - zookeeper0
- - kafka0
- environment:
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
- SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper0:2181
- SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
- SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
- SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
-
- SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
- SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
- SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
-
- kafka-connect0:
- image: confluentinc/cp-kafka-connect:6.0.1
- ports:
- - 8083:8083
- depends_on:
- - kafka0
- - schemaregistry0
- environment:
- CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
- CONNECT_GROUP_ID: compose-connect-group
- CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
- CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
- CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_STATUS_STORAGE_TOPIC: _connect_status
- CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
- CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
- CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
- CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
- CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
- CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
-
- kafka-init-topics:
- image: confluentinc/cp-kafka:5.3.1
- volumes:
- - ./message.json:/data/message.json
- depends_on:
- - kafka0
- command: "bash -c 'echo Waiting for Kafka to be ready... && \
- cub kafka-ready -b kafka0:29092 1 30 && \
- kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181 && \
- kafka-topics --create --topic first.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181 && \
- kafka-console-producer --broker-list kafka0:29092 -topic second.users < /data/message.json'"
+ - ./jmx/serverkeystore:/jmx/serverkeystore
+ - ./jmx/servertruststore:/jmx/servertruststore
+ - ./jmx/jmxremote.password:/jmx/jmxremote.password
+ - ./jmx/jmxremote.access:/jmx/jmxremote.access
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
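The `KAFKA_CLUSTERS_0_METRICS_*` variables replace the old `JMXPORT`/`JMXSSL`/`JMXUSERNAME`/`JMXPASSWORD` names. As a rough `application.yml` equivalent — property paths are inferred from the env vars via relaxed binding, so treat this as a sketch:

```yaml
kafka:
  clusters:
    - name: local
      bootstrapServers: kafka0:29092
      metrics:
        port: 9997
        username: root
        password: password
        keystoreLocation: /jmx/clientkeystore
        keystorePassword: "12345678"
      ssl:
        truststoreLocation: /jmx/clienttruststore
        truststorePassword: "12345678"
```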
diff --git a/documentation/compose/kafka-ui-reverse-proxy.yaml b/documentation/compose/kafka-ui-reverse-proxy.yaml
deleted file mode 100644
index 69d94e627a6..00000000000
--- a/documentation/compose/kafka-ui-reverse-proxy.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
----
-version: '2'
-services:
- nginx:
- image: nginx:latest
- volumes:
- - ./proxy.conf:/etc/nginx/conf.d/default.conf
- ports:
- - 8080:80
-
- kafka-ui:
- container_name: kafka-ui
- image: provectuslabs/kafka-ui:latest
- ports:
- - 8082:8080
- environment:
- KAFKA_CLUSTERS_0_NAME: local
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:9092
- SERVER_SERVLET_CONTEXT_PATH: /kafka-ui
diff --git a/documentation/compose/kafka-ui-sasl.yaml b/documentation/compose/kafka-ui-sasl.yaml
index 1c0312f11a2..e4a2b3cc4a7 100644
--- a/documentation/compose/kafka-ui-sasl.yaml
+++ b/documentation/compose/kafka-ui-sasl.yaml
@@ -8,45 +8,45 @@ services:
ports:
- 8080:8080
depends_on:
- - zookeeper
- kafka
environment:
KAFKA_CLUSTERS_0_NAME: local
-# SERVER_SERVLET_CONTEXT_PATH: "/kafkaui"
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:9092
- KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper:2181
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SASL_PLAINTEXT
KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM: PLAIN
KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";'
- zookeeper:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2181:2181
+      DYNAMIC_CONFIG_ENABLED: 'true' # not necessary for sasl auth, added for tests
kafka:
- image: wurstmeister/kafka:latest
+ image: confluentinc/cp-kafka:7.2.1
hostname: kafka
container_name: kafka
- depends_on:
- - zookeeper
ports:
- - '9092:9092'
+ - "9092:9092"
+ - "9997:9997"
environment:
- KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
- KAFKA_LISTENERS: SASL_PLAINTEXT://kafka:9092
- KAFKA_ADVERTISED_LISTENERS: SASL_PLAINTEXT://kafka:9092
- KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
- ALLOW_PLAINTEXT_LISTENER: 'yes'
+ KAFKA_BROKER_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092'
KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/jaas/kafka_server.conf"
- KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.auth.SimpleAclAuthorizer
- KAFKA_INTER_BROKER_LISTENER_NAME: SASL_PLAINTEXT
- KAFKA_SASL_ENABLED_MECHANISMS: PLAIN
- KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: PLAIN
- KAFKA_SECURITY_PROTOCOL: SASL_PLAINTEXT
- KAFKA_SUPER_USERS: User:admin,User:enzo
- KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:29093'
+ KAFKA_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,CONTROLLER://kafka:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'SASL_PLAINTEXT'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
+ KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT'
+ KAFKA_SUPER_USERS: 'User:admin,User:enzo'
volumes:
- - ./jaas:/etc/kafka/jaas
\ No newline at end of file
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ - ./jaas:/etc/kafka/jaas
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
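The client-side SASL settings above map one-to-one onto cluster properties. A minimal `application.yml` sketch mirroring the `KAFKA_CLUSTERS_0_PROPERTIES_*` env vars (same credentials as the compose file assumes):

```yaml
kafka:
  clusters:
    - name: local
      bootstrapServers: kafka:29092
      properties:
        security.protocol: SASL_PLAINTEXT
        sasl.mechanism: PLAIN
        sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";
```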
diff --git a/documentation/compose/kafka-ui-serdes.yaml b/documentation/compose/kafka-ui-serdes.yaml
new file mode 100644
index 00000000000..eee510a13d6
--- /dev/null
+++ b/documentation/compose/kafka-ui-serdes.yaml
@@ -0,0 +1,113 @@
+---
+version: '2'
+services:
+
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8080:8080
+ depends_on:
+ - kafka0
+ - schemaregistry0
+ environment:
+ kafka.clusters.0.name: SerdeExampleCluster
+ kafka.clusters.0.bootstrapServers: kafka0:29092
+ kafka.clusters.0.schemaRegistry: http://schemaregistry0:8085
+
+ # optional SSL settings for cluster (will be used by SchemaRegistry serde, if set)
+ #kafka.clusters.0.ssl.keystoreLocation: /kafka.keystore.jks
+ #kafka.clusters.0.ssl.keystorePassword: "secret"
+ #kafka.clusters.0.ssl.truststoreLocation: /kafka.truststore.jks
+ #kafka.clusters.0.ssl.truststorePassword: "secret"
+
+ # optional auth properties for SR
+ #kafka.clusters.0.schemaRegistryAuth.username: "use"
+ #kafka.clusters.0.schemaRegistryAuth.password: "pswrd"
+
+ kafka.clusters.0.defaultKeySerde: Int32 #optional
+ kafka.clusters.0.defaultValueSerde: String #optional
+
+ kafka.clusters.0.serde.0.name: ProtobufFile
+ kafka.clusters.0.serde.0.topicKeysPattern: "topic1"
+ kafka.clusters.0.serde.0.topicValuesPattern: "topic1"
+ kafka.clusters.0.serde.0.properties.protobufFilesDir: /protofiles/
+ kafka.clusters.0.serde.0.properties.protobufMessageNameForKey: test.MyKey # default type for keys
+ kafka.clusters.0.serde.0.properties.protobufMessageName: test.MyValue # default type for values
+ kafka.clusters.0.serde.0.properties.protobufMessageNameForKeyByTopic.topic1: test.MySpecificTopicKey # keys type for topic "topic1"
+ kafka.clusters.0.serde.0.properties.protobufMessageNameByTopic.topic1: test.MySpecificTopicValue # values type for topic "topic1"
+
+ kafka.clusters.0.serde.1.name: String
+ #kafka.clusters.0.serde.1.properties.encoding: "UTF-16" #optional, default is UTF-8
+ kafka.clusters.0.serde.1.topicValuesPattern: "json-events|text-events"
+
+ kafka.clusters.0.serde.2.name: AsciiString
+ kafka.clusters.0.serde.2.className: com.provectus.kafka.ui.serdes.builtin.StringSerde
+ kafka.clusters.0.serde.2.properties.encoding: "ASCII"
+
+ kafka.clusters.0.serde.3.name: SchemaRegistry # will be configured automatically using cluster SR
+ kafka.clusters.0.serde.3.topicValuesPattern: "sr-topic.*"
+
+ kafka.clusters.0.serde.4.name: AnotherSchemaRegistry
+ kafka.clusters.0.serde.4.className: com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde
+ kafka.clusters.0.serde.4.properties.url: http://schemaregistry0:8085
+ kafka.clusters.0.serde.4.properties.keySchemaNameTemplate: "%s-key"
+ kafka.clusters.0.serde.4.properties.schemaNameTemplate: "%s-value"
+ #kafka.clusters.0.serde.4.topicValuesPattern: "sr2-topic.*"
+ # optional auth and ssl properties for SR (overrides cluster-level):
+ #kafka.clusters.0.serde.4.properties.username: "user"
+ #kafka.clusters.0.serde.4.properties.password: "passw"
+ #kafka.clusters.0.serde.4.properties.keystoreLocation: /kafka.keystore.jks
+ #kafka.clusters.0.serde.4.properties.keystorePassword: "secret"
+ #kafka.clusters.0.serde.4.properties.truststoreLocation: /kafka.truststore.jks
+ #kafka.clusters.0.serde.4.properties.truststorePassword: "secret"
+
+ kafka.clusters.0.serde.5.name: UInt64
+ kafka.clusters.0.serde.5.topicKeysPattern: "topic-with-uint64keys"
+ volumes:
+ - ./proto:/protofiles
+
+ kafka0:
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka0
+ container_name: kafka0
+ ports:
+ - "9092:9092"
+ - "9997:9997"
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
+ KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
+
+ schemaregistry0:
+ image: confluentinc/cp-schema-registry:7.2.1
+ ports:
+ - 8085:8085
+ depends_on:
+ - kafka0
+ environment:
+ SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
+ SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
+ SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
+ SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
+
+ SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
+ SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
+ SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
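The lower-case env keys in this file (`kafka.clusters.0.serde.0.name`, …) are Spring property paths passed through the environment. Rewritten mechanically as an `application.yml` fragment for the first serde — a sketch for readability, not an additional file in this PR:

```yaml
kafka:
  clusters:
    - name: SerdeExampleCluster
      bootstrapServers: kafka0:29092
      schemaRegistry: http://schemaregistry0:8085
      serde:
        - name: ProtobufFile
          topicKeysPattern: "topic1"
          topicValuesPattern: "topic1"
          properties:
            protobufFilesDir: /protofiles/
            protobufMessageNameForKey: test.MyKey
            protobufMessageName: test.MyValue
```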
diff --git a/documentation/compose/kafka-ui-with-jmx-exporter.yaml b/documentation/compose/kafka-ui-with-jmx-exporter.yaml
new file mode 100644
index 00000000000..b0d940694b1
--- /dev/null
+++ b/documentation/compose/kafka-ui-with-jmx-exporter.yaml
@@ -0,0 +1,44 @@
+---
+version: '2'
+services:
+
+ kafka0:
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka0
+ container_name: kafka0
+ ports:
+ - "9092:9092"
+ - "11001:11001"
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_OPTS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent.jar=11001:/usr/share/jmx_exporter/kafka-broker.yml
+ volumes:
+ - ./jmx-exporter:/usr/share/jmx_exporter/
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /usr/share/jmx_exporter/kafka-prepare-and-run ; fi'"
+
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8080:8080
+ depends_on:
+ - kafka0
+ environment:
+ KAFKA_CLUSTERS_0_NAME: local
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
+ KAFKA_CLUSTERS_0_METRICS_PORT: 11001
+ KAFKA_CLUSTERS_0_METRICS_TYPE: PROMETHEUS
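Here `KAFKA_OPTS` attaches the Prometheus JMX exporter agent on port 11001 with a rules file at `/usr/share/jmx_exporter/kafka-broker.yml`, mounted from `./jmx-exporter`. That rules file is not part of this diff; a minimal hypothetical example of the format it uses (real Kafka rule sets are much longer):

```yaml
# jmx_exporter rules sketch: export kafka.server MBean values as Prometheus gauges
lowercaseOutputName: true
rules:
  - pattern: kafka.server<type=(.+), name=(.+)><>Value
    name: kafka_server_$1_$2
    type: GAUGE
```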
diff --git a/documentation/compose/kafka-ui.yaml b/documentation/compose/kafka-ui.yaml
index 8afe6b6d2f1..14a269ca7cb 100644
--- a/documentation/compose/kafka-ui.yaml
+++ b/documentation/compose/kafka-ui.yaml
@@ -8,86 +8,88 @@ services:
ports:
- 8080:8080
depends_on:
- - zookeeper0
- - zookeeper1
- kafka0
- kafka1
- schemaregistry0
+ - schemaregistry1
- kafka-connect0
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
- KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper0:2181
- KAFKA_CLUSTERS_0_JMXPORT: 9997
+ KAFKA_CLUSTERS_0_METRICS_PORT: 9997
KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry0:8085
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
KAFKA_CLUSTERS_1_NAME: secondLocal
KAFKA_CLUSTERS_1_BOOTSTRAPSERVERS: kafka1:29092
- KAFKA_CLUSTERS_1_ZOOKEEPER: zookeeper1:2181
- KAFKA_CLUSTERS_1_JMXPORT: 9998
+ KAFKA_CLUSTERS_1_METRICS_PORT: 9998
KAFKA_CLUSTERS_1_SCHEMAREGISTRY: http://schemaregistry1:8085
- KAFKA_CLUSTERS_1_KAFKACONNECT_0_NAME: first
- KAFKA_CLUSTERS_1_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
-
- zookeeper0:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
- ports:
- - 2181:2181
+ DYNAMIC_CONFIG_ENABLED: 'true'
kafka0:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper0
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka0
+ container_name: kafka0
ports:
- - 9092:9092
- - 9997:9997
+ - "9092:9092"
+ - "9997:9997"
environment:
KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092'
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- JMX_PORT: 9997
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
-
- zookeeper1:
- image: confluentinc/cp-zookeeper:5.2.4
- environment:
- ZOOKEEPER_CLIENT_PORT: 2181
- ZOOKEEPER_TICK_TIME: 2000
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
kafka1:
- image: confluentinc/cp-kafka:5.3.1
- depends_on:
- - zookeeper1
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka1
+ container_name: kafka1
ports:
- - 9093:9093
- - 9998:9998
+ - "9093:9092"
+ - "9998:9998"
environment:
KAFKA_BROKER_ID: 1
- KAFKA_ZOOKEEPER_CONNECT: zookeeper1:2181
- KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:29092,PLAINTEXT_HOST://localhost:9093
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
- KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+      KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka1:29092,PLAINTEXT_HOST://localhost:9093'
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- JMX_PORT: 9998
- KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka1 -Dcom.sun.management.jmxremote.rmi.port=9998
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9998
+      KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka1 -Dcom.sun.management.jmxremote.rmi.port=9998
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka1:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka1:29092,CONTROLLER://kafka1:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
schemaregistry0:
- image: confluentinc/cp-schema-registry:5.5.0
+ image: confluentinc/cp-schema-registry:7.2.1
ports:
- 8085:8085
depends_on:
- - zookeeper0
- kafka0
environment:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
- SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper0:2181
SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
@@ -97,15 +99,13 @@ services:
SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
schemaregistry1:
- image: confluentinc/cp-schema-registry:5.5.0
+ image: confluentinc/cp-schema-registry:7.2.1
ports:
- 18085:8085
depends_on:
- - zookeeper1
- kafka1
environment:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:29092
- SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper1:2181
SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
SCHEMA_REGISTRY_HOST_NAME: schemaregistry1
SCHEMA_REGISTRY_LISTENERS: http://schemaregistry1:8085
@@ -115,7 +115,7 @@ services:
SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
kafka-connect0:
- image: confluentinc/cp-kafka-connect:6.0.1
+ image: confluentinc/cp-kafka-connect:7.2.1
ports:
- 8083:8083
depends_on:
@@ -140,14 +140,14 @@ services:
CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
kafka-init-topics:
- image: confluentinc/cp-kafka:5.3.1
+ image: confluentinc/cp-kafka:7.2.1
volumes:
- - ./message.json:/data/message.json
+ - ./data/message.json:/data/message.json
depends_on:
- kafka1
command: "bash -c 'echo Waiting for Kafka to be ready... && \
cub kafka-ready -b kafka1:29092 1 30 && \
- kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
- kafka-topics --create --topic second.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
- kafka-topics --create --topic first.messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181 && \
- kafka-console-producer --broker-list kafka1:29092 -topic second.users < /data/message.json'"
+ kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka1:29092 && \
+ kafka-topics --create --topic second.messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka1:29092 && \
+ kafka-topics --create --topic first.messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
+                  kafka-console-producer --bootstrap-server kafka1:29092 --topic second.users < /data/message.json'"
diff --git a/documentation/compose/kafka-with-zookeeper.yaml b/documentation/compose/kafka-with-zookeeper.yaml
new file mode 100644
index 00000000000..7342a976314
--- /dev/null
+++ b/documentation/compose/kafka-with-zookeeper.yaml
@@ -0,0 +1,48 @@
+---
+version: '2'
+services:
+
+ zookeeper:
+ image: confluentinc/cp-zookeeper:7.2.1
+ hostname: zookeeper
+ container_name: zookeeper
+ ports:
+ - "2181:2181"
+ environment:
+ ZOOKEEPER_CLIENT_PORT: 2181
+ ZOOKEEPER_TICK_TIME: 2000
+
+ kafka:
+ image: confluentinc/cp-server:7.2.1
+ hostname: kafka
+ container_name: kafka
+ depends_on:
+ - zookeeper
+ ports:
+ - "9092:9092"
+ - "9997:9997"
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: kafka
+
+ kafka-init-topics:
+ image: confluentinc/cp-kafka:7.2.1
+ volumes:
+ - ./data/message.json:/data/message.json
+ depends_on:
+ - kafka
+ command: "bash -c 'echo Waiting for Kafka to be ready... && \
+ cub kafka-ready -b kafka:29092 1 30 && \
+ kafka-topics --create --topic users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka:29092 && \
+ kafka-topics --create --topic messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka:29092 && \
+ kafka-console-producer --bootstrap-server kafka:29092 --topic users < /data/message.json'"
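Unlike the other compose files, this one defines only a ZooKeeper-based cluster plus a topic-init job. To browse it, a kafka-ui service could be added alongside — a sketch, not part of this file:

```yaml
  kafka-ui:
    container_name: kafka-ui
    image: provectuslabs/kafka-ui:latest
    ports:
      - 8080:8080
    depends_on:
      - kafka
    environment:
      KAFKA_CLUSTERS_0_NAME: local
      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
      KAFKA_CLUSTERS_0_METRICS_PORT: 9997
```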
diff --git a/documentation/compose/ldap.yaml b/documentation/compose/ldap.yaml
new file mode 100644
index 00000000000..e4ff68f3ba0
--- /dev/null
+++ b/documentation/compose/ldap.yaml
@@ -0,0 +1,79 @@
+---
+version: '2'
+services:
+
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8080:8080
+ depends_on:
+ - kafka0
+ - schemaregistry0
+ environment:
+ KAFKA_CLUSTERS_0_NAME: local
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
+ KAFKA_CLUSTERS_0_METRICS_PORT: 9997
+ KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry0:8085
+
+ AUTH_TYPE: "LDAP"
+ SPRING_LDAP_URLS: "ldap://ldap:10389"
+ SPRING_LDAP_BASE: "cn={0},ou=people,dc=planetexpress,dc=com"
+ SPRING_LDAP_ADMIN_USER: "cn=admin,dc=planetexpress,dc=com"
+ SPRING_LDAP_ADMIN_PASSWORD: "GoodNewsEveryone"
+ SPRING_LDAP_USER_FILTER_SEARCH_BASE: "dc=planetexpress,dc=com"
+ SPRING_LDAP_USER_FILTER_SEARCH_FILTER: "(&(uid={0})(objectClass=inetOrgPerson))"
+ SPRING_LDAP_GROUP_FILTER_SEARCH_BASE: "ou=people,dc=planetexpress,dc=com"
+# OAUTH2.LDAP.ACTIVEDIRECTORY: true
+#      OAUTH2.LDAP.ACTIVEDIRECTORY.DOMAIN: "memelord.lol"
+
+ ldap:
+ image: rroemhild/test-openldap:latest
+ hostname: "ldap"
+ ports:
+ - 10389:10389
+
+ kafka0:
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka0
+ container_name: kafka0
+ ports:
+ - "9092:9092"
+ - "9997:9997"
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
+ KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka0:29093'
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ volumes:
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
+
+ schemaregistry0:
+ image: confluentinc/cp-schema-registry:7.2.1
+ ports:
+ - 8085:8085
+ depends_on:
+ - kafka0
+ environment:
+ SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
+ SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
+ SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
+ SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
+
+ SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
+ SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
+ SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
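The `SPRING_LDAP_*` variables are standard Spring environment overrides. Assuming ordinary relaxed binding (property names inferred from the env vars, not confirmed by this PR), the same LDAP setup as `application.yml` might look like:

```yaml
auth:
  type: LDAP
spring:
  ldap:
    urls: "ldap://ldap:10389"
    base: "cn={0},ou=people,dc=planetexpress,dc=com"
    admin-user: "cn=admin,dc=planetexpress,dc=com"
    admin-password: "GoodNewsEveryone"
    user-filter-search-base: "dc=planetexpress,dc=com"
    user-filter-search-filter: "(&(uid={0})(objectClass=inetOrgPerson))"
    group-filter-search-base: "ou=people,dc=planetexpress,dc=com"
```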
diff --git a/documentation/compose/nginx-proxy.yaml b/documentation/compose/nginx-proxy.yaml
new file mode 100644
index 00000000000..9a255a5eb9e
--- /dev/null
+++ b/documentation/compose/nginx-proxy.yaml
@@ -0,0 +1,19 @@
+---
+version: '2'
+services:
+ nginx:
+ image: nginx:latest
+ volumes:
+ - ./data/proxy.conf:/etc/nginx/conf.d/default.conf
+ ports:
+ - 8080:80
+
+ kafka-ui:
+ container_name: kafka-ui
+ image: provectuslabs/kafka-ui:latest
+ ports:
+ - 8082:8080
+ environment:
+ KAFKA_CLUSTERS_0_NAME: local
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:9092
+ SERVER_SERVLET_CONTEXT_PATH: /kafka-ui
diff --git a/documentation/compose/proto/key-types.proto b/documentation/compose/proto/key-types.proto
new file mode 100644
index 00000000000..1f5e22a427d
--- /dev/null
+++ b/documentation/compose/proto/key-types.proto
@@ -0,0 +1,15 @@
+syntax = "proto3";
+package test;
+
+import "google/protobuf/wrappers.proto";
+
+message MyKey {
+ string myKeyF1 = 1;
+ google.protobuf.UInt64Value uint_64_wrapper = 2;
+}
+
+message MySpecificTopicKey {
+ string special_field1 = 1;
+ string special_field2 = 2;
+ google.protobuf.FloatValue float_wrapper = 3;
+}
diff --git a/documentation/compose/proto/values.proto b/documentation/compose/proto/values.proto
new file mode 100644
index 00000000000..fff8d9bbd96
--- /dev/null
+++ b/documentation/compose/proto/values.proto
@@ -0,0 +1,14 @@
+syntax = "proto3";
+package test;
+
+message MySpecificTopicValue {
+ string f1 = 1;
+ string f2 = 2;
+}
+
+message MyValue {
+ int32 version = 1;
+ string payload = 2;
+  map<int32, string> intToStringMap = 3;
+  map<string, MyValue> strToObjMap = 4;
+}
diff --git a/documentation/compose/scripts/clusterID b/documentation/compose/scripts/clusterID
new file mode 100644
index 00000000000..4417a5a68d7
--- /dev/null
+++ b/documentation/compose/scripts/clusterID
@@ -0,0 +1 @@
+zlFiTJelTOuhnklFwLWixw
\ No newline at end of file
diff --git a/documentation/compose/scripts/create_cluster_id.sh b/documentation/compose/scripts/create_cluster_id.sh
new file mode 100644
index 00000000000..d946fbc4af3
--- /dev/null
+++ b/documentation/compose/scripts/create_cluster_id.sh
@@ -0,0 +1 @@
+kafka-storage random-uuid > /workspace/kafka-ui/documentation/compose/clusterID
\ No newline at end of file
diff --git a/documentation/compose/scripts/update_run.sh b/documentation/compose/scripts/update_run.sh
new file mode 100755
index 00000000000..023c832b4e1
--- /dev/null
+++ b/documentation/compose/scripts/update_run.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+# This script is required to run a Kafka cluster (without ZooKeeper)
+
+# Docker workaround: Remove check for KAFKA_ZOOKEEPER_CONNECT parameter
+sed -i '/KAFKA_ZOOKEEPER_CONNECT/d' /etc/confluent/docker/configure
+
+# Docker workaround: Ignore cub zk-ready
+sed -i 's/cub zk-ready/echo ignore zk-ready/' /etc/confluent/docker/ensure
+
+# KRaft required step: Format the storage directory with a new cluster ID
+echo "kafka-storage format --ignore-formatted -t $(kafka-storage random-uuid) -c /etc/kafka/kafka.properties" >> /etc/confluent/docker/ensure
\ No newline at end of file
diff --git a/documentation/compose/scripts/update_run_cluster.sh b/documentation/compose/scripts/update_run_cluster.sh
new file mode 100644
index 00000000000..31da333aae6
--- /dev/null
+++ b/documentation/compose/scripts/update_run_cluster.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+# This script is required to run a Kafka cluster (without ZooKeeper)
+
+# Docker workaround: Remove check for KAFKA_ZOOKEEPER_CONNECT parameter
+sed -i '/KAFKA_ZOOKEEPER_CONNECT/d' /etc/confluent/docker/configure
+
+# Docker workaround: Ignore cub zk-ready
+sed -i 's/cub zk-ready/echo ignore zk-ready/' /etc/confluent/docker/ensure
+
+# KRaft required step: Format the storage directory with a new cluster ID
+echo "kafka-storage format --ignore-formatted -t $(cat /tmp/clusterID) -c /etc/kafka/kafka.properties" >> /etc/confluent/docker/ensure
\ No newline at end of file
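The only difference from `update_run.sh` is the cluster-ID source: instead of a random UUID generated at container start, this variant reads a fixed ID from `/tmp/clusterID`, keeping the formatted KRaft storage stable across restarts. Wiring it into a compose service would look roughly like this (the mounts are assumptions based on the script paths, mirroring how the other files mount `update_run.sh`):

```yaml
  kafka0:
    image: confluentinc/cp-kafka:7.2.1
    volumes:
      - ./scripts/update_run_cluster.sh:/tmp/update_run.sh
      - ./scripts/clusterID:/tmp/clusterID
    command: "bash -c '/tmp/update_run.sh && /etc/confluent/docker/run'"
```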
diff --git a/documentation/compose/ssl/generate_certs.sh b/documentation/compose/ssl/generate_certs.sh
old mode 100644
new mode 100755
index ebb916657bd..455321ef580
--- a/documentation/compose/ssl/generate_certs.sh
+++ b/documentation/compose/ssl/generate_certs.sh
@@ -144,7 +144,8 @@ echo "Now the trust store's private key (CA) will sign the keystore's certificat
echo
openssl x509 -req -CA $CA_CERT_FILE -CAkey $trust_store_private_key_file \
-in $KEYSTORE_SIGN_REQUEST -out $KEYSTORE_SIGNED_CERT \
- -days $VALIDITY_IN_DAYS -CAcreateserial
+ -days $VALIDITY_IN_DAYS -CAcreateserial \
+ -extensions kafka -extfile san.cnf
# creates $KEYSTORE_SIGN_REQUEST_SRL which is never used or needed.
echo
diff --git a/documentation/compose/ssl/kafka.keystore.jks b/documentation/compose/ssl/kafka.keystore.jks
index 54b3f3d1bc4..eab29e914a8 100644
Binary files a/documentation/compose/ssl/kafka.keystore.jks and b/documentation/compose/ssl/kafka.keystore.jks differ
diff --git a/documentation/compose/ssl/kafka.truststore.jks b/documentation/compose/ssl/kafka.truststore.jks
index eff350ce4e9..875caf6f156 100644
Binary files a/documentation/compose/ssl/kafka.truststore.jks and b/documentation/compose/ssl/kafka.truststore.jks differ
diff --git a/documentation/compose/ssl/san.cnf b/documentation/compose/ssl/san.cnf
new file mode 100644
index 00000000000..5c69c8eca61
--- /dev/null
+++ b/documentation/compose/ssl/san.cnf
@@ -0,0 +1,2 @@
+[kafka]
+subjectAltName = DNS:kafka0,DNS:schemaregistry0,DNS:kafka-connect0,DNS:ksqldb0
diff --git a/documentation/compose/kafka-ui-traefik-proxy.yaml b/documentation/compose/traefik-proxy.yaml
similarity index 100%
rename from documentation/compose/kafka-ui-traefik-proxy.yaml
rename to documentation/compose/traefik-proxy.yaml
diff --git a/documentation/guides/AWS_IAM.md b/documentation/guides/AWS_IAM.md
deleted file mode 100644
index 80bfab205bc..00000000000
--- a/documentation/guides/AWS_IAM.md
+++ /dev/null
@@ -1,41 +0,0 @@
-# How to configure AWS IAM Authentication
-
-UI for Apache Kafka comes with built-in [aws-msk-iam-auth](https://github.com/aws/aws-msk-iam-auth) library.
-
-You can pass SASL configs in the properties section for each cluster.
-
-More details can be found here: [aws-msk-iam-auth](https://github.com/aws/aws-msk-iam-auth)
-
-## Examples:
-
-Please replace
-* <KAFKA_URL> with broker list
-* <PROFILE_NAME> with your aws profile
-
-
-### Running From Docker Image
-
-```sh
-docker run -p 8080:8080 \
- -e KAFKA_CLUSTERS_0_NAME=local \
- -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<KAFKA_URL> \
- -e KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL \
- -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=AWS_MSK_IAM \
- -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_CLIENT_CALLBACK_HANDLER_CLASS=software.amazon.msk.auth.iam.IAMClientCallbackHandler \
- -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=software.amazon.msk.auth.iam.IAMLoginModule required awsProfileName="<PROFILE_NAME>"; \
- -d provectuslabs/kafka-ui:latest
-```
-
-### Configuring by application.yaml
-
-```yaml
-kafka:
- clusters:
- - name: local
-    bootstrapServers: <KAFKA_URL>
- properties:
- security.protocol: SASL_SSL
- sasl.mechanism: AWS_MSK_IAM
- sasl.client.callback.handler.class: software.amazon.msk.auth.iam.IAMClientCallbackHandler
-      sasl.jaas.config: software.amazon.msk.auth.iam.IAMLoginModule required awsProfileName="<PROFILE_NAME>";
-```
\ No newline at end of file
diff --git a/documentation/guides/Protobuf.md b/documentation/guides/Protobuf.md
deleted file mode 100644
index d7c50ffb65e..00000000000
--- a/documentation/guides/Protobuf.md
+++ /dev/null
@@ -1,33 +0,0 @@
-# Kafkaui Protobuf Support
-
-Kafkaui supports deserializing protobuf messages in two ways:
-1. Using Confluent Schema Registry's [protobuf support](https://docs.confluent.io/platform/current/schema-registry/serdes-develop/serdes-protobuf.html).
-2. Supplying a protobuf file as well as a configuration that maps topic names to protobuf types.
-
-## Configuring Kafkaui with a Protobuf File
-
-To configure Kafkaui to deserialize protobuf messages using a supplied protobuf schema, add the following to the config:
-```yaml
-kafka:
- clusters:
- - # Cluster configuration omitted.
- # protobufFile is the path to the protobuf schema.
- protobufFile: path/to/my.proto
-    # protobufMessageName is the default protobuf type that is used to deserialize
- # the message's value if the topic is not found in protobufMessageNameByTopic.
- protobufMessageName: my.Type1
- # protobufMessageNameByTopic is a mapping of topic names to protobuf types.
- # This mapping is required and is used to deserialize the Kafka message's value.
- protobufMessageNameByTopic:
- topic1: my.Type1
- topic2: my.Type2
-    # protobufMessageNameForKey is the default protobuf type that is used to deserialize
- # the message's key if the topic is not found in protobufMessageNameForKeyByTopic.
- protobufMessageNameForKey: my.Type1
- # protobufMessageNameForKeyByTopic is a mapping of topic names to protobuf types.
- # This mapping is optional and is used to deserialize the Kafka message's key.
- # If a protobuf type is not found for a topic's key, the key is deserialized as a string,
- # unless protobufMessageNameForKey is specified.
- protobufMessageNameForKeyByTopic:
- topic1: my.KeyType1
-```
\ No newline at end of file
diff --git a/documentation/guides/SASL_SCRAM.md b/documentation/guides/SASL_SCRAM.md
deleted file mode 100644
index be360cd0aec..00000000000
--- a/documentation/guides/SASL_SCRAM.md
+++ /dev/null
@@ -1,58 +0,0 @@
-# How to configure SASL SCRAM Authentication
-
-You can pass SASL configs in the properties section for each cluster.
-
-## Examples:
-
-Please replace
-- <KAFKA_NAME> with cluster name
-- <KAFKA_URL> with broker list
-- <KAFKA_USERNAME> with username
-- <KAFKA_PASSWORD> with password
-
-### Running From Docker Image
-
-```sh
-docker run -p 8080:8080 \
- -e KAFKA_CLUSTERS_0_NAME=<KAFKA_NAME> \
- -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<KAFKA_URL> \
- -e KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL \
- -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=SCRAM-SHA-512 \
- -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=org.apache.kafka.common.security.scram.ScramLoginModule required username="<KAFKA_USERNAME>" password="<KAFKA_PASSWORD>"; \
- -d provectuslabs/kafka-ui:latest
-```
-
-### Running From Docker-compose file
-
-```yaml
-
-version: '3.4'
-services:
-
- kafka-ui:
- image: provectuslabs/kafka-ui
- container_name: kafka-ui
- ports:
- - "888:8080"
- restart: always
- environment:
-      - KAFKA_CLUSTERS_0_NAME=<KAFKA_NAME>
-      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<KAFKA_URL>
-      - KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL
-      - KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=SCRAM-SHA-512
-      - KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=org.apache.kafka.common.security.scram.ScramLoginModule required username="<KAFKA_USERNAME>" password="<KAFKA_PASSWORD>";
- - KAFKA_CLUSTERS_0_PROPERTIES_PROTOCOL=SASL
-```
-
-### Configuring by application.yaml
-
-```yaml
-kafka:
- clusters:
- - name: local
-    bootstrapServers: <KAFKA_URL>
- properties:
- security.protocol: SASL_SSL
- sasl.mechanism: SCRAM-SHA-512
-      sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username="<KAFKA_USERNAME>" password="<KAFKA_PASSWORD>";
-```
diff --git a/documentation/guides/SECURE_BROKER.md b/documentation/guides/SECURE_BROKER.md
deleted file mode 100644
index ab15bb63ab3..00000000000
--- a/documentation/guides/SECURE_BROKER.md
+++ /dev/null
@@ -1,7 +0,0 @@
-## Connecting to a Secure Broker
-
-The app supports TLS (SSL) and SASL connections for [encryption and authentication](http://kafka.apache.org/090/documentation.html#security).
-
-### Running From Docker-compose file
-
-See [this](/documentation/compose/kafka-ssl.yml) docker-compose file for an SSL-enabled Kafka reference.
diff --git a/documentation/guides/SSO.md b/documentation/guides/SSO.md
deleted file mode 100644
index 1ddfab2c7fa..00000000000
--- a/documentation/guides/SSO.md
+++ /dev/null
@@ -1,72 +0,0 @@
-# How to configure SSO
-SSO additionally requires TLS to be configured for the application. In this example we will use a self-signed certificate; if you use CA-signed certificates, please skip step 1.
-## Step 1
-In this step we will generate a self-signed PKCS12 keypair.
-``` bash
-mkdir cert
-keytool -genkeypair -alias ui-for-apache-kafka -keyalg RSA -keysize 2048 \
- -storetype PKCS12 -keystore cert/ui-for-apache-kafka.p12 -validity 3650
-```
-## Step 2
-Create a new application in any SSO provider; we will continue with [Auth0](https://auth0.com).
-
-
-
-After that, you need to provide callback URLs; in our case we will use `https://127.0.0.1:8080/login/oauth2/code/auth0`
-
-
-
-These are the main parameters required for enabling SSO
-
-
-
-## Step 3
-To launch UI for Apache Kafka with TLS and SSO enabled, run the following:
-``` bash
-docker run -p 8080:8080 -v `pwd`/cert:/opt/cert -e AUTH_TYPE=LOGIN_FORM \
- -e SECURITY_BASIC_ENABLED=true \
- -e SERVER_SSL_KEY_STORE_TYPE=PKCS12 \
- -e SERVER_SSL_KEY_STORE=/opt/cert/ui-for-apache-kafka.p12 \
- -e SERVER_SSL_KEY_STORE_PASSWORD=123456 \
- -e SERVER_SSL_KEY_ALIAS=ui-for-apache-kafka \
- -e SERVER_SSL_ENABLED=true \
- -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTID=uhvaPKIHU4ZF8Ne4B6PGvF0hWW6OcUSB \
- -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTSECRET=YXfRjmodifiedTujnkVr7zuW9ECCAK4TcnCio-i \
- -e SPRING_SECURITY_OAUTH2_CLIENT_PROVIDER_AUTH0_ISSUER_URI=https://dev-a63ggcut.auth0.com/ \
- -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_SCOPE=openid \
- -e TRUST_STORE=/opt/cert/ui-for-apache-kafka.p12 \
- -e TRUST_STORE_PASSWORD=123456 \
-provectuslabs/kafka-ui:latest
-```
-With a trusted CA-signed SSL certificate and SSL termination outside of the application, we can pass only the SSO-related environment variables:
-``` bash
-docker run -p 8080:8080 -v `pwd`/cert:/opt/cert -e AUTH_TYPE=OAUTH2 \
- -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTID=uhvaPKIHU4ZF8Ne4B6PGvF0hWW6OcUSB \
- -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTSECRET=YXfRjmodifiedTujnkVr7zuW9ECCAK4TcnCio-i \
- -e SPRING_SECURITY_OAUTH2_CLIENT_PROVIDER_AUTH0_ISSUER_URI=https://dev-a63ggcut.auth0.com/ \
- -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_SCOPE=openid \
-provectuslabs/kafka-ui:latest
-```
-
-## Step 4 (Load Balancer HTTP) (optional)
-If you're using load balancer/proxy and use HTTP between the proxy and the app, you might want to set `server_forward-headers-strategy` to `native` as well (`SERVER_FORWARDHEADERSSTRATEGY=native`), for more info refer to [this issue](https://github.com/provectus/kafka-ui/issues/1017).
-
-## Step 5 (Azure) (optional)
-For Azure AD (Office365) OAUTH2 you'll want to add additional environment variables:
-
-```bash
-docker run -p 8080:8080 \
- -e KAFKA_CLUSTERS_0_NAME="${cluster_name}"\
- -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS="${kafka_listeners}" \
- -e KAFKA_CLUSTERS_0_ZOOKEEPER="${zookeeper_servers}" \
- -e KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS="${kafka_connect_servers}" \
- -e AUTH_TYPE=OAUTH2 \
- -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTID=uhvaPKIHU4ZF8Ne4B6PGvF0hWW6OcUSB \
- -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTSECRET=YXfRjmodifiedTujnkVr7zuW9ECCAK4TcnCio-i \
- -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_SCOPE="https://graph.microsoft.com/User.Read" \
- -e SPRING_SECURITY_OAUTH2_CLIENT_PROVIDER_AUTH0_ISSUER_URI="https://login.microsoftonline.com/{tenant-id}/v2.0" \
- -d provectuslabs/kafka-ui:latest
-```
-
-Note that the scope is created by default when the application registration is done in the Azure portal.
-You'll need to update the application registration manifest to include `"accessTokenAcceptedVersion": 2`.
diff --git a/documentation/project/ROADMAP.md b/documentation/project/ROADMAP.md
deleted file mode 100644
index 407dab725a1..00000000000
--- a/documentation/project/ROADMAP.md
+++ /dev/null
@@ -1,22 +0,0 @@
-Kafka-UI Project Roadmap
-====================
-
-Roadmap exists in a form of a github project board and is located [here](https://github.com/provectus/kafka-ui/projects/8).
-
-### How to use this document
-
-The roadmap provides a list of features we decided to prioritize in project development. It should serve as a reference point for understanding the project's goals.
-
-We prioritize features based on community feedback, our own vision, and other conditions and circumstances.
-
-The roadmap sets the general way of development. The roadmap is mostly about long-term features. All the features could be re-prioritized, rescheduled or canceled.
-
-If there's no feature `X`, that **doesn't** mean we're **not** going to implement it. Feel free to raise an issue for consideration.
-If a feature you want to see live is not present on the roadmap, but there's an issue for it, feel free to vote for it using reactions in the issue.
-
-
-### How to contribute
-
-Since the roadmap consists mostly of big long-term features, implementing them might not be easy for a first-time outside contributor.
-
-A good starting point is checking the [CONTRIBUTING.md](https://github.com/provectus/kafka-ui/blob/master/CONTRIBUTING.md) document.
\ No newline at end of file
diff --git a/documentation/project/contributing/README.md b/documentation/project/contributing/README.md
deleted file mode 100644
index f30100ecf61..00000000000
--- a/documentation/project/contributing/README.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# Contributing guidelines
-
-### Set up the local environment for development
-
-* [Prerequisites](software-required.md)
-
-* [Building the app](building.md)
-* [Running the app](running.md)
-* [Writing tests](testing.md)
\ No newline at end of file
diff --git a/documentation/project/contributing/building.md b/documentation/project/contributing/building.md
deleted file mode 100644
index 21562426e5e..00000000000
--- a/documentation/project/contributing/building.md
+++ /dev/null
@@ -1,24 +0,0 @@
-### Building the application locally
-
-Once you have installed the prerequisites and cloned the repository, run the following commands in your project directory:
-
-Build a Docker container with the app:
-```sh
-./mvnw clean install -Pprod
-```
-Start the app with Kafka clusters:
-```sh
-docker-compose -f ./documentation/compose/kafka-ui.yaml up -d
-```
-To see the app, navigate to http://localhost:8080.
-
-If you want to start only the Kafka clusters (to run the app via `spring-boot:run`):
-```sh
-docker-compose -f ./documentation/compose/kafka-clusters-only.yaml up -d
-```
-
-Then, start the app.
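-
-For example, from the `kafka-ui-api` directory (a minimal sketch; see [running the app](running.md) for details and configuration options):
-```sh
-./mvnw spring-boot:run -Pprod
-```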
-
-## Where to go next
-
-In the next section, you'll [learn how to run the application](running.md).
\ No newline at end of file
diff --git a/documentation/project/contributing/running.md b/documentation/project/contributing/running.md
deleted file mode 100644
index a74f198f0c1..00000000000
--- a/documentation/project/contributing/running.md
+++ /dev/null
@@ -1,25 +0,0 @@
-# Running the app
-
-### Running locally via docker
-If you have built the container locally, or wish to run a public one, you can bring everything up like this:
-```shell
-docker-compose -f documentation/compose/kafka-ui.yaml up -d
-```
-
-### Running locally without docker
-Once you have built the app, run the following in `kafka-ui-api/`:
-
-```sh
-./mvnw spring-boot:run -Pprod
-
-# or
-
-./mvnw spring-boot:run -Pprod -Dspring.config.location=file:///path/to/conf.yaml
-```
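-
-A minimal `conf.yaml` for the second command might look like this (a sketch assuming a single local cluster; adjust names and addresses for your environment):
-```yaml
-kafka:
-  clusters:
-    - name: local
-      bootstrapServers: localhost:9092
-```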
-
-### Running in Kubernetes
-``` bash
-helm repo add kafka-ui https://provectus.github.io/kafka-ui
-helm install kafka-ui kafka-ui/kafka-ui
-```
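-
-To customize the deployment, standard Helm usage applies; for example, pass your own values file (`my-values.yaml` is a hypothetical name; the available values are described in the chart documentation):
-``` bash
-helm install kafka-ui kafka-ui/kafka-ui -f my-values.yaml
-```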
-To read more, please refer to the [chart documentation](charts/kafka-ui/README.md).
\ No newline at end of file
diff --git a/documentation/project/contributing/set-up-git.md b/documentation/project/contributing/set-up-git.md
deleted file mode 100644
index 2400b8b509d..00000000000
--- a/documentation/project/contributing/set-up-git.md
+++ /dev/null
@@ -1,8 +0,0 @@
-### Nothing special here yet.
-
\ No newline at end of file
diff --git a/documentation/project/contributing/software-required.md b/documentation/project/contributing/software-required.md
deleted file mode 100644
index 8d3b86c7311..00000000000
--- a/documentation/project/contributing/software-required.md
+++ /dev/null
@@ -1,31 +0,0 @@
-### Get the required software for Linux or macOS
-
-This page explains how to get the software you need to use a Linux or macOS
-machine for local development. Before you begin contributing you must have:
-
-* a GitHub account
-* Java 13 or newer
-* `git`
-* `docker`
-
-### Installing prerequisites on macOS
-1. Install [brew](https://brew.sh/).
-
-2. Install brew cask:
-```sh
-> brew cask
-```
-3. Install JDK 13 via Homebrew cask:
-```sh
-> brew tap adoptopenjdk/openjdk
-> brew install adoptopenjdk13
-```
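-
-To verify the installation (a quick sanity check):
-```sh
-> java -version
-```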
-
-## Tips
-
-Consider allocating at least 4 GB of memory to Docker.
-Otherwise, some apps within the stack (e.g. `kafka-ui.yaml`) might crash.
-
-## Where to go next
-
-In the next section, you'll [learn how to build the application](building.md).
diff --git a/documentation/project/contributing/testing.md b/documentation/project/contributing/testing.md
deleted file mode 100644
index 98c90f8a54d..00000000000
--- a/documentation/project/contributing/testing.md
+++ /dev/null
@@ -1,28 +0,0 @@
-# Testing
-
-
-
-## Test suites
-
-
-## Writing new tests
-
-
-### Writing tests for new features
-
-
-### Writing tests for bug fixes
-
-
-### Writing new integration tests
-
-
-
-## Running tests
-
-### Unit Tests
-
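-Assuming the standard Maven setup used elsewhere in this repository, unit tests can be run with:
-```sh
-./mvnw test
-```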
-
-### Integration Tests
-
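-Assuming integration tests are bound to Maven's `verify` phase (an assumption; the exact profile may differ):
-```sh
-./mvnw verify
-```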
-
diff --git a/etc/checkstyle/checkstyle-e2e.xml b/etc/checkstyle/checkstyle-e2e.xml
new file mode 100644
index 00000000000..c2af9c987b3
--- /dev/null
+++ b/etc/checkstyle/checkstyle-e2e.xml
@@ -0,0 +1,333 @@
+<!-- Checkstyle configuration for the e2e module (333 lines of rule definitions) -->
diff --git a/etc/checkstyle/checkstyle.xml b/etc/checkstyle/checkstyle.xml
index c47505c74b0..745f1bc368d 100644
--- a/etc/checkstyle/checkstyle.xml
+++ b/etc/checkstyle/checkstyle.xml
@@ -297,7 +297,7 @@
value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF, VARIABLE_DEF"/>
-
+
@@ -318,7 +318,7 @@
-
+
@@ -330,4 +330,4 @@
-
\ No newline at end of file
+
diff --git a/kafka-ui-api/.mvn/wrapper/MavenWrapperDownloader.java b/kafka-ui-api/.mvn/wrapper/MavenWrapperDownloader.java
deleted file mode 100644
index e76d1f3241d..00000000000
--- a/kafka-ui-api/.mvn/wrapper/MavenWrapperDownloader.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright 2007-present the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.net.*;
-import java.io.*;
-import java.nio.channels.*;
-import java.util.Properties;
-
-public class MavenWrapperDownloader {
-
- private static final String WRAPPER_VERSION = "0.5.6";
- /**
- * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
- */
- private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
- + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
-
- /**
- * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
- * use instead of the default one.
- */
- private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
- ".mvn/wrapper/maven-wrapper.properties";
-
- /**
- * Path where the maven-wrapper.jar will be saved to.
- */
- private static final String MAVEN_WRAPPER_JAR_PATH =
- ".mvn/wrapper/maven-wrapper.jar";
-
- /**
- * Name of the property which should be used to override the default download url for the wrapper.
- */
- private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
-
- public static void main(String args[]) {
- System.out.println("- Downloader started");
- File baseDirectory = new File(args[0]);
- System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
-
- // If the maven-wrapper.properties exists, read it and check if it contains a custom
- // wrapperUrl parameter.
- File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
- String url = DEFAULT_DOWNLOAD_URL;
- if(mavenWrapperPropertyFile.exists()) {
- FileInputStream mavenWrapperPropertyFileInputStream = null;
- try {
- mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
- Properties mavenWrapperProperties = new Properties();
- mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
- url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
- } catch (IOException e) {
- System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
- } finally {
- try {
- if(mavenWrapperPropertyFileInputStream != null) {
- mavenWrapperPropertyFileInputStream.close();
- }
- } catch (IOException e) {
- // Ignore ...
- }
- }
- }
- System.out.println("- Downloading from: " + url);
-
- File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
- if(!outputFile.getParentFile().exists()) {
- if(!outputFile.getParentFile().mkdirs()) {
- System.out.println(
- "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
- }
- }
- System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
- try {
- downloadFileFromURL(url, outputFile);
- System.out.println("Done");
- System.exit(0);
- } catch (Throwable e) {
- System.out.println("- Error downloading");
- e.printStackTrace();
- System.exit(1);
- }
- }
-
- private static void downloadFileFromURL(String urlString, File destination) throws Exception {
- if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
- String username = System.getenv("MVNW_USERNAME");
- char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
- Authenticator.setDefault(new Authenticator() {
- @Override
- protected PasswordAuthentication getPasswordAuthentication() {
- return new PasswordAuthentication(username, password);
- }
- });
- }
- URL website = new URL(urlString);
- ReadableByteChannel rbc;
- rbc = Channels.newChannel(website.openStream());
- FileOutputStream fos = new FileOutputStream(destination);
- fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
- fos.close();
- rbc.close();
- }
-
-}
diff --git a/kafka-ui-api/.mvn/wrapper/maven-wrapper.jar b/kafka-ui-api/.mvn/wrapper/maven-wrapper.jar
deleted file mode 100644
index 2cc7d4a55c0..00000000000
Binary files a/kafka-ui-api/.mvn/wrapper/maven-wrapper.jar and /dev/null differ
diff --git a/kafka-ui-api/.mvn/wrapper/maven-wrapper.properties b/kafka-ui-api/.mvn/wrapper/maven-wrapper.properties
deleted file mode 100644
index 642d572ce90..00000000000
--- a/kafka-ui-api/.mvn/wrapper/maven-wrapper.properties
+++ /dev/null
@@ -1,2 +0,0 @@
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
-wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
diff --git a/kafka-ui-api/Dockerfile b/kafka-ui-api/Dockerfile
index 5488a771810..98dcdb46ac3 100644
--- a/kafka-ui-api/Dockerfile
+++ b/kafka-ui-api/Dockerfile
@@ -1,7 +1,16 @@
-FROM alpine:3.15.0
+#FROM azul/zulu-openjdk-alpine:17-jre-headless
+FROM azul/zulu-openjdk-alpine@sha256:a36679ac0d28cb835e2a8c00e1e0d95509c6c51c5081c7782b85edb1f37a771a
-RUN apk add --no-cache openjdk13-jre libc6-compat gcompat \
-&& addgroup -S kafkaui && adduser -S kafkaui -G kafkaui
+RUN apk add --no-cache \
+ # snappy codec
+ gcompat \
+ # configuring timezones
+ tzdata
+RUN addgroup -S kafkaui && adduser -S kafkaui -G kafkaui
+
+# creating folder for dynamic config usage (certificates uploads, etc)
+RUN mkdir /etc/kafkaui/
+RUN chown kafkaui /etc/kafkaui
USER kafkaui
@@ -12,4 +21,5 @@ ENV JAVA_OPTS=
EXPOSE 8080
-CMD java $JAVA_OPTS -jar kafka-ui-api.jar
+# see JmxSslSocketFactory docs to understand why add-opens is needed
+CMD java --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED $JAVA_OPTS -jar kafka-ui-api.jar
diff --git a/kafka-ui-api/mvnw b/kafka-ui-api/mvnw
deleted file mode 100755
index a16b5431b4c..00000000000
--- a/kafka-ui-api/mvnw
+++ /dev/null
@@ -1,310 +0,0 @@
-#!/bin/sh
-# ----------------------------------------------------------------------------
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# ----------------------------------------------------------------------------
-
-# ----------------------------------------------------------------------------
-# Maven Start Up Batch script
-#
-# Required ENV vars:
-# ------------------
-# JAVA_HOME - location of a JDK home dir
-#
-# Optional ENV vars
-# -----------------
-# M2_HOME - location of maven2's installed home dir
-# MAVEN_OPTS - parameters passed to the Java VM when running Maven
-# e.g. to debug Maven itself, use
-# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
-# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
-# ----------------------------------------------------------------------------
-
-if [ -z "$MAVEN_SKIP_RC" ] ; then
-
- if [ -f /etc/mavenrc ] ; then
- . /etc/mavenrc
- fi
-
- if [ -f "$HOME/.mavenrc" ] ; then
- . "$HOME/.mavenrc"
- fi
-
-fi
-
-# OS specific support. $var _must_ be set to either true or false.
-cygwin=false;
-darwin=false;
-mingw=false
-case "`uname`" in
- CYGWIN*) cygwin=true ;;
- MINGW*) mingw=true;;
- Darwin*) darwin=true
- # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
- # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
- if [ -z "$JAVA_HOME" ]; then
- if [ -x "/usr/libexec/java_home" ]; then
- export JAVA_HOME="`/usr/libexec/java_home`"
- else
- export JAVA_HOME="/Library/Java/Home"
- fi
- fi
- ;;
-esac
-
-if [ -z "$JAVA_HOME" ] ; then
- if [ -r /etc/gentoo-release ] ; then
- JAVA_HOME=`java-config --jre-home`
- fi
-fi
-
-if [ -z "$M2_HOME" ] ; then
- ## resolve links - $0 may be a link to maven's home
- PRG="$0"
-
- # need this for relative symlinks
- while [ -h "$PRG" ] ; do
- ls=`ls -ld "$PRG"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '/.*' > /dev/null; then
- PRG="$link"
- else
- PRG="`dirname "$PRG"`/$link"
- fi
- done
-
- saveddir=`pwd`
-
- M2_HOME=`dirname "$PRG"`/..
-
- # make it fully qualified
- M2_HOME=`cd "$M2_HOME" && pwd`
-
- cd "$saveddir"
- # echo Using m2 at $M2_HOME
-fi
-
-# For Cygwin, ensure paths are in UNIX format before anything is touched
-if $cygwin ; then
- [ -n "$M2_HOME" ] &&
- M2_HOME=`cygpath --unix "$M2_HOME"`
- [ -n "$JAVA_HOME" ] &&
- JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
- [ -n "$CLASSPATH" ] &&
- CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
-fi
-
-# For Mingw, ensure paths are in UNIX format before anything is touched
-if $mingw ; then
- [ -n "$M2_HOME" ] &&
- M2_HOME="`(cd "$M2_HOME"; pwd)`"
- [ -n "$JAVA_HOME" ] &&
- JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
-fi
-
-if [ -z "$JAVA_HOME" ]; then
- javaExecutable="`which javac`"
- if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
- # readlink(1) is not available as standard on Solaris 10.
- readLink=`which readlink`
- if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
- if $darwin ; then
- javaHome="`dirname \"$javaExecutable\"`"
- javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
- else
- javaExecutable="`readlink -f \"$javaExecutable\"`"
- fi
- javaHome="`dirname \"$javaExecutable\"`"
- javaHome=`expr "$javaHome" : '\(.*\)/bin'`
- JAVA_HOME="$javaHome"
- export JAVA_HOME
- fi
- fi
-fi
-
-if [ -z "$JAVACMD" ] ; then
- if [ -n "$JAVA_HOME" ] ; then
- if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
- # IBM's JDK on AIX uses strange locations for the executables
- JAVACMD="$JAVA_HOME/jre/sh/java"
- else
- JAVACMD="$JAVA_HOME/bin/java"
- fi
- else
- JAVACMD="`which java`"
- fi
-fi
-
-if [ ! -x "$JAVACMD" ] ; then
- echo "Error: JAVA_HOME is not defined correctly." >&2
- echo " We cannot execute $JAVACMD" >&2
- exit 1
-fi
-
-if [ -z "$JAVA_HOME" ] ; then
- echo "Warning: JAVA_HOME environment variable is not set."
-fi
-
-CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
-
-# traverses directory structure from process work directory to filesystem root
-# first directory with .mvn subdirectory is considered project base directory
-find_maven_basedir() {
-
- if [ -z "$1" ]
- then
- echo "Path not specified to find_maven_basedir"
- return 1
- fi
-
- basedir="$1"
- wdir="$1"
- while [ "$wdir" != '/' ] ; do
- if [ -d "$wdir"/.mvn ] ; then
- basedir=$wdir
- break
- fi
- # workaround for JBEAP-8937 (on Solaris 10/Sparc)
- if [ -d "${wdir}" ]; then
- wdir=`cd "$wdir/.."; pwd`
- fi
- # end of workaround
- done
- echo "${basedir}"
-}
-
-# concatenates all lines of a file
-concat_lines() {
- if [ -f "$1" ]; then
- echo "$(tr -s '\n' ' ' < "$1")"
- fi
-}
-
-BASE_DIR=`find_maven_basedir "$(pwd)"`
-if [ -z "$BASE_DIR" ]; then
- exit 1;
-fi
-
-##########################################################################################
-# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
-# This allows using the maven wrapper in projects that prohibit checking in binary data.
-##########################################################################################
-if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
- if [ "$MVNW_VERBOSE" = true ]; then
- echo "Found .mvn/wrapper/maven-wrapper.jar"
- fi
-else
- if [ "$MVNW_VERBOSE" = true ]; then
- echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
- fi
- if [ -n "$MVNW_REPOURL" ]; then
- jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
- else
- jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
- fi
- while IFS="=" read key value; do
- case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
- esac
- done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
- if [ "$MVNW_VERBOSE" = true ]; then
- echo "Downloading from: $jarUrl"
- fi
- wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
- if $cygwin; then
- wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
- fi
-
- if command -v wget > /dev/null; then
- if [ "$MVNW_VERBOSE" = true ]; then
- echo "Found wget ... using wget"
- fi
- if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
- wget "$jarUrl" -O "$wrapperJarPath"
- else
- wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath"
- fi
- elif command -v curl > /dev/null; then
- if [ "$MVNW_VERBOSE" = true ]; then
- echo "Found curl ... using curl"
- fi
- if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
- curl -o "$wrapperJarPath" "$jarUrl" -f
- else
- curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
- fi
-
- else
- if [ "$MVNW_VERBOSE" = true ]; then
- echo "Falling back to using Java to download"
- fi
- javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
- # For Cygwin, switch paths to Windows format before running javac
- if $cygwin; then
- javaClass=`cygpath --path --windows "$javaClass"`
- fi
- if [ -e "$javaClass" ]; then
- if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
- if [ "$MVNW_VERBOSE" = true ]; then
- echo " - Compiling MavenWrapperDownloader.java ..."
- fi
- # Compiling the Java class
- ("$JAVA_HOME/bin/javac" "$javaClass")
- fi
- if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
- # Running the downloader
- if [ "$MVNW_VERBOSE" = true ]; then
- echo " - Running MavenWrapperDownloader.java ..."
- fi
- ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
- fi
- fi
- fi
-fi
-##########################################################################################
-# End of extension
-##########################################################################################
-
-export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
-if [ "$MVNW_VERBOSE" = true ]; then
- echo $MAVEN_PROJECTBASEDIR
-fi
-MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
-
-# For Cygwin, switch paths to Windows format before running java
-if $cygwin; then
- [ -n "$M2_HOME" ] &&
- M2_HOME=`cygpath --path --windows "$M2_HOME"`
- [ -n "$JAVA_HOME" ] &&
- JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
- [ -n "$CLASSPATH" ] &&
- CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
- [ -n "$MAVEN_PROJECTBASEDIR" ] &&
- MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
-fi
-
-# Provide a "standardized" way to retrieve the CLI args that will
-# work with both Windows and non-Windows executions.
-MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
-export MAVEN_CMD_LINE_ARGS
-
-WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
-
-exec "$JAVACMD" \
- $MAVEN_OPTS \
- -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
- "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
- ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
diff --git a/kafka-ui-api/mvnw.cmd b/kafka-ui-api/mvnw.cmd
deleted file mode 100644
index c8d43372c98..00000000000
--- a/kafka-ui-api/mvnw.cmd
+++ /dev/null
@@ -1,182 +0,0 @@
-@REM ----------------------------------------------------------------------------
-@REM Licensed to the Apache Software Foundation (ASF) under one
-@REM or more contributor license agreements. See the NOTICE file
-@REM distributed with this work for additional information
-@REM regarding copyright ownership. The ASF licenses this file
-@REM to you under the Apache License, Version 2.0 (the
-@REM "License"); you may not use this file except in compliance
-@REM with the License. You may obtain a copy of the License at
-@REM
-@REM https://www.apache.org/licenses/LICENSE-2.0
-@REM
-@REM Unless required by applicable law or agreed to in writing,
-@REM software distributed under the License is distributed on an
-@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-@REM KIND, either express or implied. See the License for the
-@REM specific language governing permissions and limitations
-@REM under the License.
-@REM ----------------------------------------------------------------------------
-
-@REM ----------------------------------------------------------------------------
-@REM Maven Start Up Batch script
-@REM
-@REM Required ENV vars:
-@REM JAVA_HOME - location of a JDK home dir
-@REM
-@REM Optional ENV vars
-@REM M2_HOME - location of maven2's installed home dir
-@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
-@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
-@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
-@REM e.g. to debug Maven itself, use
-@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
-@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
-@REM ----------------------------------------------------------------------------
-
-@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
-@echo off
-@REM set title of command window
-title %0
-@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
-@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
-
-@REM set %HOME% to equivalent of $HOME
-if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
-
-@REM Execute a user defined script before this one
-if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
-@REM check for pre script, once with legacy .bat ending and once with .cmd ending
-if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
-if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
-:skipRcPre
-
-@setlocal
-
-set ERROR_CODE=0
-
-@REM To isolate internal variables from possible post scripts, we use another setlocal
-@setlocal
-
-@REM ==== START VALIDATION ====
-if not "%JAVA_HOME%" == "" goto OkJHome
-
-echo.
-echo Error: JAVA_HOME not found in your environment. >&2
-echo Please set the JAVA_HOME variable in your environment to match the >&2
-echo location of your Java installation. >&2
-echo.
-goto error
-
-:OkJHome
-if exist "%JAVA_HOME%\bin\java.exe" goto init
-
-echo.
-echo Error: JAVA_HOME is set to an invalid directory. >&2
-echo JAVA_HOME = "%JAVA_HOME%" >&2
-echo Please set the JAVA_HOME variable in your environment to match the >&2
-echo location of your Java installation. >&2
-echo.
-goto error
-
-@REM ==== END VALIDATION ====
-
-:init
-
-@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
-@REM Fallback to current working directory if not found.
-
-set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
-IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
-
-set EXEC_DIR=%CD%
-set WDIR=%EXEC_DIR%
-:findBaseDir
-IF EXIST "%WDIR%"\.mvn goto baseDirFound
-cd ..
-IF "%WDIR%"=="%CD%" goto baseDirNotFound
-set WDIR=%CD%
-goto findBaseDir
-
-:baseDirFound
-set MAVEN_PROJECTBASEDIR=%WDIR%
-cd "%EXEC_DIR%"
-goto endDetectBaseDir
-
-:baseDirNotFound
-set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
-cd "%EXEC_DIR%"
-
-:endDetectBaseDir
-
-IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
-
-@setlocal EnableExtensions EnableDelayedExpansion
-for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
-@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
-
-:endReadAdditionalConfig
-
-SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
-set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
-set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
-
-set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
-
-FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
- IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
-)
-
-@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
-@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
-if exist %WRAPPER_JAR% (
- if "%MVNW_VERBOSE%" == "true" (
- echo Found %WRAPPER_JAR%
- )
-) else (
- if not "%MVNW_REPOURL%" == "" (
- SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
- )
- if "%MVNW_VERBOSE%" == "true" (
- echo Couldn't find %WRAPPER_JAR%, downloading it ...
- echo Downloading from: %DOWNLOAD_URL%
- )
-
- powershell -Command "&{"^
- "$webclient = new-object System.Net.WebClient;"^
- "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
- "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
- "}"^
- "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
- "}"
- if "%MVNW_VERBOSE%" == "true" (
- echo Finished downloading %WRAPPER_JAR%
- )
-)
-@REM End of extension
-
-@REM Provide a "standardized" way to retrieve the CLI args that will
-@REM work with both Windows and non-Windows executions.
-set MAVEN_CMD_LINE_ARGS=%*
-
-%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
-if ERRORLEVEL 1 goto error
-goto end
-
-:error
-set ERROR_CODE=1
-
-:end
-@endlocal & set ERROR_CODE=%ERROR_CODE%
-
-if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
-@REM check for post script, once with legacy .bat ending and once with .cmd ending
-if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
-if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
-:skipRcPost
-
-@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
-if "%MAVEN_BATCH_PAUSE%" == "on" pause
-
-if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
-
-exit /B %ERROR_CODE%
diff --git a/kafka-ui-api/pom.xml b/kafka-ui-api/pom.xml
index 9665aad565f..d572a4ffa34 100644
--- a/kafka-ui-api/pom.xml
+++ b/kafka-ui-api/pom.xml
@@ -12,7 +12,7 @@
kafka-ui-api
- 0.8.8
+ 0.8.10
jacoco
reuseReports
${project.basedir}/target/jacoco.exec
@@ -20,18 +20,6 @@
java
-
-
-
- org.springframework.boot
- spring-boot-dependencies
- ${spring-boot.version}
- pom
- import
-
-
-
-
org.springframework.boot
@@ -54,6 +42,11 @@
kafka-ui-contract
${project.version}
+
+ com.provectus
+ kafka-ui-serde-api
+ ${kafka-ui-serde-api.version}
+
org.apache.kafka
kafka-clients
@@ -62,7 +55,7 @@
org.apache.commons
commons-lang3
- 3.9
+ 3.12.0
org.projectlombok
@@ -88,6 +81,12 @@
io.confluent
kafka-json-schema-serializer
${confluent.version}
+
+
+ commons-collections
+ commons-collections
+
+
io.confluent
@@ -98,7 +97,7 @@
software.amazon.msk
aws-msk-iam-auth
- 1.1.3
+ 1.1.7
@@ -116,6 +115,16 @@
io.projectreactor.addons
reactor-extra
+
+ org.json
+ json
+ ${org.json.version}
+
+
+ io.micrometer
+ micrometer-registry-prometheus
+ runtime
+
org.springframework.boot
@@ -132,28 +141,29 @@
commons-pool2
${apache.commons.version}
+
+ org.apache.commons
+ commons-collections4
+ 4.4
+
org.testcontainers
testcontainers
- ${test.containers.version}
test
org.testcontainers
kafka
- ${test.containers.version}
test
org.testcontainers
junit-jupiter
- ${test.containers.version}
test
org.junit.jupiter
junit-jupiter-engine
- ${junit-jupiter-engine.version}
test
@@ -168,6 +178,12 @@
${mockito.version}
test
+
+ net.bytebuddy
+ byte-buddy
+ ${byte-buddy.version}
+ test
+
org.assertj
assertj-core
@@ -180,6 +196,18 @@
2.2.14
test
+
+ com.squareup.okhttp3
+ mockwebserver
+ ${okhttp3.mockwebserver.version}
+ test
+
+
+ com.squareup.okhttp3
+ okhttp
+ ${okhttp3.mockwebserver.version}
+ test
+
org.springframework.boot
@@ -193,15 +221,34 @@
- org.springframework.boot
- spring-boot-starter-data-ldap
+ org.opendatadiscovery
+ oddrn-generator-java
+ ${odd-oddrn-generator.version}
+
+
+ org.opendatadiscovery
+ ingestion-contract-client
+
+
+ org.springframework.boot
+ spring-boot-starter-webflux
+
+
+ io.projectreactor
+ reactor-core
+
+
+ io.projectreactor.ipc
+ reactor-netty
+
+
+ ${odd-oddrn-client.version}
+
org.springframework.security
spring-security-ldap
-
-
org.codehaus.groovy
groovy-jsr223
@@ -212,6 +259,16 @@
groovy-json
${groovy.version}
+
+ org.apache.datasketches
+ datasketches-java
+ ${datasketches-java.version}
+
+
+ org.springframework.boot
+ spring-boot-devtools
+ true
+
@@ -225,6 +282,7 @@
repackage
+ build-info
@@ -232,10 +290,7 @@
org.apache.maven.plugins
maven-compiler-plugin
- ${maven-compiler-plugin.version}
- ${maven.compiler.source}
- ${maven.compiler.target}
org.mapstruct
@@ -263,7 +318,6 @@
org.apache.maven.plugins
maven-surefire-plugin
- ${maven-surefire-plugin.version}
@{argLine} --illegal-access=permit
@@ -271,12 +325,12 @@
org.apache.maven.plugins
maven-checkstyle-plugin
- 3.1.1
+ 3.3.0
com.puppycrawl.tools
checkstyle
- 8.32
+ 10.3.1
@@ -296,6 +350,7 @@
+
org.antlr
@@ -348,7 +403,7 @@
pl.project13.maven
git-commit-id-plugin
- 4.0.0
+ 4.9.10
get-the-git-infos
@@ -370,7 +425,6 @@
maven-resources-plugin
- ${maven-resources-plugin.version}
copy-resources
@@ -394,48 +448,61 @@
frontend-maven-plugin
${frontend-maven-plugin.version}
+ ${skipUIBuild}
../kafka-ui-react-app
- ${project.version}
- ${git.commit.id.abbrev}
+ ${project.version}
+ ${git.commit.id.abbrev}
- install node and npm
+ install node and pnpm
- install-node-and-npm
+ install-node-and-pnpm
${node.version}
+ ${pnpm.version}
- npm install
+ pnpm install
- npm
+ pnpm
install
- npm run build
+ pnpm build
- npm
+ pnpm
- run build
+ build
- com.spotify
- dockerfile-maven-plugin
- ${dockerfile-maven-plugin.version}
+ io.fabric8
+ docker-maven-plugin
+ ${fabric8-maven-plugin.version}
- true
+ true
+
+
+ provectuslabs/kafka-ui:${git.revision}
+
+ ${project.basedir}
+
+ ${project.build.finalName}.jar
+
+
+
+
@@ -444,14 +511,6 @@
build
-
- ${git.revision}
- provectuslabs/kafka-ui
-
- ${project.build.finalName}.jar
- ${project.artifactId}.jar
-
-
@@ -460,5 +519,4 @@
-
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java
index ded03514fee..8d0eafeff39 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java
@@ -1,16 +1,26 @@
package com.provectus.kafka.ui;
-import org.springframework.boot.SpringApplication;
+import com.provectus.kafka.ui.util.DynamicConfigOperations;
import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
+import org.springframework.boot.builder.SpringApplicationBuilder;
+import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
-@SpringBootApplication
+@SpringBootApplication(exclude = LdapAutoConfiguration.class)
@EnableScheduling
@EnableAsync
public class KafkaUiApplication {
public static void main(String[] args) {
- SpringApplication.run(KafkaUiApplication.class, args);
+ startApplication(args);
+ }
+
+ public static ConfigurableApplicationContext startApplication(String[] args) {
+ return new SpringApplicationBuilder(KafkaUiApplication.class)
+ .initializers(DynamicConfigOperations.dynamicConfigPropertiesInitializer())
+ .build()
+ .run(args);
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/KafkaConnectClients.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/KafkaConnectClients.java
deleted file mode 100644
index de0c9054ae2..00000000000
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/KafkaConnectClients.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package com.provectus.kafka.ui.client;
-
-import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
-import com.provectus.kafka.ui.model.KafkaConnectCluster;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-public final class KafkaConnectClients {
-
- private KafkaConnectClients() {
-
- }
-
- private static final Map<String, KafkaConnectClientApi> CACHE = new ConcurrentHashMap<>();
-
- public static KafkaConnectClientApi withKafkaConnectConfig(KafkaConnectCluster config) {
- return CACHE.computeIfAbsent(config.getAddress(), s -> new RetryingKafkaConnectClient(config));
- }
-}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/KsqlClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/KsqlClient.java
deleted file mode 100644
index 2e8026648d7..00000000000
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/KsqlClient.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package com.provectus.kafka.ui.client;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.provectus.kafka.ui.exception.UnprocessableEntityException;
-import com.provectus.kafka.ui.model.KsqlCommandResponseDTO;
-import com.provectus.kafka.ui.strategy.ksql.statement.BaseStrategy;
-import lombok.RequiredArgsConstructor;
-import lombok.SneakyThrows;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.MediaType;
-import org.springframework.stereotype.Service;
-import org.springframework.web.reactive.function.BodyInserters;
-import org.springframework.web.reactive.function.client.ClientResponse;
-import org.springframework.web.reactive.function.client.WebClient;
-import reactor.core.publisher.Mono;
-
-@Service
-@RequiredArgsConstructor
-@Slf4j
-public class KsqlClient {
- private final WebClient webClient;
- private final ObjectMapper mapper;
-
- public Mono<KsqlCommandResponseDTO> execute(BaseStrategy ksqlStatement) {
- return webClient.post()
- .uri(ksqlStatement.getUri())
- .accept(new MediaType("application", "vnd.ksql.v1+json"))
- .body(BodyInserters.fromValue(ksqlStatement.getKsqlCommand()))
- .retrieve()
- .onStatus(HttpStatus::isError, this::getErrorMessage)
- .bodyToMono(byte[].class)
- .map(this::toJson)
- .map(ksqlStatement::serializeResponse);
- }
-
- private Mono<Throwable> getErrorMessage(ClientResponse response) {
- return response
- .bodyToMono(byte[].class)
- .map(this::toJson)
- .map(jsonNode -> jsonNode.get("message").asText())
- .flatMap(error -> Mono.error(new UnprocessableEntityException(error)));
- }
-
- @SneakyThrows
- private JsonNode toJson(byte[] content) {
- return this.mapper.readTree(content);
- }
-}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java
index 70716613730..74b9485008e 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java
@@ -1,22 +1,30 @@
package com.provectus.kafka.ui.client;
+import static com.provectus.kafka.ui.config.ClustersProperties.ConnectCluster;
+
+import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.connect.ApiClient;
import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
import com.provectus.kafka.ui.connect.model.Connector;
+import com.provectus.kafka.ui.connect.model.ConnectorPlugin;
+import com.provectus.kafka.ui.connect.model.ConnectorPluginConfigValidationResponse;
+import com.provectus.kafka.ui.connect.model.ConnectorStatus;
+import com.provectus.kafka.ui.connect.model.ConnectorTask;
+import com.provectus.kafka.ui.connect.model.ConnectorTopics;
import com.provectus.kafka.ui.connect.model.NewConnector;
+import com.provectus.kafka.ui.connect.model.TaskStatus;
import com.provectus.kafka.ui.exception.KafkaConnectConflictReponseException;
import com.provectus.kafka.ui.exception.ValidationException;
-import com.provectus.kafka.ui.model.KafkaConnectCluster;
+import com.provectus.kafka.ui.util.WebClientConfigurator;
import java.time.Duration;
import java.util.List;
import java.util.Map;
+import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
-import org.springframework.core.ParameterizedTypeReference;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.HttpMethod;
-import org.springframework.http.MediaType;
-import org.springframework.util.MultiValueMap;
+import org.springframework.http.ResponseEntity;
+import org.springframework.util.unit.DataSize;
import org.springframework.web.client.RestClientException;
+import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.reactive.function.client.WebClientResponseException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@@ -27,8 +35,10 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
private static final int MAX_RETRIES = 5;
private static final Duration RETRIES_DELAY = Duration.ofMillis(200);
- public RetryingKafkaConnectClient(KafkaConnectCluster config) {
- super(new RetryingApiClient(config));
+ public RetryingKafkaConnectClient(ConnectCluster config,
+ @Nullable ClustersProperties.TruststoreConfig truststoreConfig,
+ DataSize maxBuffSize) {
+ super(new RetryingApiClient(config, truststoreConfig, maxBuffSize));
}
private static Retry conflictCodeRetry() {
@@ -71,43 +81,204 @@ public Mono<Connector> setConnectorConfig(String connectorName, Map<String, Object>
+
+ @Override
+ public Mono<ResponseEntity<Connector>> createConnectorWithHttpInfo(NewConnector newConnector)
+ throws WebClientResponseException {
+ return withRetryOnConflict(super.createConnectorWithHttpInfo(newConnector));
+ }
+
+ @Override
+ public Mono<Void> deleteConnector(String connectorName) throws WebClientResponseException {
+ return withRetryOnConflict(super.deleteConnector(connectorName));
+ }
+
+ @Override
+ public Mono<ResponseEntity<Void>> deleteConnectorWithHttpInfo(String connectorName)
+ throws WebClientResponseException {
+ return withRetryOnConflict(super.deleteConnectorWithHttpInfo(connectorName));
+ }
+
+
+ @Override
+ public Mono<Connector> getConnector(String connectorName) throws WebClientResponseException {
+ return withRetryOnConflict(super.getConnector(connectorName));
+ }
+
+ @Override
+ public Mono<ResponseEntity<Connector>> getConnectorWithHttpInfo(String connectorName)
+ throws WebClientResponseException {
+ return withRetryOnConflict(super.getConnectorWithHttpInfo(connectorName));
+ }
+
+ @Override
+ public Mono