diff --git a/learn-kafka/config/connect-distributed.properties b/.docker/connect-distributed.properties similarity index 99% rename from learn-kafka/config/connect-distributed.properties rename to .docker/connect-distributed.properties index cedad9a..d42c818 100644 --- a/learn-kafka/config/connect-distributed.properties +++ b/.docker/connect-distributed.properties @@ -20,7 +20,7 @@ # the `bootstrap.servers` and those specifying replication factors. # A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. -bootstrap.servers=localhost:9092 +bootstrap.servers=kafka-0:9092 # unique name for the cluster, used in forming the Connect cluster group. Note that this must not conflict with consumer group IDs group.id=connect-cluster diff --git a/.docker/images/app/.dockerignore b/.docker/images/app/.dockerignore new file mode 100644 index 0000000..d0f2707 --- /dev/null +++ b/.docker/images/app/.dockerignore @@ -0,0 +1,6 @@ +.pytest_cache +.ruff_cache +__pycache__ +logs/ +.coverage +experiment.ipynb diff --git a/.docker/images/app/Dockerfile b/.docker/images/app/Dockerfile new file mode 100644 index 0000000..42e21b4 --- /dev/null +++ b/.docker/images/app/Dockerfile @@ -0,0 +1,24 @@ +FROM python:3.11-slim + +# Labels for GitHub Packages +LABEL org.opencontainers.image.source=https://github.com/lelouvincx/Chinh-Dinh-training +LABEL org.opencontainers.image.description="Data generator (called upstream-app), generates data to source_db." + +WORKDIR /app + +# Create a python virtual environment and put it on PATH (sourcing activate in its own RUN step would not persist across layers) +RUN python3 -m venv .venv +ENV PATH="/app/.venv/bin:$PATH" + +RUN pip install --no-cache-dir --upgrade pip + +# Install requirements +COPY .docker/images/app/requirements.txt /app/requirements.txt +RUN pip install --no-cache-dir -r /app/requirements.txt + +# Install curl and clean up the apt lists to keep the image slim +RUN apt-get update && apt-get install -y --no-install-recommends curl && apt-get autoremove -y && rm -rf /var/lib/apt/lists/* + +COPY app .
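+ +# A healthcheck sketch (an editor's suggestion, not wired up in this diff): the curl installed above could +# back a compose healthcheck, assuming Streamlit's default port 8501 and its /_stcore/health endpoint: +# HEALTHCHECK --interval=30s --timeout=5s CMD curl -sf http://localhost:8501/_stcore/health || exit 1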
+ +CMD [ "streamlit", "run", "app/streamlit_app.py", "--server.address=0.0.0.0" ] diff --git a/.docker/images/app/requirements.txt b/.docker/images/app/requirements.txt new file mode 100644 index 0000000..abe80a0 --- /dev/null +++ b/.docker/images/app/requirements.txt @@ -0,0 +1,12 @@ +psycopg2-binary==2.9.7 +Faker==19.6.0 +streamlit==1.26.0 +confluent-kafka==2.2.0 +sqlalchemy==2.0.20 +python-dotenv==1.0.0 +ruff==0.0.287 +black==23.9.1 +pytest==7.4.2 +pytest-dependency==0.5.1 +pytest-ordering==0.6 +pytest-cov==4.1.0 diff --git a/.docker/images/kafka-connect/Dockerfile b/.docker/images/kafka-connect/Dockerfile new file mode 100644 index 0000000..b6f642c --- /dev/null +++ b/.docker/images/kafka-connect/Dockerfile @@ -0,0 +1,13 @@ +FROM confluentinc/cp-server-connect:7.1.1 + +# Label for GitHub Packages +LABEL org.opencontainers.image.source=https://github.com/lelouvincx/Chinh-Dinh-training + +# Install debezium-connector-postgresql and kafka-connect-jdbc +RUN echo "INFO: Installing Connectors" +RUN confluent-hub install --no-prompt debezium/debezium-connector-postgresql:2.2.1 +RUN confluent-hub install --no-prompt confluentinc/kafka-connect-jdbc:10.7.4 + +RUN echo "INFO: Launching Kafka Connect workers" + +CMD [ "/etc/confluent/docker/run" ] diff --git a/database-replication/.docker/postgres-add-de-user.sh b/.docker/postgres-add-de-user.sh similarity index 82% rename from database-replication/.docker/postgres-add-de-user.sh rename to .docker/postgres-add-de-user.sh index 115b12a..dde1bd5 100644 --- a/database-replication/.docker/postgres-add-de-user.sh +++ b/.docker/postgres-add-de-user.sh @@ -4,11 +4,15 @@ set -e PGPASSWORD=${POSTGRES_PASSWORD} psql -v ON_ERROR_STOP=1 --username ${POSTGRES_USER} --dbname ${POSTGRES_DB} <<-EOSQL CREATE USER azure_pg_admin; GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO azure_pg_admin; + CREATE USER azure_superuser; ALTER USER azure_superuser WITH SUPERUSER; GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO azure_superuser; + CREATE USER greglow; GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO greglow; - CREATE USER data_engineer; + + CREATE USER data_engineer WITH PASSWORD '${POSTGRES_DE_PASSWORD}'; + ALTER USER data_engineer WITH REPLICATION; GRANT ALL PRIVILEGES ON DATABASE ${POSTGRES_DB} TO data_engineer; EOSQL diff --git a/database-replication/.dockerignore b/.dockerignore similarity index 100% rename from database-replication/.dockerignore rename to .dockerignore diff --git a/database-replication/.gitattributes b/.gitattributes similarity index 100% rename from database-replication/.gitattributes rename to .gitattributes diff --git a/.github/workflows/continuous-integration.yml b/.github/workflows/continuous-integration.yml new file mode 100644 index 0000000..61e7047 --- /dev/null +++ b/.github/workflows/continuous-integration.yml @@ -0,0 +1,194 @@ +name: General Continuous Integration +run-name: ${{ github.actor }} is testing GitHub Actions + + +on: [push] + + +jobs: + explore-github-actions: + runs-on: ubuntu-22.04 + steps: + - run: echo "The job was automatically triggered by a ${{ github.event_name }} event." + + - run: echo "This job is now running on a ${{ runner.os }} server hosted by GitHub." + + - run: echo "The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}." + + - name: Checkout repository code + uses: actions/checkout@v4 + + - run: echo "The ${{ github.repository }} repository has been cloned to the runner." + + - run: echo "The workflow is now ready to test your code on the runner." 
+ + - name: View environment variables + run: printenv + + - name: List files in the repository + run: | + ls -lah ${{ github.workspace }} + + - run: echo "This job's status is ${{ job.status }}." + + check-changes: + runs-on: ubuntu-22.04 + + outputs: + upstream-app: ${{ steps.changes.outputs.upstream-app }} + kafka-connect: ${{ steps.changes.outputs.kafka-connect }} + + steps: + - name: Checkout repository code + uses: actions/checkout@v4 + + - name: Check changes + uses: dorny/paths-filter@v2 + id: changes + with: + base: ${{ github.ref }} + ref: ${{ github.ref }} + filters: | + upstream-app: + - ".docker/images/app/**" + kafka-connect: + - ".docker/images/kafka-connect/**" + + build-push-upstream-app: + needs: check-changes + if: ${{ needs.check-changes.outputs.upstream-app == 'true' }} + runs-on: ubuntu-22.04 + + env: + REGISTRY: ghcr.io + UPSTREAM_APP_IMAGE_NAME: upstream-app + + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository code + uses: actions/checkout@v4 + + - name: Setup QEMU + uses: docker/setup-qemu-action@v3 + + - name: Login to the container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for docker + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ github.actor }}/${{ env.UPSTREAM_APP_IMAGE_NAME }} + + - name: Build and push image + uses: docker/build-push-action@v5 + with: + context: . + file: .docker/images/app/Dockerfile + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + build-push-kafka-connect: + needs: check-changes + if: ${{ needs.check-changes.outputs.kafka-connect == 'true' }} + runs-on: ubuntu-22.04 + + env: + REGISTRY: ghcr.io + KAFKA_CONNECT_IMAGE_NAME: kafka-connect + + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository code + uses: actions/checkout@v4 + + - name: Setup QEMU + uses: docker/setup-qemu-action@v3 + + - name: Login to the container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for docker + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ github.actor }}/${{ env.KAFKA_CONNECT_IMAGE_NAME }} + + - name: Build and push image + uses: docker/build-push-action@v5 + with: + context: . 
+ file: .docker/images/kafka-connect/Dockerfile + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + unit-test-upstream-app: + needs: build-push-upstream-app + if: | # Always run after build-push-upstream-app + always() && + (needs.build-push-upstream-app.result == 'success' || needs.build-push-upstream-app.result == 'skipped') + runs-on: ubuntu-22.04 + + env: + POSTGRES_USER: admin + POSTGRES_PASSWORD: admin123 + POSTGRES_DB: wideworldimporters + POSTGRES_PORT: 5432 + REGISTRY: ghcr.io + UPSTREAM_APP_IMAGE_NAME: upstream-app + + steps: + - name: Checkout repository code + uses: actions/checkout@v4 + + - name: Setup QEMU + uses: docker/setup-qemu-action@v3 + + - name: Login to the container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) from existing docker + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ github.actor }}/${{ env.UPSTREAM_APP_IMAGE_NAME }} + + - name: Setup docker-compose + uses: KengoTODA/actions-setup-docker-compose@main + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: View current working dir + run: pwd && ls -a && ls -lah app + + - name: Compose up services + run: docker-compose version && docker-compose -f app/tests/docker-compose.yml --project-directory . up -d + + - name: View running services + run: docker-compose -f app/tests/docker-compose.yml --project-directory . ps -a && sleep 15 + + - name: Unit tests + run: docker-compose -f app/tests/docker-compose.yml --project-directory . exec upstream-app python -m pytest --log-cli-level info -p no:warnings -v /app/tests + + - name: Compose down services + run: docker-compose -f app/tests/docker-compose.yml --project-directory . down diff --git a/.github/workflows/naming-policy.yml b/.github/workflows/naming-policy.yml new file mode 100644 index 0000000..91d00c1 --- /dev/null +++ b/.github/workflows/naming-policy.yml @@ -0,0 +1,15 @@ +name: Check naming policy +run-name: Check naming policy for ${{ github.ref }} + +on: [pull_request] + +jobs: + branch-naming-rules: + runs-on: ubuntu-22.04 + steps: + - uses: deepakputhraya/action-branch-name@master + with: + regex: '([a-z])+\/(\d+)-([a-z])+' # Regex the branch should match. This example enforces grouping + allowed_prefixes: 'feat,fix,refactor,docs' # All branches should start with the given prefix + ignore: main,dev # Ignore exactly matching branch names from convention + max_length: 100 # Max length of the branch name diff --git a/.gitignore b/.gitignore index 0db56f8..8dda43e 100644 --- a/.gitignore +++ b/.gitignore @@ -85,25 +85,25 @@ ipython_config.py # pyenv # For a library or package, you might want to ignore these files since the code is # intended to run in multiple environments; otherwise, check them in: -# .python-version +.python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. -#Pipfile.lock +Pipfile.lock # poetry # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. # This is especially recommended for binary packages to ensure reproducibility, and is more # commonly ignored for libraries. 
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock +poetry.lock # pdm # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock +pdm.lock # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it # in version control. # https://pdm.fming.dev/#use-with-ide @@ -130,7 +130,6 @@ venv.bak/ *.pyc **/*.pyc - # Spyder project settings .spyderproject .spyproject @@ -160,7 +159,22 @@ cython_debug/ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ +.idea/ # Ruff .ruff_cache* + +# Docker stuff +.docker/data/* +.docker/backups/* +.docker/log/* + +# Misc +tmp/ +learning/ +learn-kafka/ +learn-sqlserver/ +database-replication/ + +database-replication.code-workspace +restore.sql diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..a00be03 --- /dev/null +++ b/Makefile @@ -0,0 +1,87 @@ +include .env + +# ============ Docker compose ============ +build: + docker compose build + +up: + docker compose up + +up-d: + docker compose up -d + +up-build: + docker compose up --build + +up-build-d: + docker compose up --build -d + +down: + docker compose down + +restart: down up + +restart-d: down up-d + +restart-build-d: down up-build-d + +sleep: + sleep 20 + +# ============ Build images ============ +build-upstream-app: + docker build -t upstream-app:latest -f .docker/images/app/Dockerfile . + +build-kafka-connect: + docker build -t kafka-connect:latest -f .docker/images/kafka-connect/Dockerfile . + +# ============ Testing, formatting, type checks, lint checks ============ +app-requirements: + if [ -e "app/requirements.txt" ]; then rm app/requirements.txt; fi && \ + pip freeze > app/requirements.txt + +db-docs: + dbdocs build docs/wideworldimporters.dbml + +diagram: + if [ -e "docs/images/design_architecture.png" ]; then rm docs/images/design_architecture.png; fi && \ + python docs/diagram.py && \ + mv design_architecture.png docs/images/ + +format: + docker compose exec upstream-app python -m black -S --line-length 88 --preview /app/app + +lint: + docker compose exec upstream-app python -m ruff check --fix /app/app + +test: + docker compose exec upstream-app python -m pytest --log-cli-level info -p no:warnings -v /app/tests + +cov: + docker compose exec upstream-app python -m pytest --log-cli-level info -p no:warnings --cov -v /app/tests + +ci: db-docs diagram app-requirements cov format lint + +# ============ Postgres + MSSQL ============ +to-psql-default: + @docker compose exec -it source_db psql postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/postgres + +to-psql: + @docker compose exec -it source_db psql postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} + +to-mssql: + @docker compose exec -it sink_db bash -c '/opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P "${MSSQL_SA_PASSWORD}"' + +# ============ Kafka ============ +check-kafka: + docker run -it --rm --network database-replication_kafka_networks bitnami/kafka:3.5 \ + kafka-topics.sh --list --bootstrap-server kafka-server:9092 + +check-kafka-connect: + curl -s -X GET http://localhost:8083 | jq + +show-connector-plugins: + curl -s -X GET http://localhost:8083/connector-plugins | jq + +show-connectors: 
curl -s -X GET http://localhost:8083/connectors | jq diff --git a/README.md b/README.md index c619d0b..e1b616d 100644 --- a/README.md +++ b/README.md @@ -1 +1,224 @@ -# Chinh-Dinh-training \ No newline at end of file +# Project: Database Replication + +[![Testing Continuous Integration](https://github.com/lelouvincx/Chinh-Dinh-training/actions/workflows/continuous-integration.yml/badge.svg)](https://github.com/lelouvincx/Chinh-Dinh-training/actions/workflows/continuous-integration.yml) + +- [Project: Database Replication](#project-database-replication) + - [Tasks](#tasks) + - [Local development guide](#local-development-guide) + - [Prerequisites](#prerequisites) + - [Install codebase](#install-codebase) + - [Restore the database](#restore-the-database) + - [Design architecture](#design-architecture) + - [Database catalog](#database-catalog) + - [Schema](#schema) + - [Data schemas](#data-schemas) + - [Secure-access schemas](#secure-access-schemas) + - [Development schemas](#development-schemas) + - [Tables](#tables) + - [Application schema](#application-schema) + - [Purchasing schema](#purchasing-schema) + - [Sales schema](#sales-schema) + - [Warehouse schema](#warehouse-schema) + - [Troubleshoot](#troubleshoot) + - [References](#references) + +## Tasks + +Visit here: https://github.com/users/lelouvincx/projects/5/ + +- [x] Design architecture +- [x] Initialize Postgres with dataset + - [x] Database catalog +- [x] Build Flask application to generate fake data in realtime +- [x] Build Kafka Cluster with 3 nodes +- [x] Unittest for `Fake Data Generation` app + - [x] TDD (Test Driven Development) +- [ ] Kafka Connect + - [x] Debezium CDC Postgres connector + - [ ] JDBC Connector (for SQL Server) +- [ ] CI for Pull Requests + +## Local development guide + +### Prerequisites +- Python version >= 3.9 (3.11 recommended) +- Docker with docker compose (at least 4 cores and 4 GB of RAM) + +### Install codebase +1. Clone the repository & go to the project location (/database-replication) +2. Install python dependencies +```bash +python -m venv .venv +source .venv/bin/activate +pip install -r app/requirements.txt +``` +3. Build docker images +```bash +docker build -t upstream-app:latest -f .docker/images/app/Dockerfile . +``` +4. Create necessary volumes +```bash +mkdir -p .docker/backups/postgres +mkdir -p .docker/data/postgres + +mkdir -p .docker/data/zookeeper +sudo chown -R 1001:1001 .docker/data/zookeeper + +mkdir -p .docker/data/kafka +sudo chown -R 1001:1001 .docker/data/kafka +``` +5. Start docker services +``` +make up +``` +6. See the [Makefile](./Makefile) for command shortcuts + +### Restore the database + +1. Download the dump file at https://github.com/Azure/azure-postgresql/blob/master/samples/databases/wide-world-importers/wide_world_importers_pg.dump +2. Place it into ./.docker/backups/postgres +3. Spin up the postgres container; note that there are 5 users: admin, azure_pg_admin, azure_superuser, greglow, data_engineer +4. Shell into postgres + +```bash +docker compose exec -it source_db /bin/bash +``` + +5. Restore (inside the postgres container) + +```bash +pg_restore -h localhost -p 5432 -U admin -W -v -Fc -d wideworldimporters < /backups/wide_world_importers_pg.dump +``` + +Then enter the admin password and grab a coffee while it runs. + +## Design architecture + +**Prerequisites:** The diagram is rendered with [Graphviz](https://www.graphviz.org/), so you need to [install Graphviz](https://graphviz.gitlab.io/download/) to use the `diagrams` package.
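+After installing Graphviz (or if you already have it), install the `diagrams` Python package — for example with pip (a minimal sketch; the pins match [app/requirements.txt](./app/requirements.txt)): + +```bash +pip install diagrams==0.23.3 graphviz==0.20.1 +```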
 + +For design architecture configurations, visit [diagram.py](./docs/diagram.py). + +![](./docs/images/design_architecture.png) + +## Database catalog + +Visit: https://dbdocs.io/lelouvincx/WideWorldImporters + +### Schema + +WideWorldImporters uses schemas for different purposes, such as storing data, defining how users can access the data, and providing objects for data warehouse development and integration. + +#### Data schemas + +These schemas contain the data. Many tables are needed by all other schemas and are located in the Application schema. + +| Schema | Description | | ----------- | --------------------------------------------------------------------------------------------------------------------------------------- | | Application | Application-wide users, contacts, and parameters. This schema also contains reference tables with data that is used by multiple schemas | | Purchasing | Stock item purchases from suppliers and details about suppliers. | | Sales | Stock item sales to retail customers, and details about customers and salespeople. | | Warehouse | Stock item inventory and transactions. | + +#### Secure-access schemas + +These schemas are used for external applications that are not allowed to access the data tables directly. They contain views and stored procedures used by external applications. + +| Schema | Description | | ------- | ---------------------------------------------------------------------------------------------------------- | | Website | All access to the database from the company website is through this schema. | | Reports | All access to the database from Reporting Services reports is through this schema. | | PowerBI | All access to the database from the Power BI dashboards via the Enterprise Gateway is through this schema. | + +The Reports and PowerBI schemas are not used in the initial release of the sample database. However, all Reporting Services and Power BI samples built on top of this database are encouraged to use these schemas. + +#### Development schemas + +Special-purpose schemas + +| Schema | Description | | ----------- | ---------------------------------------------------------------------------------------------------------------------------------- | | Integration | Objects and procedures required for data warehouse integration (that is, migrating the data to the WideWorldImportersDW database). | | Sequences | Holds sequences used by all tables in the application. | + +### Tables + +All tables in the database are in the data schemas. + +#### Application schema + +Details of parameters and people (users and contacts), along with common reference tables (common to multiple other schemas). + +| Table | Description | | ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | | SystemParameters | Contains system-wide configurable parameters. | | People | Contains names and contact information for everyone who uses the application, and for the people at customer organizations whom Wide World Importers deals with. This table includes staff, customers, suppliers, and any other contacts. 
For people who have been granted permission to use the system or website, the information includes login details. | | Application.cities | There are many addresses stored in the system, for people, customer organization delivery addresses, pickup addresses at suppliers, etc. Whenever an address is stored, there is a reference to a city in this table. There is also a spatial location for each city. | | StateProvinces | Cities are part of states or provinces. This table has details of those, including spatial data describing the boundaries of each state or province. | | countries | States or Provinces are part of countries/regions. This table has details of those, including spatial data describing the boundaries of each country/region. | | DeliveryMethods | Choices for delivering stock items (for example, truck/van, post, pickup, courier, etc.) | | PaymentMethods | Choices for making payments (for example, cash, check, EFT, etc.) | | TransactionTypes | Types of customer, supplier, or stock transactions (for example, invoice, credit note, etc.) | + +#### Purchasing schema + +Details of suppliers and of stock item purchases. + +| Table | Description | | -------------------- | ---------------------------------------------------------------------------------- | | Suppliers | Main entity table for suppliers (organizations) | | SupplierCategories | Categories for suppliers (for example, novelties, toys, clothing, packaging, etc.) | | SupplierTransactions | All financial transactions that are supplier-related (invoices, payments) | | PurchaseOrders | Details of supplier purchase orders | | PurchaseOrderLines | Detail lines from supplier purchase orders | + +#### Sales schema + +Details of customers, salespeople, and of stock item sales. + +| Table | Description | | -------------------- | ---------------------------------------------------------------------------------------- | | Customers | Main entity table for customers (organizations or individuals) | | CustomerCategories | Categories for customers (for example, novelty stores, supermarkets, etc.) | | BuyingGroups | Customer organizations can be part of groups that exert greater buying power | | CustomerTransactions | All financial transactions that are customer-related (invoices, payments) | | SpecialDeals | Special pricing. This can include fixed prices, discounts in dollars, or discount percentages. | | Orders | Details of customer orders | | OrderLines | Detail lines from customer orders | | Invoices | Details of customer invoices | | InvoiceLines | Detail lines from customer invoices | + +#### Warehouse schema + +Details of stock items, their holdings and transactions. + +| Table | Description | | --------------------- | ------------------------------------------------------------------------------------------ | | StockItems | Main entity table for stock items | | StockItemHoldings | Non-temporal columns for stock items. These are frequently updated columns. | | StockGroups | Groups for categorizing stock items (for example, novelties, toys, edible novelties, etc.) | | StockItemStockGroups | Which stock items are in which stock groups (many to many) | | Colors | Stock items can (optionally) have colors | | PackageTypes | Ways that stock items can be packaged (for example, box, carton, pallet, kg, etc.) 
| | StockItemTransactions | Transactions covering all movements of all stock items (receipt, sale, write-off) | | VehicleTemperatures | Regularly recorded temperatures of vehicle chillers | | ColdRoomTemperatures | Regularly recorded temperatures of cold room chillers | + +## Troubleshoot + +- Cannot create directory '/bitnami/...' => The user and group IDs of the zookeeper/kafka/kafka-connect containers are `1001:1000`; chown the mounted directories to `1001:1001`. [Refer here.](https://github.com/bitnami/containers/issues/41422#issuecomment-1674497129) +```bash +sudo chown -R 1001:1001 .docker/data/kafka* +sudo chown -R 1001:1001 .docker/data/zookeeper +sudo chown -R 1001:1001 .docker/log/kafka* +``` + +## References +- [Very first article to read](https://debezium.io/blog/2017/09/25/streaming-to-another-database/) +- [Kafka Cluster with Zookeeper (3 broker - 1 zoo) as containers](https://www.reddit.com/r/apachekafka/comments/tsr9dx/docker_compose_file_for_kafka_and_kafka_connect/) +- [Bitnami's Kafka image](https://github.com/bitnami/containers/blob/main/bitnami/kafka/README.md) +- [Kafka Concepts (Viet version)](https://viblo.asia/p/010-apache-kafka-connect-concept-gAm5ymNL5db) +- [Kafka Connect's REST API](https://docs.confluent.io/platform/current/connect/references/restapi.html) +- [Mini version of this project](https://medium.com/@parasharprasoon.950/how-to-set-up-cdc-with-kafka-debezium-and-postgres-70a907b8ca20) +- [PostgreSQL Debezium properties for Kafka Connect](https://debezium.io/documentation/reference/1.1/connectors/postgresql.html#postgresql-connector-properties) +- [Unsupported features of SQL Server on Linux (container)](https://learn.microsoft.com/en-us/sql/linux/sql-server-linux-editions-and-components-2019?view=sql-server-ver16#Unsupported) +- [Source - Sink configuration sample](https://medium.com/@shiva.prathipati/real-time-database-replication-using-kafka-d9e7a592e476) diff --git a/app/app/.streamlit/config.toml b/app/app/.streamlit/config.toml new file mode 100644 index 0000000..0830549 --- /dev/null +++ b/app/app/.streamlit/config.toml @@ -0,0 +1,2 @@ +[server] +runOnSave = true diff --git a/database-replication/app/__init__.py b/app/app/__init__.py similarity index 100% rename from database-replication/app/__init__.py rename to app/app/__init__.py diff --git a/app/app/experiment.ipynb b/app/app/experiment.ipynb new file mode 100644 index 0000000..fd16e1a --- /dev/null +++ b/app/app/experiment.ipynb @@ -0,0 +1,167 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from sqlalchemy import text\n", + "from psql_connector import PsqlConnector\n", + "\n", + "from os import environ as env\n", + "from dotenv import load_dotenv\n", + "load_dotenv(dotenv_path='../.env')" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "# Init psql connector\n", + "psql_params = {\n", + " \"host\": \"localhost\",\n", + " \"port\": env[\"POSTGRES_PORT\"],\n", + " \"user\": env[\"POSTGRES_USER\"],\n", + " \"password\": env[\"POSTGRES_PASSWORD\"],\n", + " \"database\": env[\"POSTGRES_DB\"],\n", + "}\n", + "\n", + "# Setup psql connector\n", + "psql_connector = PsqlConnector(psql_params)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ 
"['application', 'purchasing', 'sales', 'warehouse', 'public']\n" + ] + } + ], + "source": [ + "# Fetch schemas\n", + "schemas = []\n", + "with psql_connector.connect() as engine:\n", + " with engine.connect() as cursor:\n", + " sql_script = \"\"\"\n", + " SELECT schema_name\n", + " FROM information_schema.schemata;\n", + " \"\"\"\n", + " schemas = cursor.execute(text(sql_script)).fetchall()\n", + "\n", + " # Remove system schemas\n", + " schemas = [\n", + " schema[0]\n", + " for schema in schemas\n", + " if schema[0]\n", + " not in [ \"pg_toast\", \"pg_temp_1\", \"pg_toast_temp_1\", \"pg_catalog\", \"information_schema\", ]\n", + " ]\n", + "\n", + " # Remove schemas: data_load_simulation, integration, power_bi, reports, sequences\n", + " schemas = [\n", + " schema for schema in schemas if schema not in [ \"data_load_simulation\", \"integration\", \"power_bi\", \"reports\", \"sequences\", \"website\" ]\n", + " ]\n", + "\n", + "print(schemas)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'int8', 'numeric', 'bool', 'varchar', 'date', 'int4', 'text', 'timestamp', 'bytea', 'bpchar'}\n" + ] + } + ], + "source": [ + "# Fetch attributes from schemas\n", + "attributes = set()\n", + "\n", + "with psql_connector.connect() as engine:\n", + " with engine.connect() as cursor:\n", + " for schema in schemas:\n", + " sql_script = f\"\"\"\n", + " WITH rows AS (\n", + "\t SELECT c.relname AS table_name,\n", + "\t\t\t a.attname AS attribute_name,\n", + "\t\t\t a.attnotnull AS is_attribute_null,\n", + "\t\t\t a.attnum AS attribute_num,\n", + "\t\t\t t.typname AS type_name\n", + "\t FROM pg_catalog.pg_class c\n", + "\t\t JOIN pg_catalog.pg_attribute a\n", + "\t\t ON c.\"oid\" = a.attrelid AND a.attnum >= 0\n", + "\t\t JOIN pg_catalog.pg_type t\n", + "\t\t ON t.\"oid\" = a.atttypid\n", + "\t\t JOIN pg_catalog.pg_namespace n\n", + "\t\t ON c.relnamespace = n.\"oid\"\n", + "\t WHERE n.nspname = '{schema}'\n", + "\t\t AND c.relkind = 'r'\n", + " ),\n", + " agg AS (\n", + "\t SELECT rows.table_name, json_agg(rows ORDER BY attribute_num) AS attrs\n", + "\t FROM rows\n", + "\t GROUP BY rows.table_name\n", + " )\n", + " SELECT json_object_agg(agg.table_name, agg.attrs)\n", + " FROM agg;\n", + " \"\"\"\n", + " fetch_result = cursor.execute(text(sql_script)).fetchone()[0]\n", + " # Loop through all keys in fetch_result\n", + " for key in fetch_result.keys():\n", + " table = fetch_result.get(key)\n", + " for attrs in table:\n", + " # Add attrs.get(\"type_name\") into attributes\n", + " if attrs.get(\"type_name\") not in attributes:\n", + " attributes.add(attrs.get(\"type_name\"))\n", + "\n", + "print(attributes)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/database-replication/app/gen_data.py b/app/app/gen_data.py similarity index 83% rename from database-replication/app/gen_data.py rename to app/app/gen_data.py index 2db021a..f7ccd85 100644 --- a/database-replication/app/gen_data.py +++ b/app/app/gen_data.py @@ -1,6 +1,12 @@ from sqlalchemy import text from faker import Faker -from psql_connector 
import PsqlConnector + +try: + # Try importing for unit testing + from app.psql_connector import PsqlConnector +except ImportError: + # Try importing for upstream app + from psql_connector import PsqlConnector import os import logging @@ -12,7 +18,11 @@ logger = logging.getLogger(__name__) # File handler -f_handler = logging.FileHandler(os.path.dirname(__file__) + f"/logs/{__name__}.log") +log_dir = os.path.dirname(__file__) + "/logs" +if not os.path.exists(log_dir): + os.mkdir(log_dir) + +f_handler = logging.FileHandler(log_dir + f"/{__name__}.log") f_handler.setLevel(logging.INFO) f_format = logging.Formatter("[ %(asctime)s - %(levelname)s - %(name)s ] %(message)s") f_handler.setFormatter(f_format) @@ -55,12 +65,15 @@ def get_schema(self) -> str: def get_name(self) -> str: return self._name + def get_attributes(self) -> list: + return self._attributes + # Setters def set_attributes(self, attributes: list) -> None: self._attributes = attributes # Methods - def update_attributes(self, connector: PsqlConnector) -> None: + def update_attributes(self, connector: PsqlConnector) -> bool: with connector.connect() as engine: with engine.connect() as cursor: sql_script = f""" @@ -89,20 +102,24 @@ SELECT json_object_agg(agg.table_name, agg.attrs) FROM agg; """ - logger.info(f"Fetching table {self._schema}.{self._name} from database") + logger.info(f"Fetching attributes of table {self._schema}.{self._name}") logger.debug(f"With query {sql_script}") fetch_result = cursor.execute(text(sql_script)).fetchone() or [] # Current type: sqlalchemy.engine.row.Row fetch_result = fetch_result[0] or {} # Current type: dict + logger.debug(f"fetch_result: {fetch_result}") new_attributes = fetch_result.get(self.get_name()) or [] + logger.debug(f"new_attributes: {new_attributes}") if new_attributes == self._attributes: logger.info("There's nothing to change") + return False else: self.set_attributes(new_attributes) logger.info("Table attributes are updated") + return True def gen_public_test(connector: PsqlConnector, num_records: int = 1) -> None: @@ -119,9 +136,3 @@ def gen_public_test(connector: PsqlConnector, num_records: int = 1) -> None: cursor.execute(text(sql_script)) cursor.commit() - - -if __name__ == "__main__": - psql_connector = PsqlConnector(psql_params) - public_test = Table(schema="public", name="test") - result = public_test.update_attributes(psql_connector) diff --git a/database-replication/app/psql_connector.py b/app/app/psql_connector.py similarity index 74% rename from database-replication/app/psql_connector.py rename to app/app/psql_connector.py index aef69b7..184c550 100644 --- a/database-replication/app/psql_connector.py +++ b/app/app/psql_connector.py @@ -1,16 +1,20 @@ -from sqlalchemy import create_engine -from contextlib import contextmanager - import os import logging +from contextlib import contextmanager +from sqlalchemy import create_engine +from sqlalchemy.exc import OperationalError # SQLAlchemy wraps DBAPI errors, so catch its OperationalError (psycopg2's would never match) # Init logging +log_dir = os.path.dirname(__file__) + "/logs" +if not os.path.exists(log_dir): + os.mkdir(log_dir) + logging.basicConfig( level=logging.INFO, format="[ %(name)s - %(asctime)s %(levelname)s ] %(message)s", handlers=[ - logging.FileHandler(os.path.dirname(__file__) + f"/logs/{__name__}.log"), + logging.FileHandler(log_dir + f"/{__name__}.log"), logging.StreamHandler(), ], ) @@ -33,5 +37,7 @@ def connect(self): db_conn = create_engine(conn_info) try: yield db_conn - except Exception as e: + except OperationalError as e: 
logging.exception(f"Error when connecting to Postgres: {e}") + finally: + db_conn.dispose() diff --git a/database-replication/app/ui.py b/app/app/streamlit_app.py similarity index 97% rename from database-replication/app/ui.py rename to app/app/streamlit_app.py index c81c671..44e3114 100644 --- a/database-replication/app/ui.py +++ b/app/app/streamlit_app.py @@ -11,13 +11,13 @@ import logging import time -load_dotenv(dotenv_path=".env") +load_dotenv(dotenv_path="../../.env") # Init logging logging.basicConfig( level=logging.NOTSET, format="[ %(name)s - %(asctime)s %(levelname)s ] %(message)s", - handlers=[logging.FileHandler("./logs/streamlit.log"), logging.StreamHandler()], + handlers=[logging.FileHandler("./app/logs/streamlit.log"), logging.StreamHandler()], ) # Init psql connector diff --git a/database-replication/pyproject.toml b/app/pyproject.toml similarity index 88% rename from database-replication/pyproject.toml rename to app/pyproject.toml index 4803780..ee9f51a 100644 --- a/database-replication/pyproject.toml +++ b/app/pyproject.toml @@ -12,11 +12,14 @@ confluent-kafka = "^2.2.0" ruff = "^0.0.287" black = "^23.9.1" pytest = "^7.4.2" +pytest-dependency = "^0.5.1" +pytest-ordering = "^0.6" +pytest-cov = "^4.1.0" [tool.ruff] # Enable rule pycodestyle select = ["E"] -ignore = ["E501", "E101"] +ignore = ["E501", "E101", "E402"] # Allow autofix for all enabled rules (when `--fix`) is provided. fixable = ["ALL"] @@ -24,7 +27,7 @@ unfixable = [] # Maximum line length is same as black line-length = 88 -src = ["app"] +src = ["app/app"] # Exclude a variety of commonly ignored directories. exclude = [ diff --git a/database-replication/app/requirements.txt b/app/requirements.txt similarity index 92% rename from database-replication/app/requirements.txt rename to app/requirements.txt index ba64d2d..30a1dd1 100644 --- a/database-replication/app/requirements.txt +++ b/app/requirements.txt @@ -19,15 +19,18 @@ charset-normalizer==3.2.0 click==8.1.7 comm==0.1.4 confluent-kafka==2.2.0 +coverage==7.3.1 debugpy==1.8.0 decorator==5.1.1 defusedxml==0.7.1 +diagrams==0.23.3 executing==1.2.0 Faker==19.6.0 fastjsonschema==2.18.0 fqdn==1.5.1 gitdb==4.0.10 GitPython==3.1.35 +graphviz==0.20.1 greenlet==2.0.2 idna==3.4 importlib-metadata==6.8.0 @@ -37,6 +40,7 @@ ipython==8.15.0 isoduration==20.11.0 jedi==0.19.0 Jinja2==3.1.2 +jmespath==1.0.1 json5==0.9.14 jsonpointer==2.4 jsonschema==4.19.0 @@ -87,6 +91,9 @@ pydeck==0.8.0 Pygments==2.16.1 Pympler==1.0.1 pytest==7.4.2 +pytest-cov==4.1.0 +pytest-dependency==0.5.1 +pytest-ordering==0.6 python-dateutil==2.8.2 python-dotenv==1.0.0 python-json-logger==2.0.7 @@ -101,6 +108,7 @@ rfc3986-validator==0.1.1 rich==13.5.2 rpds-py==0.10.2 ruff==0.0.287 +s3transfer==0.7.0 Send2Trash==1.8.2 six==1.16.0 smmap==5.0.0 @@ -116,11 +124,12 @@ toml==0.10.2 toolz==0.12.0 tornado==6.3.3 traitlets==5.10.0 +typed-ast==1.5.5 typing_extensions==4.7.1 tzdata==2023.3 tzlocal==4.3.1 uri-template==1.3.0 -urllib3==2.0.4 +urllib3==1.26.17 validators==0.22.0 watchdog==3.0.0 wcwidth==0.2.6 diff --git a/app/tests/conftest.py b/app/tests/conftest.py new file mode 100644 index 0000000..9263f6c --- /dev/null +++ b/app/tests/conftest.py @@ -0,0 +1,22 @@ +from os.path import dirname, abspath +import sys + +parent_dir = dirname(dirname(abspath(__file__))) +sys.path.append(parent_dir) + +from dotenv import load_dotenv +from os import environ as env + +try: + load_dotenv() +except Exception as e: + pass + + +psql_params = { + "host": env["POSTGRES_HOST"], + "port": env["POSTGRES_PORT"], + "user": 
env["POSTGRES_USER"], + "password": env["POSTGRES_PASSWORD"], + "database": env["POSTGRES_DB"], +} diff --git a/app/tests/docker-compose.yml b/app/tests/docker-compose.yml new file mode 100644 index 0000000..66ba50d --- /dev/null +++ b/app/tests/docker-compose.yml @@ -0,0 +1,33 @@ +version: "3.4" + +services: + source_db: + image: postgres:14-alpine + container_name: "source_db" + ports: + - "5432:5432" + volumes: + - .docker/postgres-add-de-user.sh:/docker-entrypoint-initdb.d/postgres-add-de-user.sh + environment: + - POSTGRES_USER=${POSTGRES_USER} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_DB=${POSTGRES_DB} + - PGDATA=/var/lib/postgresql/pgdata + restart: unless-stopped + + upstream-app: + image: ghcr.io/lelouvincx/upstream-app:${DOCKER_METADATA_OUTPUT_VERSION} + container_name: "upstream-app" + ports: + - "8501:8501" + volumes: + - ./app:/app + environment: + - POSTGRES_USER=${POSTGRES_USER} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_HOST=source_db + - POSTGRES_PORT=${POSTGRES_PORT} + - POSTGRES_DB=${POSTGRES_DB} + restart: on-failure + depends_on: + - source_db diff --git a/app/tests/test_gen_data.py b/app/tests/test_gen_data.py new file mode 100644 index 0000000..70d6608 --- /dev/null +++ b/app/tests/test_gen_data.py @@ -0,0 +1,66 @@ +from app.psql_connector import PsqlConnector +from app.gen_data import Table +from conftest import psql_params +from sqlalchemy import text +import pytest + + +class TestTable: + @pytest.mark.first + @pytest.mark.dependency(name="TEST_CONNECTING") + def test_connecting(self): + psql_connector = PsqlConnector(psql_params) + is_connected = False + with psql_connector.connect() as engine: + with engine.connect() as cursor: + is_connected = True + cursor.commit() + assert is_connected is True, "Not connected to database." + + @pytest.mark.dependency(depends=["TEST_CONNECTING"]) + def test_update_attributes(self): + psql_connector = PsqlConnector(psql_params) + + with psql_connector.connect() as engine: + with engine.connect() as cursor: + # Create temp_table + sql_script = text( + """ + CREATE TABLE temp_table ( + id SERIAL PRIMARY KEY, + name VARCHAR(50), + age INT + ); + """ + ) + cursor.execute(sql_script) + cursor.commit() + + # Find schema of temp_table + sql_script = text( + """ + SELECT schemaname + FROM pg_tables + WHERE tablename = 'temp_table'; + """ + ) + temp_table_schema = cursor.execute(sql_script).fetchone() or [] + temp_table_schema = temp_table_schema[0] + + # Check if attributes are updated + temp_table = Table(schema=temp_table_schema, name="temp_table") + is_changed = temp_table.update_attributes(psql_connector) + + # Clean created table + with psql_connector.connect() as engine: + with engine.connect() as cursor: + sql_script = text(" DROP TABLE temp_table; ") + cursor.execute(sql_script) + cursor.commit() + + assert is_changed is True, "Attributes not changed." 
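+ + # A sketch of what the skipped test below could assert once implemented (an assumption, not + # the final design): call gen_public_test(psql_connector, num_records=10) from app.gen_data + # and check that the row count of public.test grows by 10.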
+ + @pytest.mark.skip(reason="Not implemented due to WIP") + @pytest.mark.dependency(depends=["TEST_CONNECTING"]) + def test_generate(self): + pass diff --git a/app/tests/test_psql_connector.py b/app/tests/test_psql_connector.py new file mode 100644 index 0000000..0434ac7 --- /dev/null +++ b/app/tests/test_psql_connector.py @@ -0,0 +1,32 @@ +from app.psql_connector import PsqlConnector +from conftest import psql_params +from sqlalchemy import text +import pytest + + +class TestPsqlConnector: + @pytest.mark.first + @pytest.mark.dependency(name="TEST_CONNECTING") + def test_connecting(self): + psql_connector = PsqlConnector(psql_params) + is_connected = False + with psql_connector.connect() as engine: + with engine.connect() as cursor: + is_connected = True + cursor.commit() + assert is_connected is True, "Not connected to database." + + @pytest.mark.dependency(depends=["TEST_CONNECTING"]) + def test_getting_data(self): + psql_connector = PsqlConnector(psql_params) + with psql_connector.connect() as engine: + with engine.connect() as cursor: + sql_script = "SELECT 1;" + fetched_data = 0 + try: + fetched_data = cursor.execute(text(sql_script)).fetchone() or [] + fetched_data = fetched_data[0] + except Exception as e: + print(f"Error when retrieving results from database: {e}") + assert False, "Error when retrieving results." + assert fetched_data == 1 diff --git a/connectors/sink-db.json b/connectors/sink-db.json new file mode 100644 index 0000000..9288a4a --- /dev/null +++ b/connectors/sink-db.json @@ -0,0 +1,24 @@ +{ + "name": "sink-public-test-v3", + "config": { + "connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector", + "tasks.max": 2, + "max.poll.records": 4000, + "batch.size": 4000, + "topics": "public.test", + "pk.fields": "id", + "pk.mode": "record_key", + "insert.mode": "upsert", + "delete.enabled": true, + "auto.create": true, + "consumer.fetch.min.bytes": 1000000, + "consumer.fetch.wait.max.ms": 1000, + "transforms": "unwrap,route", + "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState", + "transforms.unwrap.drop.tombstones": true, + "transforms.unwrap.delete.handling.mode": "rewrite", + "transforms.route.type": "org.apache.kafka.connect.transforms.RegexRouter", + "transforms.route.regex": "([^.]+)\\.([^.]+)", + "transforms.route.replacement": "$2" + } +} diff --git a/connectors/sink-db.py b/connectors/sink-db.py new file mode 100644 index 0000000..cbb7001 --- /dev/null +++ b/connectors/sink-db.py @@ -0,0 +1,189 @@ +import requests +import json +import logging +import argparse +from dotenv import load_dotenv +from os import environ as env + + +load_dotenv() + +# Logging to console +logging.basicConfig( + level=logging.INFO, format="[ %(asctime)s - %(levelname)s ] %(message)s" +) + + +def get_name(filename: str) -> str: + with open(f"{filename}", "r") as json_file: + json_tmp = json.load(json_file) + name = json_tmp.get("name", "") + + if name == "": + logging.error("Connector name not found. Please enter it in the config file.") + raise Exception + + return name + + +def get_config(filename: str) -> dict: + with open(f"{filename}", "r") as json_file: + json_tmp = json.load(json_file) + config = json_tmp.get("config") + + if config is None: + logging.error("No config found. 
Please enter it in the config file.") + raise Exception + + if config.get("connector.class") != "io.confluent.connect.jdbc.JdbcSinkConnector": + logging.warning( + "The connector is not from io.confluent.connect.jdbc.JdbcSinkConnector, some functionalities may not work" + ) + + config["connection.url"] = f"jdbc:sqlserver://sink_db;database={env['MSSQL_DB']};" + config["connection.user"] = "SA" + config["connection.password"] = env["MSSQL_SA_PASSWORD"] + config["table.name.format"] = f"{env['MSSQL_DB']}" + ".dbo.${topic}" + + return config + + +def get_url(action: str, name: str) -> str: + kafka_connect_host = env["CONNECT_REST_ADVERTISED_HOST_NAME"] + kafka_connect_port = env["CONNECT_REST_PORT"] + + url = f"http://{kafka_connect_host}:{kafka_connect_port}/connectors" + options = { + "create": "", + "update": f"/{name}/config", + "restart": f"/{name}/restart", + "show": f"/{name}", + "list": f"/{name}/topics", + "delete": f"/{name}", + } + url += options.get(action, "") + + return url + + +def make_request(action: str, filename: str) -> str: + # Prepare url + name = get_name(filename) + url = get_url(action, name) + logging.info(f"Endpoint: {url}") + + # Prepare headers + headers = {"Content-Type": "application/json", "Accept": "application/json"} + + if action == "create" or action == "update": + # Prepare json_data + config = get_config(filename) + + json_data = ( + json.dumps({"name": name, "config": config}) + if action == "create" + else json.dumps(config) # action == "update" + ) + + # Make request + try: + response = ( + requests.post(url=url, data=json_data, headers=headers) + if action == "create" + else requests.put(url=url, data=json_data, headers=headers) + ) + response.raise_for_status() + + logging.info("Request was successful") + content = json.loads(response.content) + content["config"][ + "connection.password" + ] = "********" # WARN: keep this line; it masks the password in logged output + content["status_code"] = response.status_code + formatted_json = json.dumps(content, indent=4) + return formatted_json + except requests.exceptions.HTTPError as e: + logging.exception(f"HTTP error occurred: {e}") + except requests.exceptions.RequestException as e: + logging.exception(f"POST or PUT request failed due to: {e}") + elif action == "restart": + # Make request + try: + response = requests.post(url=url, headers=headers) + response.raise_for_status() + + if response.status_code == 204: + logging.info("Restarted. 
Status code 204 No Content") + except requests.exceptions.HTTPError as e: + logging.exception(f"HTTP error occurred: {e}") + except requests.exceptions.RequestException as e: + logging.exception(f"POST request failed due to: {e}") + elif action == "show": + # Make request + try: + response = requests.get(url=url, headers=headers) + response.raise_for_status() + + logging.info("Request was successful") + content = json.loads(response.content) + content["config"][ + "connection.password" + ] = "********" # WARN: keep this line; it masks the password in logged output + content["status_code"] = response.status_code + formatted_json = json.dumps(content, indent=4) + return formatted_json + except requests.exceptions.HTTPError as e: + logging.exception(f"HTTP error occurred: {e}") + except requests.exceptions.RequestException as e: + logging.exception(f"GET request failed due to: {e}") + elif action == "list": + # Make request + try: + response = requests.get(url=url, headers=headers) + response.raise_for_status() + + logging.info("Request was successful") + content = json.loads(response.content) + content["status_code"] = response.status_code + formatted_json = json.dumps(content, indent=4) + return formatted_json + except requests.exceptions.HTTPError as e: + logging.exception(f"HTTP error occurred: {e}") + except requests.exceptions.RequestException as e: + logging.exception(f"GET request failed due to: {e}") + else: # action = "delete" + # Make request + try: + response = requests.delete(url=url, headers=headers) + response.raise_for_status() + + if response.status_code == 204: + logging.info("Deleted. Status code 204 No Content") + except requests.exceptions.HTTPError as e: + logging.exception(f"HTTP error occurred: {e}") + except requests.exceptions.RequestException as e: + logging.exception(f"DELETE request failed due to: {e}") + + return "" + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "-a", + "--action", + choices=["create", "update", "restart", "show", "list", "delete"], + default="update", + help="Choose one: create/update/delete/restart/show/list", + ) + parser.add_argument( + "-c", + "--config", + type=str, + required=True, + help="Provide path to config file. 
Example: connectors/sink-db.json", + ) + args = parser.parse_args() + + result = make_request(action=args.action, filename=args.config) + logging.info(f"Content \n{result}") diff --git a/connectors/source-db.json b/connectors/source-db.json new file mode 100644 index 0000000..757cb55 --- /dev/null +++ b/connectors/source-db.json @@ -0,0 +1,21 @@ +{ + "name": "source-public-test-v1", + "config": { + "connector.class": "io.debezium.connector.postgresql.PostgresConnector", + "plugin.name": "pgoutput", + "database.history.kafka.bootstrap.servers": "kafka-server:9092", + "database.history.kafka.topic": "dbhistory.public", + "topic.prefix": "source_db", + "topic.creation.default.replication.factor": "3", + "topic.creation.default.partitions": "1", + "include.schema.changes": "true", + "schema.include.list": "public", + "table.include.list": "public.test", + "slot.name": "dbz_public_test", + "slot.drop.on.stop": "true", + "transforms": "route", + "transforms.route.type": "org.apache.kafka.connect.transforms.RegexRouter", + "transforms.route.regex": "([^.]+)\\.([^.]+)\\.([^.]+)", + "transforms.route.replacement": "$2.$3" + } +} diff --git a/connectors/source-db.py b/connectors/source-db.py new file mode 100644 index 0000000..28453b9 --- /dev/null +++ b/connectors/source-db.py @@ -0,0 +1,190 @@ +import requests +import json +import logging +import argparse +from dotenv import load_dotenv +from os import environ as env + + +load_dotenv() + +# Logging to console +logging.basicConfig( + level=logging.INFO, format="[ %(asctime)s - %(levelname)s ] %(message)s" +) + + +def get_name(filename: str) -> str: + with open(f"{filename}", "r") as json_file: + json_tmp = json.load(json_file) + name = json_tmp.get("name", "") + + if name == "": + logging.error("Connector name not found. Please enter it in the config file.") + raise Exception + + return name + + +def get_config(filename: str) -> dict: + with open(f"{filename}", "r") as json_file: + json_tmp = json.load(json_file) + config = json_tmp.get("config") + + if config is None: + logging.error("No config found. 
Please enter it in the config file.") + raise Exception + + if config.get("connector.class") != "io.debezium.connector.postgresql.PostgresConnector": + logging.warning( + "The connector is not from io.debezium.connector.postgresql.PostgresConnector, some functionalities may not work" + ) + + config["database.hostname"] = "source_db" + config["database.port"] = env["POSTGRES_PORT"] or "5432" + config["database.user"] = env["POSTGRES_USER"] + config["database.password"] = env["POSTGRES_PASSWORD"] + config["database.dbname"] = env["POSTGRES_DB"] + + return config + + +def get_url(action: str, name: str) -> str: + kafka_connect_host = env["CONNECT_REST_ADVERTISED_HOST_NAME"] + kafka_connect_port = env["CONNECT_REST_PORT"] + + url = f"http://{kafka_connect_host}:{kafka_connect_port}/connectors" + options = { + "create": "", + "update": f"/{name}/config", + "restart": f"/{name}/restart", + "show": f"/{name}", + "list": f"/{name}/topics", + "delete": f"/{name}", + } + url += options.get(action, "") + + return url + + +def make_request(action: str, filename: str) -> str: + # Prepare url + name = get_name(filename) + url = get_url(action, name) + logging.info(f"Endpoint: {url}") + + # Prepare headers + headers = {"Content-Type": "application/json", "Accept": "application/json"} + + if action == "create" or action == "update": + # Prepare json_data + config = get_config(filename) + + json_data = ( + json.dumps({"name": name, "config": config}) + if action == "create" + else json.dumps(config) # action == "update" + ) + + # Make request + try: + response = ( + requests.post(url=url, data=json_data, headers=headers) + if action == "create" + else requests.put(url=url, data=json_data, headers=headers) + ) + response.raise_for_status() + + logging.info("Request was successful") + content = json.loads(response.content) + content["config"][ + "database.password" + ] = "********" # WARN: keep this line; it masks the password in logged output + content["status_code"] = response.status_code + formatted_json = json.dumps(content, indent=4) + return formatted_json + except requests.exceptions.HTTPError as e: + logging.exception(f"HTTP error occurred: {e}") + except requests.exceptions.RequestException as e: + logging.exception(f"POST or PUT request failed due to: {e}") + elif action == "restart": + # Make request + try: + response = requests.post(url=url, headers=headers) + response.raise_for_status() + + if response.status_code == 204: + logging.info("Restarted. 
Status code 204 No Content") + except requests.exceptions.HTTPError as e: + logging.exception(f"HTTP error occurred: {e}") + except requests.exceptions.RequestException as e: + logging.exception(f"POST request failed due to: {e}") + elif action == "show": + # Make request + try: + response = requests.get(url=url, headers=headers) + response.raise_for_status() + + logging.info("Request was successful") + content = json.loads(response.content) + content["config"][ + "database.password" + ] = "********" # WARN: keep this line; it masks the password in logged output + content["status_code"] = response.status_code + formatted_json = json.dumps(content, indent=4) + return formatted_json + except requests.exceptions.HTTPError as e: + logging.exception(f"HTTP error occurred: {e}") + except requests.exceptions.RequestException as e: + logging.exception(f"GET request failed due to: {e}") + elif action == "list": + # Make request + try: + response = requests.get(url=url, headers=headers) + response.raise_for_status() + + logging.info("Request was successful") + content = json.loads(response.content) + content["status_code"] = response.status_code + formatted_json = json.dumps(content, indent=4) + return formatted_json + except requests.exceptions.HTTPError as e: + logging.exception(f"HTTP error occurred: {e}") + except requests.exceptions.RequestException as e: + logging.exception(f"GET request failed due to: {e}") + else: # action = "delete" + # Make request + try: + response = requests.delete(url=url, headers=headers) + response.raise_for_status() + + if response.status_code == 204: + logging.info("Deleted. Status code 204 No Content") + except requests.exceptions.HTTPError as e: + logging.exception(f"HTTP error occurred: {e}") + except requests.exceptions.RequestException as e: + logging.exception(f"DELETE request failed due to: {e}") + + return "" + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "-a", + "--action", + choices=["create", "update", "restart", "show", "list", "delete"], + default="update", + help="Choose one: create/update/delete/restart/show/list", + ) + parser.add_argument( + "-c", + "--config", + type=str, + required=True, + help="Provide path to config file. Example: connectors/source-db.json", + ) + args = parser.parse_args() + + result = make_request(action=args.action, filename=args.config) + logging.info(f"Content \n{result}") diff --git a/database-replication/.docker/images/app/Dockerfile b/database-replication/.docker/images/app/Dockerfile deleted file mode 100644 index 71b5491..0000000 --- a/database-replication/.docker/images/app/Dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -FROM python:3.11-slim - -WORKDIR /app - -RUN python3 -m venv .venv -RUN pip install pip --upgrade - -COPY app/requirements.txt /app/requirements.txt -RUN pip install --no-cache-dir -r /app/requirements.txt - -COPY app/ . 
- -CMD [ "streamlit", "run", "ui.py", "--server.address=0.0.0.0" ] diff --git a/database-replication/.gitignore b/database-replication/.gitignore deleted file mode 100644 index c762576..0000000 --- a/database-replication/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -.docker/data/* -.docker/backups/* -restore.log -database-replication.code-workspace diff --git a/database-replication/Makefile b/database-replication/Makefile deleted file mode 100644 index 3f4ea2e..0000000 --- a/database-replication/Makefile +++ /dev/null @@ -1,55 +0,0 @@ -include .env - -# ============ Docker compose ============ -build: - docker compose build - -up: - docker compose up - -up-d: - docker compose up -d - -up-build: - docker compose up --build - -up-build-d: - docker compose up --build -d - -down: - docker compose down - -restart: down up - -restart-d: down up-d - -restart-build-d: down up-build-d - -sleep: - sleep 20 - -# ============ Testing, formatting, type checks, link checks ============ -app-requirements: - rm app/requirements.txt && \ - pip freeze > app/requirements.txt - -docs: - dbdocs build docs/wideworldimporters.dbml - -format: - python -m black -S --line-length 88 --preview ./app - -lint: - python -m ruff check --fix ./app - -test: - python -m pytest --log-cli-level info -p no:warnings -v ./app - -ci: docs app-requirements format lint - -# ============ Postgres ============ -to-psql-default: - @docker compose exec -it source_db psql postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/postgres - -to-psql: - @docker compose exec -it source_db psql postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB} diff --git a/database-replication/README.md b/database-replication/README.md deleted file mode 100644 index e14bc8e..0000000 --- a/database-replication/README.md +++ /dev/null @@ -1,200 +0,0 @@ -# Project: Database Replication - -- [Project: Database Replication](#project-database-replication) - - [Tasks](#tasks) - - [Local development guide](#local-development-guide) - - [Prequisites](#prequisites) - - [Install codebase](#install-codebase) - - [Restore the database](#restore-the-database) - - [Design architecture](#design-architecture) - - [Database catalog](#database-catalog) - - [Schema](#schema) - - [Data schemas](#data-schemas) - - [Secure-access schemas](#secure-access-schemas) - - [Development schemas](#development-schemas) - - [Tables](#tables) - - [Application schema](#application-schema) - - [Purchasing schema](#purchasing-schema) - - [Sales schema](#sales-schema) - - [Warehouse schema](#warehouse-schema) - -## Tasks - -Visit here: https://github.com/users/lelouvincx/projects/5/ - -- [x] Design architecture -- [x] Initialize Postgres with dataset - - [x] Database catalog -- [x] Build Flask application to fake generate data in realtime -- [ ] Build Kafka Cluster with 3 nodes -- [ ] Unittest for `Fake Data Generation` app - - [ ] TDD (Test Driven Development) - -## Local development guide - -### Prequisites -- Python version >= 3.9 (3.11 recommended) -- Docker with docker compose (at least 4 core and 4gb of RAM) - -### Install codebase -1. Clone the repository & go to the project location (/database-replication) -2. Install python dependencies -```bash -python -m venv .venv -source .venv/bin/activate -pip install -r app/requirements.txt -``` -3. Build docker images -```bash -docker build -t data-generator:localdev -f .docker/build/app/Dockerfile . -``` -4. 
Create necessary volumes -```bash -mkdir -p .docker/backups/postgres -mkdir -p .docker/data/postgres - -mkdir -p .docker/data/zookeeper -sudo chown -R 1001:1001 .docker/data/zookeeper - -mkdir -p .docker/data/kafka -sudo chown -R 1001:1001 .docker/data/kafka -``` -5. Start docker services -``` -make up -``` -6. Visit [Makefile](./Makefile) to short-binding commands - -### Restore the database - -1. Download dump file at https://github.com/Azure/azure-postgresql/blob/master/samples/databases/wide-world-importers/wide_world_importers_pg.dump -2. Place it into ./.docker/backups/postgres -3. Spawn up the postgres container, notice that there's 5 users: admin, azure_pg_admin, azure_superuser, greglow, data_engineer -4. Shell to postgres - -```bash -docker compose exec -it source_db /bin/bash -``` - -5. Restore (inside postgres container) - -```bash -pg_restore -h localhost -p 5432 -U admin -W -v -Fc -d wideworldimporters < /backups/wide_world_importers_pg.dump -``` - -Then enter admin's password and take a coffee. - -## Design architecture - -![](./docs/images/database-replication.drawio.png) - -## Database catalog - -Visit: https://dbdocs.io/lelouvincx/WideWorldImporters - -### Schema - -WideWorldImporters uses schemas for different purposes, such as storing data, defining how users can access the data, and providing objects for data warehouse development and integration. - -#### Data schemas - -These schemas contain the data. Many tables are needed by all other schemas and are located in the Application schema. - -| Schema | Description | -| ----------- | --------------------------------------------------------------------------------------------------------------------------------------- | -| Application | Application-wide users, contacts, and parameters. This schema also contains reference tables with data that is used by multiple schemas | -| Purchasing | Stock item purchases from suppliers and details about suppliers. | -| Sales | Stock item sales to retail customers, and details about customers and sales people. | -| Warehouse | Stock item inventory and transactions. | - -#### Secure-access schemas - -These schemas are used for external applications that are not allowed to access the data tables directly. They contain views and stored procedures used by external applications. - -| Schema | Description | -| ------- | ---------------------------------------------------------------------------------------------------------- | -| Website | All access to the database from the company website is through this schema. | -| Reports | All access to the database from Reporting Services reports is through this schema. | -| PowerBI | All access to the database from the Power BI dashboards via the Enterprise Gateway is through this schema. | - -The Reports and PowerBI schemas are not used in the initial release of the sample database. However, all Reporting Services and Power BI samples built on top of this database are encouraged to use these schemas. - -#### Development schemas - -Special-purpose schemas - -| Schema | Description | -| ----------- | ---------------------------------------------------------------------------------------------------------------------------------- | -| Integration | Objects and procedures required for data warehouse integration (that is, migrating the data to the WideWorldImportersDW database). | -| Sequences | Holds sequences used by all tables in the application. | - -### Tables - -All tables in the database are in the data schemas. 
- -#### Application schema - -Details of parameters and people (users and contacts), along with common reference tables (common to multiple other schemas). - -| Table | Description | -| ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| SystemParameters | Contains system-wide configurable parameters. | -| People | Contains user names, contact information, for all who use the application, and for the people that the Wide World Importers deals with at customer organizations. This table includes staff, customers, suppliers, and any other contacts. For people who have been granted permission to use the system or website, the information includes login details. | -| Application.cities | There are many addresses stored in the system, for people, customer organization delivery addresses, pickup addresses at suppliers, etc. Whenever an address is stored, there is a reference to a city in this table. There is also a spatial location for each city. | -| StateProvinces | Application.cities are part of states or provinces. This table has details of those, including spatial data describing the boundaries each state or province. | -| countries | States or Provinces are part of countries/regions. This table has details of those, including spatial data describing the boundaries of each country/region. | -| DeliveryMethods | Choices for delivering stock items (for example, truck/van, post, pickup, courier, etc.) | -| PaymentMethods | Choices for making payments (for example, cash, check, EFT, etc.) | -| TransactionTypes | Types of customer, supplier, or stock transactions (for example, invoice, credit note, etc.) | - -#### Purchasing schema - -Details of suppliers and of stock item purchases. - -| Table | Description | -| -------------------- | ---------------------------------------------------------------------------------- | -| Suppliers | Main entity table for suppliers (organizations) | -| SupplierCategories | Categories for suppliers (for example, novelties, toys, clothing, packaging, etc.) | -| SupplierTransactions | All financial transactions that are supplier-related (invoices, payments) | -| PurchaseOrders | Details of supplier purchase orders | -| PurchaseOrderLines | Detail lines from supplier purchase orders | - -#### Sales schema - -Details of customers, salespeople, and of stock item sales. - -| Table | Description | -| -------------------- | ---------------------------------------------------------------------------------------- | -| Customers | Main entity tables for customers (organizations or individuals) | -| CustomerCategories | Categories for customers (for example, novelty stores, supermarkets, etc.) | -| BuyingGroups | Customer organizations can be part of groups that exert greater buying power | -| CustomerTransactions | All financial transactions that are customer-related (invoices, payments) | -| SpecialDeals | Special pricing. This can include fixed prices, discount in dollars or discount percent. 
| -| Orders | Detail of customer orders | -| OrderLines | Detail lines from customer orders | -| Invoices | Details of customer invoices | -| InvoiceLines | Detail lines from customer invoices | - -#### Warehouse schema - -Details of stock items, their holdings and transactions. - -| Table | Description | -| --------------------- | ------------------------------------------------------------------------------------------ | -| StockItems | Main entity table for stock items | -| StockItemHoldings | Non-temporal columns for stock items. These are frequently updated columns. | -| StockGroups | Groups for categorizing stock items (for example, novelties, toys, edible novelties, etc.) | -| StockItemStockGroups | Which stock items are in which stock groups (many to many) | -| Colors | Stock items can (optionally) have colors | -| PackageTypes | Ways that stock items can be packaged (for example, box, carton, pallet, kg, etc. | -| StockItemTransactions | Transactions covering all movements of all stock items (receipt, sale, write-off) | -| VehicleTemperatures | Regularly recorded temperatures of vehicle chillers | -| ColdRoomTemperatures | Regularly recorded temperatures of cold room chillers | - -## Troubleshoot - -- Cannot create directory '/bitnami/...'. You should chown to your docker user's id. [Refer here](https://github.com/bitnami/containers/issues/41422#issuecomment-1674497129) -```bash -sudo chown -R 1001:1001 .docker/data/kafka -sudo chown -R 1001:1001 .docker/data/zookeeper -``` diff --git a/database-replication/app/.streamlit/config.toml b/database-replication/app/.streamlit/config.toml deleted file mode 100644 index 374d7a7..0000000 --- a/database-replication/app/.streamlit/config.toml +++ /dev/null @@ -1,2 +0,0 @@ -[server] -runOnSave = true \ No newline at end of file diff --git a/database-replication/app/experiment.ipynb b/database-replication/app/experiment.ipynb deleted file mode 100644 index b7ec575..0000000 --- a/database-replication/app/experiment.ipynb +++ /dev/null @@ -1,167 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from sqlalchemy import text\n", - "from psql_connector import PsqlConnector\n", - "\n", - "from os import environ as env\n", - "from dotenv import load_dotenv\n", - "load_dotenv(dotenv_path='../.env')" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "# Init psql connector\n", - "psql_params = {\n", - " \"host\": \"localhost\",\n", - " \"port\": env[\"POSTGRES_PORT\"],\n", - " \"user\": env[\"POSTGRES_USER\"],\n", - " \"password\": env[\"POSTGRES_PASSWORD\"],\n", - " \"database\": env[\"POSTGRES_DB\"],\n", - "}\n", - "\n", - "# Setup psql connector\n", - "psql_connector = PsqlConnector(psql_params)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['application', 'purchasing', 'sales', 'warehouse', 'public']\n" - ] - } - ], - "source": [ - "# Fetch schemas\n", - "schemas = []\n", - "with psql_connector.connect() as engine:\n", - " with engine.connect() as cursor:\n", - " sql_script = \"\"\"\n", - " SELECT schema_name\n", - " FROM information_schema.schemata;\n", - " \"\"\"\n", - " schemas = cursor.execute(text(sql_script)).fetchall()\n", - "\n", - " # Remove system schemas\n", 
- " schemas = [\n", - " schema[0]\n", - " for schema in schemas\n", - " if schema[0]\n", - " not in [ \"pg_toast\", \"pg_temp_1\", \"pg_toast_temp_1\", \"pg_catalog\", \"information_schema\", ]\n", - " ]\n", - "\n", - " # Remove schemas: data_load_simulation, integration, power_bi, reports, sequences\n", - " schemas = [\n", - " schema for schema in schemas if schema not in [ \"data_load_simulation\", \"integration\", \"power_bi\", \"reports\", \"sequences\", \"website\" ]\n", - " ]\n", - "\n", - "print(schemas)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'int8', 'numeric', 'bool', 'varchar', 'date', 'int4', 'text', 'timestamp', 'bytea', 'bpchar'}\n" - ] - } - ], - "source": [ - "# Fetch attributes from schemas\n", - "attributes = set()\n", - "\n", - "with psql_connector.connect() as engine:\n", - " with engine.connect() as cursor:\n", - " for schema in schemas:\n", - " sql_script = f\"\"\"\n", - " WITH rows AS (\n", - "\t SELECT c.relname AS table_name,\n", - "\t\t\t a.attname AS attribute_name,\n", - "\t\t\t a.attnotnull AS is_attribute_null,\n", - "\t\t\t a.attnum AS attribute_num,\n", - "\t\t\t t.typname AS type_name\n", - "\t FROM pg_catalog.pg_class c\n", - "\t\t JOIN pg_catalog.pg_attribute a\n", - "\t\t ON c.\"oid\" = a.attrelid AND a.attnum >= 0\n", - "\t\t JOIN pg_catalog.pg_type t\n", - "\t\t ON t.\"oid\" = a.atttypid\n", - "\t\t JOIN pg_catalog.pg_namespace n\n", - "\t\t ON c.relnamespace = n.\"oid\"\n", - "\t WHERE n.nspname = '{schema}'\n", - "\t\t AND c.relkind = 'r'\n", - " ),\n", - " agg AS (\n", - "\t SELECT rows.table_name, json_agg(rows ORDER BY attribute_num) AS attrs\n", - "\t FROM rows\n", - "\t GROUP BY rows.table_name\n", - " )\n", - " SELECT json_object_agg(agg.table_name, agg.attrs)\n", - " FROM agg;\n", - " \"\"\"\n", - " fetch_result = cursor.execute(text(sql_script)).fetchone()[0]\n", - " # Loop through all keys in fetch_result\n", - " for key in fetch_result.keys():\n", - " table = fetch_result.get(key)\n", - " for attrs in table:\n", - " # Add attrs.get(\"type_name\") into attributes\n", - " if attrs.get(\"type_name\") not in attributes:\n", - " attributes.add(attrs.get(\"type_name\"))\n", - "\n", - "print(attributes)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.4" - }, - "orig_nbformat": 4 - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/database-replication/docker-compose.yml b/database-replication/docker-compose.yml deleted file mode 100644 index 2f77f5b..0000000 --- a/database-replication/docker-compose.yml +++ /dev/null @@ -1,99 +0,0 @@ -version: "3.4" -name: "db-replication" - -services: - source_db: - image: postgres:14-alpine - container_name: "source_db" - ports: - - "5432:5432" - volumes: - - .docker/data/postgres:/var/lib/postgresql/pgdata - - .docker/backups/postgres:/backups - - .docker/postgres-add-de-user.sh:/docker-entrypoint-initdb.d/postgres-add-de-user.sh - environment: - - POSTGRES_USER=${POSTGRES_USER} - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - - POSTGRES_DB=${POSTGRES_DB} - - PGDATA=/var/lib/postgresql/pgdata - restart: unless-stopped - healthcheck: - test: pg_isready -U 
${POSTGRES_USER} -d ${POSTGRES_DB} - interval: 1m30s - timeout: 30s - retries: 5 - start_period: 30s - - adminer: - image: adminer:standalone - container_name: "adminer" - ports: - - "8080:8080" - depends_on: - - source_db - restart: unless-stopped - - data_gen: - image: data-generator:localdev - container_name: "data_gen" - ports: - - "8501:8501" - volumes: - - ./app:/app - env_file: .env - restart: on-failure - depends_on: - - source_db - healthcheck: - test: curl --fail http://localhost:8501/healthz || exit 1 - interval: 1m30s - timeout: 30s - retries: 5 - start_period: 30s - - zookeeper: - image: docker.io/bitnami/zookeeper:3.8 - container_name: zookeeper - ports: - - "2181:2181" - volumes: - - .docker/data/zookeeper:/bitnami/zookeeper - environment: - - ALLOW_ANONYMOUS_LOGIN=yes - restart: unless-stopped - healthcheck: - test: nc -z localhost 2181 | exit 1 - interval: 1m30s - timeout: 30s - retries: 5 - start_period: 30s - - kafka-0: - image: docker.io/bitnami/kafka:3.4 - container_name: kafka-broker-0 - ports: - - "9092:9092" - volumes: - - .docker/data/kafka:/bitnami/kafka - environment: - - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181 - depends_on: - - zookeeper - restart: unless-stopped - healthcheck: - test: nc -z localhost 9092 | exit 1 - interval: 1m30s - timeout: 30s - retries: 5 - start_period: 30s - - kafka-manager: - image: docker.io/sheepkiller/kafka-manager - container_name: kafka-manager - ports: - - "9000:9000" - environment: - - ZK_HOSTS=zookeeper:2181 - - APPLICATION_SECRET=letmein - depends_on: - - zookeeper diff --git a/database-replication/env.example b/database-replication/env.example deleted file mode 100644 index dff34ff..0000000 --- a/database-replication/env.example +++ /dev/null @@ -1,5 +0,0 @@ -POSTGRES_USER=admin -POSTGRES_PASSWORD=admin123 -POSTGRES_HOST=127.0.0.1 -POSTGRES_PORT=5432 -POSTGRES_DB=wideworldimporters diff --git a/database-replication/restore.sql b/database-replication/restore.sql deleted file mode 100644 index e223bef..0000000 --- a/database-replication/restore.sql +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e6abe7af16b22dfbc126ca416f1d0270818e40a7e58563c7645901667b37534a -size 128733190 diff --git a/database-replication/test-table.sql b/database-replication/test-table.sql deleted file mode 100644 index 5d885bc..0000000 --- a/database-replication/test-table.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE TABLE public.test ( - id SERIAL PRIMARY KEY, - name VARCHAR(20) NOT NULL, - address VARCHAR(100) NOT NULL, - zipcode CHAR(5), - introduction TEXT -); diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..e9691a8 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,252 @@ +version: "3.4" +name: "database-replication" + +services: + source_db: + image: postgres:14-alpine + container_name: "source_db" + ports: + - "5432:5432" + volumes: + - .docker/data/postgres:/var/lib/postgresql/pgdata + - .docker/backups/postgres:/backups + - .docker/postgres-add-de-user.sh:/docker-entrypoint-initdb.d/postgres-add-de-user.sh + environment: + - POSTGRES_USER=${POSTGRES_USER} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_DB=${POSTGRES_DB} + - PGDATA=/var/lib/postgresql/pgdata + restart: unless-stopped + networks: + - upstream_networks + - kafka_networks + healthcheck: + test: pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB} + interval: 30s + timeout: 10s + retries: 5 + start_period: 10s + + adminer: + image: adminer:standalone + container_name: "adminer" + ports: + - "8080:8080" + 
depends_on:
+      - source_db
+    networks:
+      - upstream_networks
+    restart: unless-stopped
+
+  upstream-app:
+    build:
+      context: .
+      dockerfile: .docker/images/app/Dockerfile
+    container_name: "upstream-app"
+    ports:
+      - "8501:8501"
+    volumes:
+      - ./app:/app
+    environment:
+      - POSTGRES_USER=${POSTGRES_USER}
+      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
+      - POSTGRES_HOST=source_db
+      - POSTGRES_PORT=${POSTGRES_PORT}
+      - POSTGRES_DB=${POSTGRES_DB}
+    restart: on-failure
+    depends_on:
+      - source_db
+    networks:
+      - upstream_networks
+    healthcheck:
+      test: curl --fail http://localhost:8501/healthz || exit 1
+      interval: 30s
+      timeout: 30s
+      retries: 5
+      start_period: 10s
+
+  zookeeper:
+    image: bitnami/zookeeper:3.8
+    container_name: zookeeper
+    volumes:
+      - .docker/data/zookeeper:/bitnami/zookeeper
+    environment:
+      - ZOO_SERVER_ID=0
+      - ALLOW_ANONYMOUS_LOGIN=yes
+      # How far out of date a server can be from a leader
+      - ZOO_SYNC_LIMIT=7
+      - ZOO_LOG_LEVEL=INFO
+    restart: unless-stopped
+    networks:
+      - kafka_networks
+
+  kafka-0:
+    image: bitnami/kafka:3.5
+    container_name: kafka-server
+    ports:
+      - "9092:9092"
+    volumes:
+      - .docker/data/kafka-0:/bitnami/kafka
+      - .docker/log/kafka-0:/tmp/kafka_mounts/logs
+    environment:
+      - KAFKA_BROKER_ID=0
+      - ALLOW_PLAINTEXT_LISTENER=true
+      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092
+      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka-0:9092
+      - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181
+      - KAFKA_CFG_LOG_DIRS=/tmp/kafka_mounts/logs
+    restart: unless-stopped
+    depends_on:
+      - zookeeper
+    networks:
+      - kafka_networks
+    healthcheck:
+      test: kafka-topics.sh --list --bootstrap-server kafka-server:9092 || exit 1
+      interval: 5s
+      timeout: 5s
+      retries: 5
+      start_period: 30s
+
+  kafka-1:
+    image: bitnami/kafka:3.5
+    container_name: kafka-1
+    ports:
+      - ":9092"
+    volumes:
+      - .docker/data/kafka-1:/bitnami/kafka
+      - .docker/log/kafka-1:/tmp/kafka_mounts/logs
+    environment:
+      - KAFKA_BROKER_ID=1
+      - ALLOW_PLAINTEXT_LISTENER=true
+      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092
+      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka-1:9092
+      - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181
+      - KAFKA_CFG_LOG_DIRS=/tmp/kafka_mounts/logs
+    restart: unless-stopped
+    depends_on:
+      - zookeeper
+    networks:
+      - kafka_networks
+    healthcheck:
+      test: kafka-topics.sh --list --bootstrap-server kafka-1:9092 || exit 1
+      interval: 5s
+      timeout: 5s
+      retries: 5
+      start_period: 30s
+
+  kafka-2:
+    image: bitnami/kafka:3.5
+    container_name: kafka-2
+    ports:
+      - ":9092"
+    volumes:
+      - .docker/data/kafka-2:/bitnami/kafka
+      - .docker/log/kafka-2:/tmp/kafka_mounts/logs
+    environment:
+      - KAFKA_BROKER_ID=2
+      - ALLOW_PLAINTEXT_LISTENER=true
+      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092
+      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka-2:9092
+      - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181
+      - KAFKA_CFG_LOG_DIRS=/tmp/kafka_mounts/logs
+    restart: unless-stopped
+    depends_on:
+      - zookeeper
+    networks:
+      - kafka_networks
+    healthcheck:
+      test: kafka-topics.sh --list --bootstrap-server kafka-2:9092 || exit 1
+      interval: 5s
+      timeout: 5s
+      retries: 5
+      start_period: 30s
+
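+  # NOTE: each broker advertises its compose service name (kafka-0/kafka-1/kafka-2),
+  # so clients inside kafka_networks must bootstrap with those names; only kafka-0
+  # is also published to the host, at localhost:9092.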
+  kafka-connect:
+    build:
+      context: .
+      dockerfile: .docker/images/kafka-connect/Dockerfile
+    container_name: kafka-connect
+    ports:
+      - "8083:8083"
+    volumes:
+      - .docker/data/kafka-connect:/data
+    environment:
+      - CONNECT_BOOTSTRAP_SERVERS=${CONNECT_BOOTSTRAP_SERVERS}
+      - CONNECT_GROUP_ID=kafka-connectx
+      - CONNECT_REPLICATION_FACTOR=3
+      - CONNECT_CONFIG_STORAGE_TOPIC=_kafka-connectx-config
+      - CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR=3
+      - CONNECT_OFFSET_STORAGE_TOPIC=_kafka-connectx-offsets
+      - CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR=3
+      - CONNECT_STATUS_STORAGE_TOPIC=_kafka-connectx-status
+      - CONNECT_STATUS_STORAGE_REPLICATION_FACTOR=3
+      # Converters
+      - CONNECT_KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter
+      - CONNECT_VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter
+      - CONNECT_INTERNAL_KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter
+      - CONNECT_INTERNAL_VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter
+      # Misc
+      - CONNECT_REST_ADVERTISED_HOST_NAME=${CONNECT_REST_ADVERTISED_HOST_NAME}
+      - CONNECT_REST_PORT=8083
+      - CONNECT_LISTENERS=http://kafka-connect:8083
+      - CONNECT_PLUGIN_PATH=/usr/share/java,/usr/share/confluent-hub-components
+    restart: unless-stopped
+    depends_on:
+      - zookeeper
+      - kafka-0
+    networks:
+      - kafka_networks
+
+  kafka-ui:
+    image: provectuslabs/kafka-ui:latest
+    container_name: kafka-manager-ui
+    ports:
+      - "8000:8080"
+    environment:
+      - KAFKA_CLUSTERS_0_NAME=${KAFKA_CLUSTERS_0_NAME}
+      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=${KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS}
+      - KAFKA_CLUSTERS_0_ZOOKEEPER=${KAFKA_CLUSTERS_0_ZOOKEEPER}
+      - KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=${KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL}
+      - KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=${KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM}
+      - KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=${KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG}
+      - KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME=kafka-connect
+      - KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS=http://kafka-connect:8083
+    depends_on:
+      - zookeeper
+      - kafka-0
+    networks:
+      - kafka_networks
+
+  sink_db:
+    image: mcr.microsoft.com/mssql/server:2022-CU8-ubuntu-20.04
+    container_name: sink_db
+    user: root
+    ports:
+      - "1433:1433"
+    environment:
+      - ACCEPT_EULA=Y
+      - MSSQL_PID=Developer
+      - MSSQL_USER=SA
+      - MSSQL_SA_PASSWORD=${MSSQL_SA_PASSWORD}
+    volumes:
+      - .docker/data/mssql:/var/opt/mssql/data
+      - .docker/log/mssql:/var/opt/mssql/log
+      - .docker/backups/mssql:/var/opt/mssql/backup
+    restart: on-failure
+    networks:
+      - downstream_networks
+      - kafka_networks
+    healthcheck:
+      test: /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P "$$MSSQL_SA_PASSWORD" -Q "SELECT 1;" || exit 1
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 10s
+
+networks:
+  upstream_networks:
+    driver: bridge
+  kafka_networks:
+    driver: bridge
+  downstream_networks:
+    driver: bridge
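With every service defined, a plausible local bring-up (assuming a `.env` file created from the `env.example` added further down) is:

```bash
docker compose up --build -d
curl http://localhost:8083/connectors    # Kafka Connect REST API
# Kafka UI: http://localhost:8000, Adminer: http://localhost:8080
```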
diff --git a/docs/diagram.py b/docs/diagram.py new file mode 100644 index 0000000..a95943e --- /dev/null +++ b/docs/diagram.py @@ -0,0 +1,64 @@
+from diagrams import Diagram, Cluster, Edge
+from diagrams.programming.language import Python
+from diagrams.generic.compute import Rack
+from diagrams.generic.database import SQL
+from diagrams.onprem.database import PostgreSQL, Mssql
+from diagrams.onprem.queue import Kafka
+from diagrams.onprem.network import Zookeeper
+
+
+ATTRS = {
+    "fontname": "JetBrainsMono Nerd Font Mono",
+    "nodesep": "0.90",
+    "ranksep": "1.0",
+}
+
+
+with Diagram(
+    "Design Architecture",
+    show=False,
+    direction="LR",
+    graph_attr=ATTRS,
+    node_attr=ATTRS,
+    edge_attr=ATTRS,
+):
+    upstream_app = Python("Upstream app")
+    source_db = PostgreSQL("Source database")
+    sink_db = Mssql("Sink database")
+
+    with Cluster("Kafka Connect", graph_attr=ATTRS):
+        with Cluster("Debezium CDC Source Connector", graph_attr=ATTRS):
+            source_workers = Rack("Producer")
+
+        with Cluster("JDBC Sink Connector", graph_attr=ATTRS):
+            consumer_1 = SQL("Consumer Topic 1")
+            consumer_2 = SQL("Consumer Topic 2")
+
+    with Cluster("Kafka Ecosystem", graph_attr=ATTRS):
+        with Cluster("Kafka Cluster", graph_attr=ATTRS):
+            kafka_0 = Kafka("Broker 0")
+            kafka_1 = Kafka("Broker 1")
+            kafka_2 = Kafka("Broker 2")
+            kafka_0 >> kafka_1 >> kafka_0
+            kafka_2 >> kafka_1 >> kafka_2
+            brokers = [kafka_0, kafka_1, kafka_2]
+        zookeeper = Zookeeper("Zookeeper")
+
+    # The upstream app generates data into PostgreSQL
+    upstream_app >> Edge(label="generate data") >> source_db
+
+    # PostgreSQL publishes changes to Debezium
+    source_db >> Edge(label="produce") >> source_workers
+
+    # Debezium workers produce messages to Kafka
+    source_workers >> kafka_1
+    zookeeper - Edge(label="get broker id", style="dashed") - source_workers
+
+    # JDBC workers consume messages from Kafka
+    kafka_1 >> consumer_1
+    kafka_1 >> consumer_2
+    zookeeper - Edge(label="update offset", style="dashed") - consumer_1
+
+    # Pull into MSSQL
+    consumer_1 >> sink_db
+    consumer_2 >> sink_db
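The script renders with the `diagrams` package, which needs the Graphviz binaries installed. Assuming the library's default behavior, `show=False` suppresses the image viewer and the PNG is written to the working directory, named from the diagram title, which is presumably how the `docs/images/design_architecture.png` binary in the next hunk was produced:

```bash
pip install diagrams      # Graphviz must be installed separately
python docs/diagram.py    # writes design_architecture.png; move it under docs/images/
```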
diff --git a/database-replication/docs/images/database-replication.drawio.png b/docs/images/database-replication.drawio.png
similarity index 100%
rename from database-replication/docs/images/database-replication.drawio.png
rename to docs/images/database-replication.drawio.png
diff --git a/docs/images/design_architecture.png b/docs/images/design_architecture.png
new file mode 100644
index 0000000..5191881
Binary files /dev/null and b/docs/images/design_architecture.png differ
diff --git a/database-replication/docs/wideworldimporters.dbml b/docs/wideworldimporters.dbml
similarity index 97%
rename from database-replication/docs/wideworldimporters.dbml
rename to docs/wideworldimporters.dbml
index 26ef47c..d888e65 100644
--- a/database-replication/docs/wideworldimporters.dbml
+++ b/docs/wideworldimporters.dbml
@@ -11,17 +11,18 @@ Project WideWorldImporters {
 These schemas contain the data. Many tables are needed by all other schemas and are located in the Application schema.
 
-| Schema | Description |
-| --- | ----------- |
-| Application | Application-wide users, contacts, and parameters. This schema also contains reference tables with data that is used by multiple schemas |
-| Purchasing | Stock item purchases from suppliers and details about suppliers. |
-| Sales | Stock item sales to retail customers, and details about customers and sales people. |
-| Warehouse | Stock item inventory and transactions. |
-| Website | All access to the database from the company website is through this schema. |
-| Reports | All access to the database from Reporting Services reports is through this schema. |
-| PowerBI | All access to the database from the Power BI dashboards via the Enterprise Gateway is through this schema. |
-| Integration | Objects and procedures required for data warehouse integration (that is, migrating the data to the WideWorldImportersDW database). |
-| Sequences | Holds sequences used by all tables in the application. |
+| Schema | Description | Status |
+| --- | ----------- | --- |
+| Application | Application-wide users, contacts, and parameters. This schema also contains reference tables with data that is used by multiple schemas | In use |
+| Purchasing | Stock item purchases from suppliers and details about suppliers. | In use |
+| Sales | Stock item sales to retail customers, and details about customers and sales people. | In use |
+| Warehouse | Stock item inventory and transactions. | In use |
+| Public | Public schema to hold dump tables | In use |
+| Website | All access to the database from the company website is through this schema. | Idle |
+| Reports | All access to the database from Reporting Services reports is through this schema. | Idle |
+| PowerBI | All access to the database from the Power BI dashboards via the Enterprise Gateway is through this schema. | Idle |
+| Integration | Objects and procedures required for data warehouse integration (that is, migrating the data to the WideWorldImportersDW database). | Idle |
+| Sequences | Holds sequences used by all tables in the application. | Idle |
 '''
 }
@@ -458,6 +459,15 @@ TABLE Warehouse.stock_items {
   note: "Main entity table for stock items"
 }
 
+TABLE Public.test {
+  id integer [primary key]
+  name varchar(20)
+  address varchar(100)
+  zipcode char(5)
+  introduction text
+  note: "Sample table"
+}
+
 Ref: Application.cities.last_edited_by > Application.people.person_id
 Ref: Application.cities.state_province_id > Application.state_provinces.state_province_id
 Ref: Application.countries.last_edited_by > Application.people.person_id
diff --git a/env.example b/env.example
new file mode 100644
index 0000000..8499802
--- /dev/null
+++ b/env.example
@@ -0,0 +1,37 @@
+# Source_db
+POSTGRES_USER=admin
+POSTGRES_PASSWORD=admin123
+POSTGRES_DE_USER=data_engineer
+POSTGRES_DE_PASSWORD=data_engineer_123
+POSTGRES_HOST=127.0.0.1
+POSTGRES_PORT=5432
+POSTGRES_DB=wideworldimporters
+
+# Kafka manager UI
+KAFKA_CLUSTERS_0_NAME=kafka-server
+KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka-server:9092
+KAFKA_CLUSTERS_0_ZOOKEEPER=zookeeper:2181
+KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=PLAINTEXT
+KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=PLAIN
+KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin123"
+
+# Kafka server
+BOOTSTRAP_SERVERS=kafka-server:9092
+
+# Kafka Connect
+CONNECT_BOOTSTRAP_SERVERS=kafka-server:9092
+CONNECT_GROUP_ID=kafka-connectx
+CONNECT_CONFIG_STORAGE_TOPIC=_kafka-connectx-config
+CONNECT_OFFSET_STORAGE_TOPIC=_kafka-connectx-offsets
+CONNECT_STATUS_STORAGE_TOPIC=_kafka-connectx-status
+CONNECT_KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter
+CONNECT_VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter
+CONNECT_INTERNAL_KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter
+CONNECT_INTERNAL_VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter
+CONNECT_REST_ADVERTISED_HOST_NAME=localhost
+CONNECT_REST_PORT=8083
+CONNECT_PLUGIN_PATH=/usr/share/java,/usr/share/confluent-hub-components
+
+# MSSQL Server
+MSSQL_DB=wideworldimporters
+MSSQL_SA_PASSWORD=Admin123@
diff --git a/learn-kafka/.gitignore b/learn-kafka/.gitignore
deleted file mode 100644
index 6a975e0..0000000
--- a/learn-kafka/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-.venv/*
-logs
-test.sink.txt
-.docker/
diff --git a/learn-kafka/LICENSE b/learn-kafka/LICENSE
deleted file mode 100644
index 233c99e..0000000
--- a/learn-kafka/LICENSE
+++ /dev/null
@@ -1,326 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. 
Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- -------------------------------------------------------------------------------- -This project bundles some components that are also licensed under the Apache -License Version 2.0: - -audience-annotations-0.13.0 -commons-cli-1.4 -commons-lang3-3.8.1 -jackson-annotations-2.13.5 -jackson-core-2.13.5 -jackson-databind-2.13.5 -jackson-dataformat-csv-2.13.5 -jackson-datatype-jdk8-2.13.5 -jackson-jaxrs-base-2.13.5 -jackson-jaxrs-json-provider-2.13.5 -jackson-module-jaxb-annotations-2.13.5 -jackson-module-scala_2.13-2.13.5 -jackson-module-scala_2.12-2.13.5 -jakarta.validation-api-2.0.2 -javassist-3.29.2-GA -jetty-client-9.4.51.v20230217 -jetty-continuation-9.4.51.v20230217 -jetty-http-9.4.51.v20230217 -jetty-io-9.4.51.v20230217 -jetty-security-9.4.51.v20230217 -jetty-server-9.4.51.v20230217 -jetty-servlet-9.4.51.v20230217 -jetty-servlets-9.4.51.v20230217 -jetty-util-9.4.51.v20230217 -jetty-util-ajax-9.4.51.v20230217 -jose4j-0.9.3 -lz4-java-1.8.0 -maven-artifact-3.8.8 -metrics-core-4.1.12.1 -metrics-core-2.2.0 -netty-buffer-4.1.94.Final -netty-codec-4.1.94.Final -netty-common-4.1.94.Final -netty-handler-4.1.94.Final -netty-resolver-4.1.94.Final -netty-transport-4.1.94.Final -netty-transport-classes-epoll-4.1.94.Final -netty-transport-native-epoll-4.1.94.Final -netty-transport-native-unix-common-4.1.94.Final -plexus-utils-3.3.1 -reload4j-1.2.25 -rocksdbjni-7.1.2 -scala-collection-compat_2.13-2.10.0 -scala-library-2.13.10 -scala-logging_2.13-3.9.4 -scala-reflect-2.13.10 -scala-java8-compat_2.13-1.0.2 -snappy-java-1.1.10.1 -swagger-annotations-2.2.8 -zookeeper-3.6.4 -zookeeper-jute-3.6.4 - -=============================================================================== -This product bundles various third-party components under other open source -licenses. This section summarizes those components and their licenses. -See licenses/ for text of these licenses. 
- ---------------------------------------- -Eclipse Distribution License - v 1.0 -see: licenses/eclipse-distribution-license-1.0 - -jakarta.activation-api-1.2.2 -jakarta.xml.bind-api-2.3.3 - ---------------------------------------- -Eclipse Public License - v 2.0 -see: licenses/eclipse-public-license-2.0 - -jakarta.annotation-api-1.3.5 -jakarta.ws.rs-api-2.1.6 -javax.annotation-api-1.3.2 -javax.ws.rs-api-2.1.1 -hk2-api-2.6.1 -hk2-locator-2.6.1 -hk2-utils-2.6.1 -osgi-resource-locator-1.0.3 -aopalliance-repackaged-2.6.1 -jakarta.inject-2.6.1 -jersey-client-2.39.1 -jersey-common-2.39.1 -jersey-container-servlet-2.39.1 -jersey-container-servlet-core-2.39.1 -jersey-hk2-2.39.1 -jersey-server-2.39.1 - ---------------------------------------- -CDDL 1.1 + GPLv2 with classpath exception -see: licenses/CDDL+GPL-1.1 -javax.activation-api-1.2.0 -javax.annotation-api-1.3.2 -javax.servlet-api-3.1.0 -javax.ws.rs-api-2.1.1 -jaxb-api-2.3.1 -activation-1.1.1 - ---------------------------------------- -MIT License - -argparse4j-0.7.0, see: licenses/argparse-MIT -jopt-simple-5.0.4, see: licenses/jopt-simple-MIT -slf4j-api-1.7.36, see: licenses/slf4j-MIT -slf4j-reload4j-1.7.36, see: licenses/slf4j-MIT - ---------------------------------------- -BSD 2-Clause - -zstd-jni-1.5.5-1 see: licenses/zstd-jni-BSD-2-clause - ---------------------------------------- -BSD 3-Clause - -jline-3.22.0, see: licenses/jline-BSD-3-clause -paranamer-2.8, see: licenses/paranamer-BSD-3-clause - ---------------------------------------- -Do What The F*ck You Want To Public License -see: licenses/DWTFYWTPL - -reflections-0.9.12 diff --git a/learn-kafka/NOTICE b/learn-kafka/NOTICE deleted file mode 100644 index a50c86d..0000000 --- a/learn-kafka/NOTICE +++ /dev/null @@ -1,856 +0,0 @@ -Apache Kafka -Copyright 2021 The Apache Software Foundation. - -This product includes software developed at -The Apache Software Foundation (https://www.apache.org/). - -This distribution has a binary dependency on jersey, which is available under the CDDL -License. The source code of jersey can be found at https://github.com/jersey/jersey/. - -This distribution has a binary test dependency on jqwik, which is available under -the Eclipse Public License 2.0. The source code can be found at -https://github.com/jlink/jqwik. - -The streams-scala (streams/streams-scala) module was donated by Lightbend and the original code was copyrighted by them: -Copyright (C) 2018 Lightbend Inc. -Copyright (C) 2017-2018 Alexis Seigneurin. - -This project contains the following code copied from Apache Hadoop: -clients/src/main/java/org/apache/kafka/common/utils/PureJavaCrc32C.java -Some portions of this file Copyright (c) 2004-2006 Intel Corporation and licensed under the BSD license. - -This project contains the following code copied from Apache Hive: -streams/src/main/java/org/apache/kafka/streams/state/internals/Murmur3.java - -// ------------------------------------------------------------------ -// NOTICE file corresponding to the section 4d of The Apache License, -// Version 2.0, in this case for -// ------------------------------------------------------------------ - -# Notices for Eclipse GlassFish - -This content is produced and maintained by the Eclipse GlassFish project. - -* Project home: https://projects.eclipse.org/projects/ee4j.glassfish - -## Trademarks - -Eclipse GlassFish, and GlassFish are trademarks of the Eclipse Foundation. - -## Copyright - -All content is the property of the respective authors or their employers. 
For -more information regarding authorship of content, please consult the listed -source code repository logs. - -## Declared Project Licenses - -This program and the accompanying materials are made available under the terms -of the Eclipse Public License v. 2.0 which is available at -http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made -available under the following Secondary Licenses when the conditions for such -availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU -General Public License, version 2 with the GNU Classpath Exception which is -available at https://www.gnu.org/software/classpath/license.html. - -SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 - -## Source Code - -The project maintains the following source code repositories: - -* https://github.com/eclipse-ee4j/glassfish-ha-api -* https://github.com/eclipse-ee4j/glassfish-logging-annotation-processor -* https://github.com/eclipse-ee4j/glassfish-shoal -* https://github.com/eclipse-ee4j/glassfish-cdi-porting-tck -* https://github.com/eclipse-ee4j/glassfish-jsftemplating -* https://github.com/eclipse-ee4j/glassfish-hk2-extra -* https://github.com/eclipse-ee4j/glassfish-hk2 -* https://github.com/eclipse-ee4j/glassfish-fighterfish - -## Third-party Content - -This project leverages the following third party content. - -None - -## Cryptography - -Content may contain encryption software. The country in which you are currently -may have restrictions on the import, possession, and use, and/or re-export to -another country, of encryption software. BEFORE using any encryption software, -please check the country's laws, regulations and policies concerning the import, -possession, or use, and re-export of encryption software, to see if this is -permitted. - - -Apache Yetus - Audience Annotations -Copyright 2015-2017 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - - -Apache Commons CLI -Copyright 2001-2017 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - - -Apache Commons Lang -Copyright 2001-2018 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - - -# Jackson JSON processor - -Jackson is a high-performance, Free/Open Source JSON processing library. -It was originally written by Tatu Saloranta (tatu.saloranta@iki.fi), and has -been in development since 2007. -It is currently developed by a community of developers, as well as supported -commercially by FasterXML.com. - -## Licensing - -Jackson core and extension components may licensed under different licenses. -To find the details that apply to this artifact see the accompanying LICENSE file. -For more information, including possible other licensing options, contact -FasterXML.com (http://fasterxml.com). - -## Credits - -A list of contributors may be found from CREDITS file, which is included -in some artifacts (usually source distributions); but is always available -from the source code management (SCM) system project uses. - - -# Notices for Eclipse Project for JAF - -This content is produced and maintained by the Eclipse Project for JAF project. - -* Project home: https://projects.eclipse.org/projects/ee4j.jaf - -## Copyright - -All content is the property of the respective authors or their employers. 
For -more information regarding authorship of content, please consult the listed -source code repository logs. - -## Declared Project Licenses - -This program and the accompanying materials are made available under the terms -of the Eclipse Distribution License v. 1.0, -which is available at http://www.eclipse.org/org/documents/edl-v10.php. - -SPDX-License-Identifier: BSD-3-Clause - -## Source Code - -The project maintains the following source code repositories: - -* https://github.com/eclipse-ee4j/jaf - -## Third-party Content - -This project leverages the following third party content. - -JUnit (4.12) - -* License: Eclipse Public License - - -# Notices for Jakarta Annotations - -This content is produced and maintained by the Jakarta Annotations project. - - * Project home: https://projects.eclipse.org/projects/ee4j.ca - -## Trademarks - -Jakarta Annotations is a trademark of the Eclipse Foundation. - -## Declared Project Licenses - -This program and the accompanying materials are made available under the terms -of the Eclipse Public License v. 2.0 which is available at -http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made -available under the following Secondary Licenses when the conditions for such -availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU -General Public License, version 2 with the GNU Classpath Exception which is -available at https://www.gnu.org/software/classpath/license.html. - -SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 - -## Source Code - -The project maintains the following source code repositories: - - * https://github.com/eclipse-ee4j/common-annotations-api - -## Third-party Content - -## Cryptography - -Content may contain encryption software. The country in which you are currently -may have restrictions on the import, possession, and use, and/or re-export to -another country, of encryption software. BEFORE using any encryption software, -please check the country's laws, regulations and policies concerning the import, -possession, or use, and re-export of encryption software, to see if this is -permitted. - - -# Notices for the Jakarta RESTful Web Services Project - -This content is produced and maintained by the **Jakarta RESTful Web Services** -project. - -* Project home: https://projects.eclipse.org/projects/ee4j.jaxrs - -## Trademarks - -**Jakarta RESTful Web Services** is a trademark of the Eclipse Foundation. - -## Copyright - -All content is the property of the respective authors or their employers. For -more information regarding authorship of content, please consult the listed -source code repository logs. - -## Declared Project Licenses - -This program and the accompanying materials are made available under the terms -of the Eclipse Public License v. 2.0 which is available at -http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made -available under the following Secondary Licenses when the conditions for such -availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU -General Public License, version 2 with the GNU Classpath Exception which is -available at https://www.gnu.org/software/classpath/license.html. - -SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 - -## Source Code - -The project maintains the following source code repositories: - -* https://github.com/eclipse-ee4j/jaxrs-api - -## Third-party Content - -This project leverages the following third party content. 
- -javaee-api (7.0) - -* License: Apache-2.0 AND W3C - -JUnit (4.11) - -* License: Common Public License 1.0 - -Mockito (2.16.0) - -* Project: http://site.mockito.org -* Source: https://github.com/mockito/mockito/releases/tag/v2.16.0 - -## Cryptography - -Content may contain encryption software. The country in which you are currently -may have restrictions on the import, possession, and use, and/or re-export to -another country, of encryption software. BEFORE using any encryption software, -please check the country's laws, regulations and policies concerning the import, -possession, or use, and re-export of encryption software, to see if this is -permitted. - - -# Notices for Eclipse Project for JAXB - -This content is produced and maintained by the Eclipse Project for JAXB project. - -* Project home: https://projects.eclipse.org/projects/ee4j.jaxb - -## Trademarks - -Eclipse Project for JAXB is a trademark of the Eclipse Foundation. - -## Copyright - -All content is the property of the respective authors or their employers. For -more information regarding authorship of content, please consult the listed -source code repository logs. - -## Declared Project Licenses - -This program and the accompanying materials are made available under the terms -of the Eclipse Distribution License v. 1.0 which is available -at http://www.eclipse.org/org/documents/edl-v10.php. - -SPDX-License-Identifier: BSD-3-Clause - -## Source Code - -The project maintains the following source code repositories: - -* https://github.com/eclipse-ee4j/jaxb-api - -## Third-party Content - -This project leverages the following third party content. - -None - -## Cryptography - -Content may contain encryption software. The country in which you are currently -may have restrictions on the import, possession, and use, and/or re-export to -another country, of encryption software. BEFORE using any encryption software, -please check the country's laws, regulations and policies concerning the import, -possession, or use, and re-export of encryption software, to see if this is -permitted. - - -# Notice for Jersey -This content is produced and maintained by the Eclipse Jersey project. - -* Project home: https://projects.eclipse.org/projects/ee4j.jersey - -## Trademarks -Eclipse Jersey is a trademark of the Eclipse Foundation. - -## Copyright - -All content is the property of the respective authors or their employers. For -more information regarding authorship of content, please consult the listed -source code repository logs. - -## Declared Project Licenses - -This program and the accompanying materials are made available under the terms -of the Eclipse Public License v. 2.0 which is available at -http://www.eclipse.org/legal/epl-2.0. This Source Code may also be made -available under the following Secondary Licenses when the conditions for such -availability set forth in the Eclipse Public License v. 2.0 are satisfied: GNU -General Public License, version 2 with the GNU Classpath Exception which is -available at https://www.gnu.org/software/classpath/license.html. - -SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 - -## Source Code -The project maintains the following source code repositories: - -* https://github.com/eclipse-ee4j/jersey - -## Third-party Content - -Angular JS, v1.6.6 -* License MIT (http://www.opensource.org/licenses/mit-license.php) -* Project: http://angularjs.org -* Copyright: (c) 2010-2017 Google, Inc. 
- -aopalliance Version 1 -* License: all the source code provided by AOP Alliance is Public Domain. -* Project: http://aopalliance.sourceforge.net -* Copyright: Material in the public domain is not protected by copyright - -Bean Validation API 2.0.2 -* License: Apache License, 2.0 -* Project: http://beanvalidation.org/1.1/ -* Copyright: 2009, Red Hat, Inc. and/or its affiliates, and individual contributors -* by the @authors tag. - -Hibernate Validator CDI, 6.1.2.Final -* License: Apache License, 2.0 -* Project: https://beanvalidation.org/ -* Repackaged in org.glassfish.jersey.server.validation.internal.hibernate - -Bootstrap v3.3.7 -* License: MIT license (https://github.com/twbs/bootstrap/blob/master/LICENSE) -* Project: http://getbootstrap.com -* Copyright: 2011-2016 Twitter, Inc - -Google Guava Version 18.0 -* License: Apache License, 2.0 -* Copyright (C) 2009 The Guava Authors - -javax.inject Version: 1 -* License: Apache License, 2.0 -* Copyright (C) 2009 The JSR-330 Expert Group - -Javassist Version 3.25.0-GA -* License: Apache License, 2.0 -* Project: http://www.javassist.org/ -* Copyright (C) 1999- Shigeru Chiba. All Rights Reserved. - -Jackson JAX-RS Providers Version 2.10.1 -* License: Apache License, 2.0 -* Project: https://github.com/FasterXML/jackson-jaxrs-providers -* Copyright: (c) 2009-2011 FasterXML, LLC. All rights reserved unless otherwise indicated. - -jQuery v1.12.4 -* License: jquery.org/license -* Project: jquery.org -* Copyright: (c) jQuery Foundation - -jQuery Barcode plugin 0.3 -* License: MIT & GPL (http://www.opensource.org/licenses/mit-license.php & http://www.gnu.org/licenses/gpl.html) -* Project: http://www.pasella.it/projects/jQuery/barcode -* Copyright: (c) 2009 Antonello Pasella antonello.pasella@gmail.com - -JSR-166 Extension - JEP 266 -* License: CC0 -* No copyright -* Written by Doug Lea with assistance from members of JCP JSR-166 Expert Group and released to the public domain, as explained at http://creativecommons.org/publicdomain/zero/1.0/ - -KineticJS, v4.7.1 -* License: MIT license (http://www.opensource.org/licenses/mit-license.php) -* Project: http://www.kineticjs.com, https://github.com/ericdrowell/KineticJS -* Copyright: Eric Rowell - -org.objectweb.asm Version 8.0 -* License: Modified BSD (http://asm.objectweb.org/license.html) -* Copyright (c) 2000-2011 INRIA, France Telecom. All rights reserved. - -org.osgi.core version 6.0.0 -* License: Apache License, 2.0 -* Copyright (c) OSGi Alliance (2005, 2008). All Rights Reserved. - -org.glassfish.jersey.server.internal.monitoring.core -* License: Apache License, 2.0 -* Copyright (c) 2015-2018 Oracle and/or its affiliates. All rights reserved. -* Copyright 2010-2013 Coda Hale and Yammer, Inc. - -W3.org documents -* License: W3C License -* Copyright: Copyright (c) 1994-2001 World Wide Web Consortium, (Massachusetts Institute of Technology, Institut National de Recherche en Informatique et en Automatique, Keio University). All Rights Reserved. http://www.w3.org/Consortium/Legal/ - - -============================================================== - Jetty Web Container - Copyright 1995-2018 Mort Bay Consulting Pty Ltd. -============================================================== - -The Jetty Web Container is Copyright Mort Bay Consulting Pty Ltd -unless otherwise noted. 
- -Jetty is dual licensed under both - - * The Apache 2.0 License - http://www.apache.org/licenses/LICENSE-2.0.html - - and - - * The Eclipse Public 1.0 License - http://www.eclipse.org/legal/epl-v10.html - -Jetty may be distributed under either license. - ------ -Eclipse - -The following artifacts are EPL. - * org.eclipse.jetty.orbit:org.eclipse.jdt.core - -The following artifacts are EPL and ASL2. - * org.eclipse.jetty.orbit:javax.security.auth.message - - -The following artifacts are EPL and CDDL 1.0. - * org.eclipse.jetty.orbit:javax.mail.glassfish - - ------ -Oracle - -The following artifacts are CDDL + GPLv2 with classpath exception. -https://glassfish.dev.java.net/nonav/public/CDDL+GPL.html - - * javax.servlet:javax.servlet-api - * javax.annotation:javax.annotation-api - * javax.transaction:javax.transaction-api - * javax.websocket:javax.websocket-api - ------ -Oracle OpenJDK - -If ALPN is used to negotiate HTTP/2 connections, then the following -artifacts may be included in the distribution or downloaded when ALPN -module is selected. - - * java.sun.security.ssl - -These artifacts replace/modify OpenJDK classes. The modifications -are hosted at github and both modified and original are under GPL v2 with -classpath exceptions. -http://openjdk.java.net/legal/gplv2+ce.html - - ------ -OW2 - -The following artifacts are licensed by the OW2 Foundation according to the -terms of http://asm.ow2.org/license.html - -org.ow2.asm:asm-commons -org.ow2.asm:asm - - ------ -Apache - -The following artifacts are ASL2 licensed. - -org.apache.taglibs:taglibs-standard-spec -org.apache.taglibs:taglibs-standard-impl - - ------ -MortBay - -The following artifacts are ASL2 licensed. Based on selected classes from -following Apache Tomcat jars, all ASL2 licensed. - -org.mortbay.jasper:apache-jsp - org.apache.tomcat:tomcat-jasper - org.apache.tomcat:tomcat-juli - org.apache.tomcat:tomcat-jsp-api - org.apache.tomcat:tomcat-el-api - org.apache.tomcat:tomcat-jasper-el - org.apache.tomcat:tomcat-api - org.apache.tomcat:tomcat-util-scan - org.apache.tomcat:tomcat-util - -org.mortbay.jasper:apache-el - org.apache.tomcat:tomcat-jasper-el - org.apache.tomcat:tomcat-el-api - - ------ -Mortbay - -The following artifacts are CDDL + GPLv2 with classpath exception. - -https://glassfish.dev.java.net/nonav/public/CDDL+GPL.html - -org.eclipse.jetty.toolchain:jetty-schemas - ------ -Assorted - -The UnixCrypt.java code implements the one way cryptography used by -Unix systems for simple password protection. Copyright 1996 Aki Yoshida, -modified April 2001 by Iris Van den Broeke, Daniel Deville. -Permission to use, copy, modify and distribute UnixCrypt -for non-commercial or commercial purposes and without fee is -granted provided that the copyright notice appears in all copies. - - -Apache log4j -Copyright 2007 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - - -Maven Artifact -Copyright 2001-2019 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - - -This product includes software developed by the Indiana University - Extreme! Lab (http://www.extreme.indiana.edu/). - -This product includes software developed by -The Apache Software Foundation (http://www.apache.org/). - -This product includes software developed by -ThoughtWorks (http://www.thoughtworks.com). - -This product includes software developed by -javolution (http://javolution.org/). 
- -This product includes software developed by -Rome (https://rome.dev.java.net/). - - -Scala -Copyright (c) 2002-2020 EPFL -Copyright (c) 2011-2020 Lightbend, Inc. - -Scala includes software developed at -LAMP/EPFL (https://lamp.epfl.ch/) and -Lightbend, Inc. (https://www.lightbend.com/). - -Licensed under the Apache License, Version 2.0 (the "License"). -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -This software includes projects with other licenses -- see `doc/LICENSE.md`. - - -Apache ZooKeeper - Server -Copyright 2008-2021 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - - -Apache ZooKeeper - Jute -Copyright 2008-2021 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - - -The Netty Project - ================= - -Please visit the Netty web site for more information: - - * https://netty.io/ - -Copyright 2014 The Netty Project - -The Netty Project licenses this file to you under the Apache License, -version 2.0 (the "License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at: - - https://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -License for the specific language governing permissions and limitations -under the License. - -Also, please refer to each LICENSE.<component>.txt file, which is located in -the 'license' directory of the distribution file, for the license terms of the -components that this product depends on. - -------------------------------------------------------------------------------- -This product contains the extensions to Java Collections Framework which has -been derived from the works by JSR-166 EG, Doug Lea, and Jason T. 
Greene: - - * LICENSE: - * license/LICENSE.jsr166y.txt (Public Domain) - * HOMEPAGE: - * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ - * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ - -This product contains a modified version of Robert Harder's Public Domain -Base64 Encoder and Decoder, which can be obtained at: - - * LICENSE: - * license/LICENSE.base64.txt (Public Domain) - * HOMEPAGE: - * http://iharder.sourceforge.net/current/java/base64/ - -This product contains a modified portion of 'Webbit', an event based -WebSocket and HTTP server, which can be obtained at: - - * LICENSE: - * license/LICENSE.webbit.txt (BSD License) - * HOMEPAGE: - * https://github.com/joewalnes/webbit - -This product contains a modified portion of 'SLF4J', a simple logging -facade for Java, which can be obtained at: - - * LICENSE: - * license/LICENSE.slf4j.txt (MIT License) - * HOMEPAGE: - * https://www.slf4j.org/ - -This product contains a modified portion of 'Apache Harmony', an open source -Java SE, which can be obtained at: - - * NOTICE: - * license/NOTICE.harmony.txt - * LICENSE: - * license/LICENSE.harmony.txt (Apache License 2.0) - * HOMEPAGE: - * https://archive.apache.org/dist/harmony/ - -This product contains a modified portion of 'jbzip2', a Java bzip2 compression -and decompression library written by Matthew J. Francis. It can be obtained at: - - * LICENSE: - * license/LICENSE.jbzip2.txt (MIT License) - * HOMEPAGE: - * https://code.google.com/p/jbzip2/ - -This product contains a modified portion of 'libdivsufsort', a C API library to construct -the suffix array and the Burrows-Wheeler transformed string for any input string of -a constant-size alphabet written by Yuta Mori. It can be obtained at: - - * LICENSE: - * license/LICENSE.libdivsufsort.txt (MIT License) - * HOMEPAGE: - * https://github.com/y-256/libdivsufsort - -This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, - which can be obtained at: - - * LICENSE: - * license/LICENSE.jctools.txt (ASL2 License) - * HOMEPAGE: - * https://github.com/JCTools/JCTools - -This product optionally depends on 'JZlib', a re-implementation of zlib in -pure Java, which can be obtained at: - - * LICENSE: - * license/LICENSE.jzlib.txt (BSD style License) - * HOMEPAGE: - * http://www.jcraft.com/jzlib/ - -This product optionally depends on 'Compress-LZF', a Java library for encoding and -decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: - - * LICENSE: - * license/LICENSE.compress-lzf.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/ning/compress - -This product optionally depends on 'lz4', a LZ4 Java compression -and decompression library written by Adrien Grand. It can be obtained at: - - * LICENSE: - * license/LICENSE.lz4.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/jpountz/lz4-java - -This product optionally depends on 'lzma-java', a LZMA Java compression -and decompression library, which can be obtained at: - - * LICENSE: - * license/LICENSE.lzma-java.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/jponge/lzma-java - -This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression -and decompression library written by William Kinney. 
It can be obtained at: - - * LICENSE: - * license/LICENSE.jfastlz.txt (MIT License) - * HOMEPAGE: - * https://code.google.com/p/jfastlz/ - -This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data -interchange format, which can be obtained at: - - * LICENSE: - * license/LICENSE.protobuf.txt (New BSD License) - * HOMEPAGE: - * https://github.com/google/protobuf - -This product optionally depends on 'Bouncy Castle Crypto APIs' to generate -a temporary self-signed X.509 certificate when the JVM does not provide the -equivalent functionality. It can be obtained at: - - * LICENSE: - * license/LICENSE.bouncycastle.txt (MIT License) - * HOMEPAGE: - * https://www.bouncycastle.org/ - -This product optionally depends on 'Snappy', a compression library produced -by Google Inc, which can be obtained at: - - * LICENSE: - * license/LICENSE.snappy.txt (New BSD License) - * HOMEPAGE: - * https://github.com/google/snappy - -This product optionally depends on 'JBoss Marshalling', an alternative Java -serialization API, which can be obtained at: - - * LICENSE: - * license/LICENSE.jboss-marshalling.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/jboss-remoting/jboss-marshalling - -This product optionally depends on 'Caliper', Google's micro- -benchmarking framework, which can be obtained at: - - * LICENSE: - * license/LICENSE.caliper.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/google/caliper - -This product optionally depends on 'Apache Commons Logging', a logging -framework, which can be obtained at: - - * LICENSE: - * license/LICENSE.commons-logging.txt (Apache License 2.0) - * HOMEPAGE: - * https://commons.apache.org/logging/ - -This product optionally depends on 'Apache Log4J', a logging framework, which -can be obtained at: - - * LICENSE: - * license/LICENSE.log4j.txt (Apache License 2.0) - * HOMEPAGE: - * https://logging.apache.org/log4j/ - -This product optionally depends on 'Aalto XML', an ultra-high performance -non-blocking XML processor, which can be obtained at: - - * LICENSE: - * license/LICENSE.aalto-xml.txt (Apache License 2.0) - * HOMEPAGE: - * http://wiki.fasterxml.com/AaltoHome - -This product contains a modified version of 'HPACK', a Java implementation of -the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at: - - * LICENSE: - * license/LICENSE.hpack.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/twitter/hpack - -This product contains a modified version of 'HPACK', a Java implementation of -the HTTP/2 HPACK algorithm written by Cory Benfield. It can be obtained at: - - * LICENSE: - * license/LICENSE.hyper-hpack.txt (MIT License) - * HOMEPAGE: - * https://github.com/python-hyper/hpack/ - -This product contains a modified version of 'HPACK', a Java implementation of -the HTTP/2 HPACK algorithm written by Tatsuhiro Tsujikawa. It can be obtained at: - - * LICENSE: - * license/LICENSE.nghttp2-hpack.txt (MIT License) - * HOMEPAGE: - * https://github.com/nghttp2/nghttp2/ - -This product contains a modified portion of 'Apache Commons Lang', a Java library -that provides utilities for the java.lang API, which can be obtained at: - - * LICENSE: - * license/LICENSE.commons-lang.txt (Apache License 2.0) - * HOMEPAGE: - * https://commons.apache.org/proper/commons-lang/ - - -This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. 
- - * LICENSE: - * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/takari/maven-wrapper - -This product contains the dnsinfo.h header file, that provides a way to retrieve the system DNS configuration on MacOS. -This private header is also used by Apple's open source - mDNSResponder (https://opensource.apple.com/tarballs/mDNSResponder/). - - * LICENSE: - * license/LICENSE.dnsinfo.txt (Apple Public Source License 2.0) - * HOMEPAGE: - * https://www.opensource.apple.com/source/configd/configd-453.19/dnsinfo/dnsinfo.h \ No newline at end of file diff --git a/learn-kafka/bin/connect-distributed.sh b/learn-kafka/bin/connect-distributed.sh deleted file mode 100644 index b8088ad..0000000 --- a/learn-kafka/bin/connect-distributed.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ $# -lt 1 ]; -then - echo "USAGE: $0 [-daemon] connect-distributed.properties" - exit 1 -fi - -base_dir=$(dirname $0) - -if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -fi - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xms256M -Xmx2G" -fi - -EXTRA_ARGS=${EXTRA_ARGS-'-name connectDistributed'} - -COMMAND=$1 -case $COMMAND in - -daemon) - EXTRA_ARGS="-daemon "$EXTRA_ARGS - shift - ;; - *) - ;; -esac - -exec $(dirname $0)/kafka-run-class.sh $EXTRA_ARGS org.apache.kafka.connect.cli.ConnectDistributed "$@" diff --git a/learn-kafka/bin/connect-mirror-maker.sh b/learn-kafka/bin/connect-mirror-maker.sh deleted file mode 100644 index 8e2b2e1..0000000 --- a/learn-kafka/bin/connect-mirror-maker.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -if [ $# -lt 1 ]; -then - echo "USAGE: $0 [-daemon] mm2.properties" - exit 1 -fi - -base_dir=$(dirname $0) - -if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -fi - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xms256M -Xmx2G" -fi - -EXTRA_ARGS=${EXTRA_ARGS-'-name mirrorMaker'} - -COMMAND=$1 -case $COMMAND in - -daemon) - EXTRA_ARGS="-daemon "$EXTRA_ARGS - shift - ;; - *) - ;; -esac - -exec $(dirname $0)/kafka-run-class.sh $EXTRA_ARGS org.apache.kafka.connect.mirror.MirrorMaker "$@" diff --git a/learn-kafka/bin/connect-standalone.sh b/learn-kafka/bin/connect-standalone.sh deleted file mode 100644 index 441069f..0000000 --- a/learn-kafka/bin/connect-standalone.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ $# -lt 1 ]; -then - echo "USAGE: $0 [-daemon] connect-standalone.properties" - exit 1 -fi - -base_dir=$(dirname $0) - -if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -fi - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xms256M -Xmx2G" -fi - -EXTRA_ARGS=${EXTRA_ARGS-'-name connectStandalone'} - -COMMAND=$1 -case $COMMAND in - -daemon) - EXTRA_ARGS="-daemon "$EXTRA_ARGS - shift - ;; - *) - ;; -esac - -exec $(dirname $0)/kafka-run-class.sh $EXTRA_ARGS org.apache.kafka.connect.cli.ConnectStandalone "$@" diff --git a/learn-kafka/bin/kafka-acls.sh b/learn-kafka/bin/kafka-acls.sh deleted file mode 100644 index 8fa6554..0000000 --- a/learn-kafka/bin/kafka-acls.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -exec $(dirname $0)/kafka-run-class.sh kafka.admin.AclCommand "$@" diff --git a/learn-kafka/bin/kafka-broker-api-versions.sh b/learn-kafka/bin/kafka-broker-api-versions.sh deleted file mode 100644 index 4f560a0..0000000 --- a/learn-kafka/bin/kafka-broker-api-versions.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh kafka.admin.BrokerApiVersionsCommand "$@" diff --git a/learn-kafka/bin/kafka-cluster.sh b/learn-kafka/bin/kafka-cluster.sh deleted file mode 100644 index f09858c..0000000 --- a/learn-kafka/bin/kafka-cluster.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.ClusterTool "$@" diff --git a/learn-kafka/bin/kafka-configs.sh b/learn-kafka/bin/kafka-configs.sh deleted file mode 100644 index 2f9eb8c..0000000 --- a/learn-kafka/bin/kafka-configs.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -exec $(dirname $0)/kafka-run-class.sh kafka.admin.ConfigCommand "$@" diff --git a/learn-kafka/bin/kafka-console-consumer.sh b/learn-kafka/bin/kafka-console-consumer.sh deleted file mode 100644 index dbaac2b..0000000 --- a/learn-kafka/bin/kafka-console-consumer.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xmx512M" -fi - -exec $(dirname $0)/kafka-run-class.sh kafka.tools.ConsoleConsumer "$@" diff --git a/learn-kafka/bin/kafka-console-producer.sh b/learn-kafka/bin/kafka-console-producer.sh deleted file mode 100644 index e5187b8..0000000 --- a/learn-kafka/bin/kafka-console-producer.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xmx512M" -fi -exec $(dirname $0)/kafka-run-class.sh kafka.tools.ConsoleProducer "$@" diff --git a/learn-kafka/bin/kafka-consumer-groups.sh b/learn-kafka/bin/kafka-consumer-groups.sh deleted file mode 100644 index feb063d..0000000 --- a/learn-kafka/bin/kafka-consumer-groups.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -exec $(dirname $0)/kafka-run-class.sh kafka.admin.ConsumerGroupCommand "$@" diff --git a/learn-kafka/bin/kafka-consumer-perf-test.sh b/learn-kafka/bin/kafka-consumer-perf-test.sh deleted file mode 100644 index 4eebe87..0000000 --- a/learn-kafka/bin/kafka-consumer-perf-test.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xmx512M" -fi -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.ConsumerPerformance "$@" diff --git a/learn-kafka/bin/kafka-delegation-tokens.sh b/learn-kafka/bin/kafka-delegation-tokens.sh deleted file mode 100644 index 9f8bb13..0000000 --- a/learn-kafka/bin/kafka-delegation-tokens.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.DelegationTokenCommand "$@" diff --git a/learn-kafka/bin/kafka-delete-records.sh b/learn-kafka/bin/kafka-delete-records.sh deleted file mode 100644 index 8726f91..0000000 --- a/learn-kafka/bin/kafka-delete-records.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -exec $(dirname $0)/kafka-run-class.sh kafka.admin.DeleteRecordsCommand "$@" diff --git a/learn-kafka/bin/kafka-dump-log.sh b/learn-kafka/bin/kafka-dump-log.sh deleted file mode 100644 index a97ea7d..0000000 --- a/learn-kafka/bin/kafka-dump-log.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh kafka.tools.DumpLogSegments "$@" diff --git a/learn-kafka/bin/kafka-e2e-latency.sh b/learn-kafka/bin/kafka-e2e-latency.sh deleted file mode 100644 index 32d1063..0000000 --- a/learn-kafka/bin/kafka-e2e-latency.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.EndToEndLatency "$@" diff --git a/learn-kafka/bin/kafka-features.sh b/learn-kafka/bin/kafka-features.sh deleted file mode 100644 index 9dd9f16..0000000 --- a/learn-kafka/bin/kafka-features.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -exec $(dirname $0)/kafka-run-class.sh kafka.admin.FeatureCommand "$@" diff --git a/learn-kafka/bin/kafka-get-offsets.sh b/learn-kafka/bin/kafka-get-offsets.sh deleted file mode 100644 index 993a202..0000000 --- a/learn-kafka/bin/kafka-get-offsets.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh kafka.tools.GetOffsetShell "$@" diff --git a/learn-kafka/bin/kafka-jmx.sh b/learn-kafka/bin/kafka-jmx.sh deleted file mode 100644 index 88b3874..0000000 --- a/learn-kafka/bin/kafka-jmx.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.JmxTool "$@" diff --git a/learn-kafka/bin/kafka-leader-election.sh b/learn-kafka/bin/kafka-leader-election.sh deleted file mode 100644 index 88baef3..0000000 --- a/learn-kafka/bin/kafka-leader-election.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -exec $(dirname $0)/kafka-run-class.sh kafka.admin.LeaderElectionCommand "$@" diff --git a/learn-kafka/bin/kafka-log-dirs.sh b/learn-kafka/bin/kafka-log-dirs.sh deleted file mode 100644 index dc16edc..0000000 --- a/learn-kafka/bin/kafka-log-dirs.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh kafka.admin.LogDirsCommand "$@" diff --git a/learn-kafka/bin/kafka-metadata-quorum.sh b/learn-kafka/bin/kafka-metadata-quorum.sh deleted file mode 100644 index 3b25c7d..0000000 --- a/learn-kafka/bin/kafka-metadata-quorum.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.MetadataQuorumCommand "$@" diff --git a/learn-kafka/bin/kafka-metadata-shell.sh b/learn-kafka/bin/kafka-metadata-shell.sh deleted file mode 100644 index 289f0c1..0000000 --- a/learn-kafka/bin/kafka-metadata-shell.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.shell.MetadataShell "$@" diff --git a/learn-kafka/bin/kafka-mirror-maker.sh b/learn-kafka/bin/kafka-mirror-maker.sh deleted file mode 100644 index 981f271..0000000 --- a/learn-kafka/bin/kafka-mirror-maker.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh kafka.tools.MirrorMaker "$@" diff --git a/learn-kafka/bin/kafka-producer-perf-test.sh b/learn-kafka/bin/kafka-producer-perf-test.sh deleted file mode 100644 index 73a6288..0000000 --- a/learn-kafka/bin/kafka-producer-perf-test.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xmx512M" -fi -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.ProducerPerformance "$@" diff --git a/learn-kafka/bin/kafka-reassign-partitions.sh b/learn-kafka/bin/kafka-reassign-partitions.sh deleted file mode 100644 index 4c7f1bc..0000000 --- a/learn-kafka/bin/kafka-reassign-partitions.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -exec $(dirname $0)/kafka-run-class.sh kafka.admin.ReassignPartitionsCommand "$@" diff --git a/learn-kafka/bin/kafka-replica-verification.sh b/learn-kafka/bin/kafka-replica-verification.sh deleted file mode 100644 index 4960836..0000000 --- a/learn-kafka/bin/kafka-replica-verification.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh kafka.tools.ReplicaVerificationTool "$@" diff --git a/learn-kafka/bin/kafka-run-class.sh b/learn-kafka/bin/kafka-run-class.sh deleted file mode 100644 index 025fd8d..0000000 --- a/learn-kafka/bin/kafka-run-class.sh +++ /dev/null @@ -1,347 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ $# -lt 1 ]; -then - echo "USAGE: $0 [-daemon] [-name servicename] [-loggc] classname [opts]" - exit 1 -fi - -# CYGWIN == 1 if Cygwin is detected, else 0. -if [[ $(uname -a) =~ "CYGWIN" ]]; then - CYGWIN=1 -else - CYGWIN=0 -fi - -if [ -z "$INCLUDE_TEST_JARS" ]; then - INCLUDE_TEST_JARS=false -fi - -# Exclude jars not necessary for running commands. -regex="(-(test|test-sources|src|scaladoc|javadoc)\.jar|jar.asc|connect-file.*\.jar)$" -should_include_file() { - if [ "$INCLUDE_TEST_JARS" = true ]; then - return 0 - fi - file=$1 - if [ -z "$(echo "$file" | grep -E "$regex")" ] ; then - return 0 - else - return 1 - fi -} - -base_dir=$(dirname $0)/.. 
- -if [ -z "$SCALA_VERSION" ]; then - SCALA_VERSION=2.13.10 - if [[ -f "$base_dir/gradle.properties" ]]; then - SCALA_VERSION=`grep "^scalaVersion=" "$base_dir/gradle.properties" | cut -d= -f 2` - fi -fi - -if [ -z "$SCALA_BINARY_VERSION" ]; then - SCALA_BINARY_VERSION=$(echo $SCALA_VERSION | cut -f 1-2 -d '.') -fi - -# run ./gradlew copyDependantLibs to get all dependant jars in a local dir -shopt -s nullglob -if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then - for dir in "$base_dir"/core/build/dependant-libs-${SCALA_VERSION}*; - do - CLASSPATH="$CLASSPATH:$dir/*" - done -fi - -for file in "$base_dir"/examples/build/libs/kafka-examples*.jar; -do - if should_include_file "$file"; then - CLASSPATH="$CLASSPATH":"$file" - fi -done - -if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then - clients_lib_dir=$(dirname $0)/../clients/build/libs - streams_lib_dir=$(dirname $0)/../streams/build/libs - streams_dependant_clients_lib_dir=$(dirname $0)/../streams/build/dependant-libs-${SCALA_VERSION} -else - clients_lib_dir=/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs - streams_lib_dir=$clients_lib_dir - streams_dependant_clients_lib_dir=$streams_lib_dir -fi - - -for file in "$clients_lib_dir"/kafka-clients*.jar; -do - if should_include_file "$file"; then - CLASSPATH="$CLASSPATH":"$file" - fi -done - -for file in "$streams_lib_dir"/kafka-streams*.jar; -do - if should_include_file "$file"; then - CLASSPATH="$CLASSPATH":"$file" - fi -done - -if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then - for file in "$base_dir"/streams/examples/build/libs/kafka-streams-examples*.jar; - do - if should_include_file "$file"; then - CLASSPATH="$CLASSPATH":"$file" - fi - done -else - VERSION_NO_DOTS=`echo $UPGRADE_KAFKA_STREAMS_TEST_VERSION | sed 's/\.//g'` - SHORT_VERSION_NO_DOTS=${VERSION_NO_DOTS:0:((${#VERSION_NO_DOTS} - 1))} # remove last char, ie, bug-fix number - for file in "$base_dir"/streams/upgrade-system-tests-$SHORT_VERSION_NO_DOTS/build/libs/kafka-streams-upgrade-system-tests*.jar; - do - if should_include_file "$file"; then - CLASSPATH="$file":"$CLASSPATH" - fi - done - if [ "$SHORT_VERSION_NO_DOTS" = "0100" ]; then - CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zkclient-0.8.jar":"$CLASSPATH" - CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zookeeper-3.4.6.jar":"$CLASSPATH" - fi - if [ "$SHORT_VERSION_NO_DOTS" = "0101" ]; then - CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zkclient-0.9.jar":"$CLASSPATH" - CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zookeeper-3.4.8.jar":"$CLASSPATH" - fi -fi - -for file in "$streams_dependant_clients_lib_dir"/rocksdb*.jar; -do - CLASSPATH="$CLASSPATH":"$file" -done - -for file in "$streams_dependant_clients_lib_dir"/*hamcrest*.jar; -do - CLASSPATH="$CLASSPATH":"$file" -done - -for file in "$base_dir"/shell/build/libs/kafka-shell*.jar; -do - if should_include_file "$file"; then - CLASSPATH="$CLASSPATH":"$file" - fi -done - -for dir in "$base_dir"/shell/build/dependant-libs-${SCALA_VERSION}*; -do - CLASSPATH="$CLASSPATH:$dir/*" -done - -for file in "$base_dir"/tools/build/libs/kafka-tools*.jar; -do - if should_include_file "$file"; then - CLASSPATH="$CLASSPATH":"$file" - fi -done - -for dir in "$base_dir"/tools/build/dependant-libs-${SCALA_VERSION}*; -do - CLASSPATH="$CLASSPATH:$dir/*" -done - -for file in "$base_dir"/trogdor/build/libs/trogdor-*.jar; -do - if should_include_file "$file"; then - CLASSPATH="$CLASSPATH":"$file" - fi -done - -for dir in 
"$base_dir"/trogdor/build/dependant-libs-${SCALA_VERSION}*; -do - CLASSPATH="$CLASSPATH:$dir/*" -done - -for cc_pkg in "api" "transforms" "runtime" "mirror" "mirror-client" "json" "tools" "basic-auth-extension" -do - for file in "$base_dir"/connect/${cc_pkg}/build/libs/connect-${cc_pkg}*.jar; - do - if should_include_file "$file"; then - CLASSPATH="$CLASSPATH":"$file" - fi - done - if [ -d "$base_dir/connect/${cc_pkg}/build/dependant-libs" ] ; then - CLASSPATH="$CLASSPATH:$base_dir/connect/${cc_pkg}/build/dependant-libs/*" - fi -done - -# classpath addition for release -for file in "$base_dir"/libs/*; -do - if should_include_file "$file"; then - CLASSPATH="$CLASSPATH":"$file" - fi -done - -for file in "$base_dir"/core/build/libs/kafka_${SCALA_BINARY_VERSION}*.jar; -do - if should_include_file "$file"; then - CLASSPATH="$CLASSPATH":"$file" - fi -done -shopt -u nullglob - -if [ -z "$CLASSPATH" ] ; then - echo "Classpath is empty. Please build the project first e.g. by running './gradlew jar -PscalaVersion=$SCALA_VERSION'" - exit 1 -fi - -# JMX settings -if [ -z "$KAFKA_JMX_OPTS" ]; then - KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false " -fi - -# JMX port to use -if [ $JMX_PORT ]; then - KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT " - if ! echo "$KAFKA_JMX_OPTS" | grep -qF -- '-Dcom.sun.management.jmxremote.rmi.port=' ; then - # If unset, set the RMI port to address issues with monitoring Kafka running in containers - KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT" - fi -fi - -# Log directory to use -if [ "x$LOG_DIR" = "x" ]; then - LOG_DIR="$base_dir/logs" -fi - -# Log4j settings -if [ -z "$KAFKA_LOG4J_OPTS" ]; then - # Log to console. This is a tool. - LOG4J_DIR="$base_dir/config/tools-log4j.properties" - # If Cygwin is detected, LOG4J_DIR is converted to Windows format. - (( CYGWIN )) && LOG4J_DIR=$(cygpath --path --mixed "${LOG4J_DIR}") - KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:${LOG4J_DIR}" -else - # create logs directory - if [ ! -d "$LOG_DIR" ]; then - mkdir -p "$LOG_DIR" - fi -fi - -# If Cygwin is detected, LOG_DIR is converted to Windows format. 
-(( CYGWIN )) && LOG_DIR=$(cygpath --path --mixed "${LOG_DIR}") -KAFKA_LOG4J_OPTS="-Dkafka.logs.dir=$LOG_DIR $KAFKA_LOG4J_OPTS" - -# Generic jvm settings you want to add -if [ -z "$KAFKA_OPTS" ]; then - KAFKA_OPTS="" -fi - -# Set Debug options if enabled -if [ "x$KAFKA_DEBUG" != "x" ]; then - - # Use default ports - DEFAULT_JAVA_DEBUG_PORT="5005" - - if [ -z "$JAVA_DEBUG_PORT" ]; then - JAVA_DEBUG_PORT="$DEFAULT_JAVA_DEBUG_PORT" - fi - - # Use the defaults if JAVA_DEBUG_OPTS was not set - DEFAULT_JAVA_DEBUG_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=${DEBUG_SUSPEND_FLAG:-n},address=$JAVA_DEBUG_PORT" - if [ -z "$JAVA_DEBUG_OPTS" ]; then - JAVA_DEBUG_OPTS="$DEFAULT_JAVA_DEBUG_OPTS" - fi - - echo "Enabling Java debug options: $JAVA_DEBUG_OPTS" - KAFKA_OPTS="$JAVA_DEBUG_OPTS $KAFKA_OPTS" -fi - -# Which java to use -if [ -z "$JAVA_HOME" ]; then - JAVA="java" -else - JAVA="$JAVA_HOME/bin/java" -fi - -# Memory options -if [ -z "$KAFKA_HEAP_OPTS" ]; then - KAFKA_HEAP_OPTS="-Xmx256M" -fi - -# JVM performance options -# MaxInlineLevel=15 is the default since JDK 14 and can be removed once older JDKs are no longer supported -if [ -z "$KAFKA_JVM_PERFORMANCE_OPTS" ]; then - KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent -XX:MaxInlineLevel=15 -Djava.awt.headless=true" -fi - -while [ $# -gt 0 ]; do - COMMAND=$1 - case $COMMAND in - -name) - DAEMON_NAME=$2 - CONSOLE_OUTPUT_FILE=$LOG_DIR/$DAEMON_NAME.out - shift 2 - ;; - -loggc) - if [ -z "$KAFKA_GC_LOG_OPTS" ]; then - GC_LOG_ENABLED="true" - fi - shift - ;; - -daemon) - DAEMON_MODE="true" - shift - ;; - *) - break - ;; - esac -done - -# GC options -GC_FILE_SUFFIX='-gc.log' -GC_LOG_FILE_NAME='' -if [ "x$GC_LOG_ENABLED" = "xtrue" ]; then - GC_LOG_FILE_NAME=$DAEMON_NAME$GC_FILE_SUFFIX - - # The first segment of the version number, which is '1' for releases before Java 9 - # it then becomes '9', '10', ... - # Some examples of the first line of `java --version`: - # 8 -> java version "1.8.0_152" - # 9.0.4 -> java version "9.0.4" - # 10 -> java version "10" 2018-03-20 - # 10.0.1 -> java version "10.0.1" 2018-04-17 - # We need to match to the end of the line to prevent sed from printing the characters that do not match - JAVA_MAJOR_VERSION=$("$JAVA" -version 2>&1 | sed -E -n 's/.* version "([0-9]*).*$/\1/p') - if [[ "$JAVA_MAJOR_VERSION" -ge "9" ]] ; then - KAFKA_GC_LOG_OPTS="-Xlog:gc*:file=$LOG_DIR/$GC_LOG_FILE_NAME:time,tags:filecount=10,filesize=100M" - else - KAFKA_GC_LOG_OPTS="-Xloggc:$LOG_DIR/$GC_LOG_FILE_NAME -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=100M" - fi -fi - -# Remove a possible colon prefix from the classpath (happens at lines like `CLASSPATH="$CLASSPATH:$file"` when CLASSPATH is blank) -# Syntax used on the right side is native Bash string manipulation; for more details see -# http://tldp.org/LDP/abs/html/string-manipulation.html, specifically the section titled "Substring Removal" -CLASSPATH=${CLASSPATH#:} - -# If Cygwin is detected, classpath is converted to Windows format. 
-(( CYGWIN )) && CLASSPATH=$(cygpath --path --mixed "${CLASSPATH}") - -# Launch mode -if [ "x$DAEMON_MODE" = "xtrue" ]; then - nohup "$JAVA" $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp "$CLASSPATH" $KAFKA_OPTS "$@" > "$CONSOLE_OUTPUT_FILE" 2>&1 < /dev/null & -else - exec "$JAVA" $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp "$CLASSPATH" $KAFKA_OPTS "$@" -fi diff --git a/learn-kafka/bin/kafka-server-start.sh b/learn-kafka/bin/kafka-server-start.sh deleted file mode 100644 index 5a53126..0000000 --- a/learn-kafka/bin/kafka-server-start.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ $# -lt 1 ]; -then - echo "USAGE: $0 [-daemon] server.properties [--override property=value]*" - exit 1 -fi -base_dir=$(dirname $0) - -if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" -fi - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xmx1G -Xms1G" -fi - -EXTRA_ARGS=${EXTRA_ARGS-'-name kafkaServer -loggc'} - -COMMAND=$1 -case $COMMAND in - -daemon) - EXTRA_ARGS="-daemon "$EXTRA_ARGS - shift - ;; - *) - ;; -esac - -exec $base_dir/kafka-run-class.sh $EXTRA_ARGS kafka.Kafka "$@" diff --git a/learn-kafka/bin/kafka-server-stop.sh b/learn-kafka/bin/kafka-server-stop.sh deleted file mode 100644 index 437189f..0000000 --- a/learn-kafka/bin/kafka-server-stop.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
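# Illustrative sketch, not part of the deleted files above: kafka-server-start.sh
# exports heap/log4j defaults and delegates to kafka-run-class.sh, which assembles the
# CLASSPATH, JMX, GC, and daemon options shown in this diff. Paths assume a stock
# Kafka distribution layout; the override value is an arbitrary example.
JMX_PORT=9999 KAFKA_HEAP_OPTS="-Xmx512M -Xms512M" \
  ./bin/kafka-server-start.sh -daemon config/kraft/server.properties \
  --override log.retention.hours=24
# -daemon takes kafka-run-class.sh's nohup branch; console output lands in
# $LOG_DIR/kafkaServer.out because EXTRA_ARGS defaults to "-name kafkaServer -loggc".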
-SIGNAL=${SIGNAL:-TERM} - -OSNAME=$(uname -s) -if [[ "$OSNAME" == "OS/390" ]]; then - if [ -z $JOBNAME ]; then - JOBNAME="KAFKSTRT" - fi - PIDS=$(ps -A -o pid,jobname,comm | grep -i $JOBNAME | grep java | grep -v grep | awk '{print $1}') -elif [[ "$OSNAME" == "OS400" ]]; then - PIDS=$(ps -Af | grep -i 'kafka\.Kafka' | grep java | grep -v grep | awk '{print $2}') -else - PIDS=$(ps ax | grep ' kafka\.Kafka ' | grep java | grep -v grep | awk '{print $1}') -fi - -if [ -z "$PIDS" ]; then - echo "No kafka server to stop" - exit 1 -else - kill -s $SIGNAL $PIDS -fi diff --git a/learn-kafka/bin/kafka-storage.sh b/learn-kafka/bin/kafka-storage.sh deleted file mode 100644 index eef9342..0000000 --- a/learn-kafka/bin/kafka-storage.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh kafka.tools.StorageTool "$@" diff --git a/learn-kafka/bin/kafka-streams-application-reset.sh b/learn-kafka/bin/kafka-streams-application-reset.sh deleted file mode 100644 index 26ab766..0000000 --- a/learn-kafka/bin/kafka-streams-application-reset.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xmx512M" -fi - -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.StreamsResetter "$@" diff --git a/learn-kafka/bin/kafka-topics.sh b/learn-kafka/bin/kafka-topics.sh deleted file mode 100644 index ad6a2d4..0000000 --- a/learn-kafka/bin/kafka-topics.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
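# Illustrative sketch, not part of the deleted files above: kafka-server-stop.sh
# discovers broker PIDs with ps/grep and honors a SIGNAL variable (default TERM),
# so a forced shutdown needs no script edits:
SIGNAL=KILL ./bin/kafka-server-stop.sh
# Prints "No kafka server to stop" and exits 1 when no matching process is found.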
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh kafka.admin.TopicCommand "$@" diff --git a/learn-kafka/bin/kafka-transactions.sh b/learn-kafka/bin/kafka-transactions.sh deleted file mode 100644 index 6fb5233..0000000 --- a/learn-kafka/bin/kafka-transactions.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.TransactionsCommand "$@" diff --git a/learn-kafka/bin/kafka-verifiable-consumer.sh b/learn-kafka/bin/kafka-verifiable-consumer.sh deleted file mode 100644 index 852847d..0000000 --- a/learn-kafka/bin/kafka-verifiable-consumer.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xmx512M" -fi -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.VerifiableConsumer "$@" diff --git a/learn-kafka/bin/kafka-verifiable-producer.sh b/learn-kafka/bin/kafka-verifiable-producer.sh deleted file mode 100644 index b59bae7..0000000 --- a/learn-kafka/bin/kafka-verifiable-producer.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xmx512M" -fi -exec $(dirname $0)/kafka-run-class.sh org.apache.kafka.tools.VerifiableProducer "$@" diff --git a/learn-kafka/bin/trogdor.sh b/learn-kafka/bin/trogdor.sh deleted file mode 100644 index 3324c4e..0000000 --- a/learn-kafka/bin/trogdor.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -usage() { - cat <nul 2>&1 - IF NOT ERRORLEVEL 1 ( - rem 32-bit OS - set KAFKA_HEAP_OPTS=-Xmx512M -Xms512M - ) ELSE ( - rem 64-bit OS - set KAFKA_HEAP_OPTS=-Xmx1G -Xms1G - ) -) -"%~dp0kafka-run-class.bat" kafka.Kafka %* -EndLocal diff --git a/learn-kafka/bin/windows/kafka-server-stop.bat b/learn-kafka/bin/windows/kafka-server-stop.bat deleted file mode 100644 index 676577c..0000000 --- a/learn-kafka/bin/windows/kafka-server-stop.bat +++ /dev/null @@ -1,18 +0,0 @@ -@echo off -rem Licensed to the Apache Software Foundation (ASF) under one or more -rem contributor license agreements. See the NOTICE file distributed with -rem this work for additional information regarding copyright ownership. -rem The ASF licenses this file to You under the Apache License, Version 2.0 -rem (the "License"); you may not use this file except in compliance with -rem the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, software -rem distributed under the License is distributed on an "AS IS" BASIS, -rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -rem See the License for the specific language governing permissions and -rem limitations under the License. - -wmic process where (commandline like "%%kafka.Kafka%%" and not name="wmic.exe") delete -rem ps ax | grep -i 'kafka.Kafka' | grep -v grep | awk '{print $1}' | xargs kill -SIGTERM diff --git a/learn-kafka/bin/windows/kafka-storage.bat b/learn-kafka/bin/windows/kafka-storage.bat deleted file mode 100644 index 4a0e458..0000000 --- a/learn-kafka/bin/windows/kafka-storage.bat +++ /dev/null @@ -1,17 +0,0 @@ -@echo off -rem Licensed to the Apache Software Foundation (ASF) under one or more -rem contributor license agreements. See the NOTICE file distributed with -rem this work for additional information regarding copyright ownership. 
-rem The ASF licenses this file to You under the Apache License, Version 2.0 -rem (the "License"); you may not use this file except in compliance with -rem the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, software -rem distributed under the License is distributed on an "AS IS" BASIS, -rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -rem See the License for the specific language governing permissions and -rem limitations under the License. - -"%~dp0kafka-run-class.bat" kafka.tools.StorageTool %* diff --git a/learn-kafka/bin/windows/kafka-streams-application-reset.bat b/learn-kafka/bin/windows/kafka-streams-application-reset.bat deleted file mode 100644 index 77ffc7d..0000000 --- a/learn-kafka/bin/windows/kafka-streams-application-reset.bat +++ /dev/null @@ -1,23 +0,0 @@ -@echo off -rem Licensed to the Apache Software Foundation (ASF) under one or more -rem contributor license agreements. See the NOTICE file distributed with -rem this work for additional information regarding copyright ownership. -rem The ASF licenses this file to You under the Apache License, Version 2.0 -rem (the "License"); you may not use this file except in compliance with -rem the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, software -rem distributed under the License is distributed on an "AS IS" BASIS, -rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -rem See the License for the specific language governing permissions and -rem limitations under the License. - -SetLocal -IF ["%KAFKA_HEAP_OPTS%"] EQU [""] ( - set KAFKA_HEAP_OPTS=-Xmx512M -) - -"%~dp0kafka-run-class.bat" org.apache.kafka.tools.StreamsResetter %* -EndLocal diff --git a/learn-kafka/bin/windows/kafka-topics.bat b/learn-kafka/bin/windows/kafka-topics.bat deleted file mode 100644 index 677b09d..0000000 --- a/learn-kafka/bin/windows/kafka-topics.bat +++ /dev/null @@ -1,17 +0,0 @@ -@echo off -rem Licensed to the Apache Software Foundation (ASF) under one or more -rem contributor license agreements. See the NOTICE file distributed with -rem this work for additional information regarding copyright ownership. -rem The ASF licenses this file to You under the Apache License, Version 2.0 -rem (the "License"); you may not use this file except in compliance with -rem the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, software -rem distributed under the License is distributed on an "AS IS" BASIS, -rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -rem See the License for the specific language governing permissions and -rem limitations under the License. - -"%~dp0kafka-run-class.bat" kafka.admin.TopicCommand %* diff --git a/learn-kafka/bin/windows/kafka-transactions.bat b/learn-kafka/bin/windows/kafka-transactions.bat deleted file mode 100644 index 9bb7585..0000000 --- a/learn-kafka/bin/windows/kafka-transactions.bat +++ /dev/null @@ -1,17 +0,0 @@ -@echo off -rem Licensed to the Apache Software Foundation (ASF) under one or more -rem contributor license agreements. See the NOTICE file distributed with -rem this work for additional information regarding copyright ownership. 
-rem The ASF licenses this file to You under the Apache License, Version 2.0 -rem (the "License"); you may not use this file except in compliance with -rem the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, software -rem distributed under the License is distributed on an "AS IS" BASIS, -rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -rem See the License for the specific language governing permissions and -rem limitations under the License. - -"%~dp0kafka-run-class.bat" org.apache.kafka.tools.TransactionsCommand %* diff --git a/learn-kafka/bin/windows/zookeeper-server-start.bat b/learn-kafka/bin/windows/zookeeper-server-start.bat deleted file mode 100644 index f201a58..0000000 --- a/learn-kafka/bin/windows/zookeeper-server-start.bat +++ /dev/null @@ -1,30 +0,0 @@ -@echo off -rem Licensed to the Apache Software Foundation (ASF) under one or more -rem contributor license agreements. See the NOTICE file distributed with -rem this work for additional information regarding copyright ownership. -rem The ASF licenses this file to You under the Apache License, Version 2.0 -rem (the "License"); you may not use this file except in compliance with -rem the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, software -rem distributed under the License is distributed on an "AS IS" BASIS, -rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -rem See the License for the specific language governing permissions and -rem limitations under the License. - -IF [%1] EQU [] ( - echo USAGE: %0 zookeeper.properties - EXIT /B 1 -) - -SetLocal -IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties -) -IF ["%KAFKA_HEAP_OPTS%"] EQU [""] ( - set KAFKA_HEAP_OPTS=-Xmx512M -Xms512M -) -"%~dp0kafka-run-class.bat" org.apache.zookeeper.server.quorum.QuorumPeerMain %* -EndLocal diff --git a/learn-kafka/bin/windows/zookeeper-server-stop.bat b/learn-kafka/bin/windows/zookeeper-server-stop.bat deleted file mode 100644 index 8b57dd8..0000000 --- a/learn-kafka/bin/windows/zookeeper-server-stop.bat +++ /dev/null @@ -1,17 +0,0 @@ -@echo off -rem Licensed to the Apache Software Foundation (ASF) under one or more -rem contributor license agreements. See the NOTICE file distributed with -rem this work for additional information regarding copyright ownership. -rem The ASF licenses this file to You under the Apache License, Version 2.0 -rem (the "License"); you may not use this file except in compliance with -rem the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, software -rem distributed under the License is distributed on an "AS IS" BASIS, -rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -rem See the License for the specific language governing permissions and -rem limitations under the License. 
- -wmic process where (commandline like "%%zookeeper%%" and not name="wmic.exe") delete diff --git a/learn-kafka/bin/windows/zookeeper-shell.bat b/learn-kafka/bin/windows/zookeeper-shell.bat deleted file mode 100644 index f1c86c4..0000000 --- a/learn-kafka/bin/windows/zookeeper-shell.bat +++ /dev/null @@ -1,22 +0,0 @@ -@echo off -rem Licensed to the Apache Software Foundation (ASF) under one or more -rem contributor license agreements. See the NOTICE file distributed with -rem this work for additional information regarding copyright ownership. -rem The ASF licenses this file to You under the Apache License, Version 2.0 -rem (the "License"); you may not use this file except in compliance with -rem the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, software -rem distributed under the License is distributed on an "AS IS" BASIS, -rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -rem See the License for the specific language governing permissions and -rem limitations under the License. - -IF [%1] EQU [] ( - echo USAGE: %0 zookeeper_host:port[/path] [-zk-tls-config-file file] [args...] - EXIT /B 1 -) - -"%~dp0kafka-run-class.bat" org.apache.zookeeper.ZooKeeperMainWithTlsSupportForKafka -server %* diff --git a/learn-kafka/bin/zookeeper-security-migration.sh b/learn-kafka/bin/zookeeper-security-migration.sh deleted file mode 100644 index 722bde7..0000000 --- a/learn-kafka/bin/zookeeper-security-migration.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -exec $(dirname $0)/kafka-run-class.sh kafka.admin.ZkSecurityMigrator "$@" diff --git a/learn-kafka/bin/zookeeper-server-start.sh b/learn-kafka/bin/zookeeper-server-start.sh deleted file mode 100644 index bd9c114..0000000 --- a/learn-kafka/bin/zookeeper-server-start.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
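# Illustrative sketch, not part of the deleted files above: both zookeeper-shell
# variants wrap org.apache.zookeeper.ZooKeeperMainWithTlsSupportForKafka. A typical
# query against a local ensemble (host:port and the znode path are example values):
./bin/zookeeper-shell.sh localhost:2181 ls /brokers/ids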
- -if [ $# -lt 1 ]; -then - echo "USAGE: $0 [-daemon] zookeeper.properties" - exit 1 -fi -base_dir=$(dirname $0) - -if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" -fi - -if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then - export KAFKA_HEAP_OPTS="-Xmx512M -Xms512M" -fi - -EXTRA_ARGS=${EXTRA_ARGS-'-name zookeeper -loggc'} - -COMMAND=$1 -case $COMMAND in - -daemon) - EXTRA_ARGS="-daemon "$EXTRA_ARGS - shift - ;; - *) - ;; -esac - -exec $base_dir/kafka-run-class.sh $EXTRA_ARGS org.apache.zookeeper.server.quorum.QuorumPeerMain "$@" diff --git a/learn-kafka/bin/zookeeper-server-stop.sh b/learn-kafka/bin/zookeeper-server-stop.sh deleted file mode 100644 index 11665f3..0000000 --- a/learn-kafka/bin/zookeeper-server-stop.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -SIGNAL=${SIGNAL:-TERM} - -OSNAME=$(uname -s) -if [[ "$OSNAME" == "OS/390" ]]; then - if [ -z $JOBNAME ]; then - JOBNAME="ZKEESTRT" - fi - PIDS=$(ps -A -o pid,jobname,comm | grep -i $JOBNAME | grep java | grep -v grep | awk '{print $1}') -elif [[ "$OSNAME" == "OS400" ]]; then - PIDS=$(ps -Af | grep java | grep -i QuorumPeerMain | grep -v grep | awk '{print $2}') -else - PIDS=$(ps ax | grep java | grep -i QuorumPeerMain | grep -v grep | awk '{print $1}') -fi - -if [ -z "$PIDS" ]; then - echo "No zookeeper server to stop" - exit 1 -else - kill -s $SIGNAL $PIDS -fi diff --git a/learn-kafka/bin/zookeeper-shell.sh b/learn-kafka/bin/zookeeper-shell.sh deleted file mode 100644 index 2f1d0f2..0000000 --- a/learn-kafka/bin/zookeeper-shell.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
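# Illustrative sketch, not part of the deleted files above: the ZooKeeper start/stop
# pair mirrors the broker scripts, so -daemon and SIGNAL behave the same way here
# (config/zookeeper.properties is the stock distribution path):
./bin/zookeeper-server-start.sh -daemon config/zookeeper.properties
SIGNAL=TERM ./bin/zookeeper-server-stop.sh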
- -if [ $# -lt 1 ]; -then - echo "USAGE: $0 zookeeper_host:port[/path] [-zk-tls-config-file file] [args...]" - exit 1 -fi - -exec $(dirname $0)/kafka-run-class.sh org.apache.zookeeper.ZooKeeperMainWithTlsSupportForKafka -server "$@" diff --git a/learn-kafka/config/connect-console-sink.properties b/learn-kafka/config/connect-console-sink.properties deleted file mode 100644 index e240a8f..0000000 --- a/learn-kafka/config/connect-console-sink.properties +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name=local-console-sink -connector.class=org.apache.kafka.connect.file.FileStreamSinkConnector -tasks.max=1 -topics=connect-test \ No newline at end of file diff --git a/learn-kafka/config/connect-console-source.properties b/learn-kafka/config/connect-console-source.properties deleted file mode 100644 index d0e2069..0000000 --- a/learn-kafka/config/connect-console-source.properties +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name=local-console-source -connector.class=org.apache.kafka.connect.file.FileStreamSourceConnector -tasks.max=1 -topic=connect-test \ No newline at end of file diff --git a/learn-kafka/config/connect-file-sink.properties b/learn-kafka/config/connect-file-sink.properties deleted file mode 100644 index 594ccc6..0000000 --- a/learn-kafka/config/connect-file-sink.properties +++ /dev/null @@ -1,20 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name=local-file-sink -connector.class=FileStreamSink -tasks.max=1 -file=test.sink.txt -topics=connect-test \ No newline at end of file diff --git a/learn-kafka/config/connect-file-source.properties b/learn-kafka/config/connect-file-source.properties deleted file mode 100644 index 599cf4c..0000000 --- a/learn-kafka/config/connect-file-source.properties +++ /dev/null @@ -1,20 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name=local-file-source -connector.class=FileStreamSource -tasks.max=1 -file=test.txt -topic=connect-test \ No newline at end of file diff --git a/learn-kafka/config/connect-log4j.properties b/learn-kafka/config/connect-log4j.properties deleted file mode 100644 index 2e049a5..0000000 --- a/learn-kafka/config/connect-log4j.properties +++ /dev/null @@ -1,41 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -log4j.rootLogger=INFO, stdout, connectAppender - -# Send the logs to the console. -# -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout - -# Send the logs to a file, rolling the file at midnight local time. For example, the `File` option specifies the -# location of the log files (e.g. ${kafka.logs.dir}/connect.log), and at midnight local time the file is closed -# and copied in the same directory but with a filename that ends in the `DatePattern` option. 
-# -log4j.appender.connectAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.connectAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.connectAppender.File=${kafka.logs.dir}/connect.log -log4j.appender.connectAppender.layout=org.apache.log4j.PatternLayout - -# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information -# in the log messages, where appropriate. This makes it easier to identify those log messages that apply to a -# specific connector. -# -connect.log.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n - -log4j.appender.stdout.layout.ConversionPattern=${connect.log.pattern} -log4j.appender.connectAppender.layout.ConversionPattern=${connect.log.pattern} - -log4j.logger.org.reflections=ERROR diff --git a/learn-kafka/config/connect-mirror-maker.properties b/learn-kafka/config/connect-mirror-maker.properties deleted file mode 100644 index 40afda5..0000000 --- a/learn-kafka/config/connect-mirror-maker.properties +++ /dev/null @@ -1,59 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# see org.apache.kafka.clients.consumer.ConsumerConfig for more details - -# Sample MirrorMaker 2.0 top-level configuration file -# Run with ./bin/connect-mirror-maker.sh connect-mirror-maker.properties - -# specify any number of cluster aliases -clusters = A, B - -# connection information for each cluster -# This is a comma separated host:port pairs for each cluster -# for e.g. "A_host1:9092, A_host2:9092, A_host3:9092" -A.bootstrap.servers = A_host1:9092, A_host2:9092, A_host3:9092 -B.bootstrap.servers = B_host1:9092, B_host2:9092, B_host3:9092 - -# enable and configure individual replication flows -A->B.enabled = true - -# regex which defines which topics gets replicated. For eg "foo-.*" -A->B.topics = .* - -B->A.enabled = true -B->A.topics = .* - -# Setting replication factor of newly created remote topics -replication.factor=1 - -############################# Internal Topic Settings ############################# -# The replication factor for mm2 internal topics "heartbeats", "B.checkpoints.internal" and -# "mm2-offset-syncs.B.internal" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -checkpoints.topic.replication.factor=1 -heartbeats.topic.replication.factor=1 -offset-syncs.topic.replication.factor=1 - -# The replication factor for connect internal topics "mm2-configs.B.internal", "mm2-offsets.B.internal" and -# "mm2-status.B.internal" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. 
-offset.storage.replication.factor=1 -status.storage.replication.factor=1 -config.storage.replication.factor=1 - -# customize as needed -# replication.policy.separator = _ -# sync.topic.acls.enabled = false -# emit.heartbeats.interval.seconds = 5 diff --git a/learn-kafka/config/connect-standalone.properties b/learn-kafka/config/connect-standalone.properties deleted file mode 100644 index 6d7e3dd..0000000 --- a/learn-kafka/config/connect-standalone.properties +++ /dev/null @@ -1,41 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# These are defaults. This file just demonstrates how to override some settings. -bootstrap.servers=localhost:9092 - -# The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will -# need to configure these based on the format they want their data in when loaded from or stored into Kafka -key.converter=org.apache.kafka.connect.json.JsonConverter -value.converter=org.apache.kafka.connect.json.JsonConverter -# Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply -# it to -key.converter.schemas.enable=true -value.converter.schemas.enable=true - -offset.storage.file.filename=/tmp/connect.offsets -# Flush much faster than normal, which is useful for testing/debugging -offset.flush.interval.ms=10000 - -# Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins -# (connectors, converters, transformations). The list should consist of top level directories that include -# any combination of: -# a) directories immediately containing jars with plugins and their dependencies -# b) uber-jars with plugins and their dependencies -# c) directories immediately containing the package directory structure of classes of plugins and their dependencies -# Note: symlinks will be followed to discover dependencies or plugins. -# Examples: -# plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors, -plugin.path=libs/connect-file-3.5.1.jar diff --git a/learn-kafka/config/consumer.properties b/learn-kafka/config/consumer.properties deleted file mode 100644 index 01bb12e..0000000 --- a/learn-kafka/config/consumer.properties +++ /dev/null @@ -1,26 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
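# Illustrative sketch, not part of the deleted files above: the standalone worker
# config pairs with the file connector properties deleted earlier, assuming the
# distribution's connect-standalone.sh is available:
echo "hello" > test.txt
./bin/connect-standalone.sh config/connect-standalone.properties \
  config/connect-file-source.properties config/connect-file-sink.properties
# Records flow test.txt -> topic connect-test -> test.sink.txt, with source offsets
# tracked in /tmp/connect.offsets (offset.storage.file.filename above).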
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# see org.apache.kafka.clients.consumer.ConsumerConfig for more details - -# list of brokers used for bootstrapping knowledge about the rest of the cluster -# format: host1:port1,host2:port2 ... -bootstrap.servers=localhost:9092 - -# consumer group id -group.id=test-consumer-group - -# What to do when there is no initial offset in Kafka or if the current -# offset does not exist any more on the server: latest, earliest, none -#auto.offset.reset= diff --git a/learn-kafka/config/kraft/broker.properties b/learn-kafka/config/kraft/broker.properties deleted file mode 100644 index 4edcc12..0000000 --- a/learn-kafka/config/kraft/broker.properties +++ /dev/null @@ -1,129 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# -# This configuration file is intended for use in KRaft mode, where -# Apache ZooKeeper is not present. See config/kraft/README.md for details. -# - -############################# Server Basics ############################# - -# The role of this server. Setting this puts us in KRaft mode -process.roles=broker - -# The node id associated with this instance's roles -node.id=2 - -# The connect string for the controller quorum -controller.quorum.voters=1@localhost:9093 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. If not configured, the host name will be equal to the value of -# java.net.InetAddress.getCanonicalHostName(), with PLAINTEXT listener name, and port 9092. -# FORMAT: -# listeners = listener_name://host_name:port -# EXAMPLE: -# listeners = PLAINTEXT://your.host.name:9092 -listeners=PLAINTEXT://localhost:9092 - -# Name of listener used for communication between brokers. -inter.broker.listener.name=PLAINTEXT - -# Listener name, hostname and port the broker will advertise to clients. -# If not set, it uses the value for "listeners". -advertised.listeners=PLAINTEXT://localhost:9092 - -# A comma-separated list of the names of the listeners used by the controller. -# This is required if running in KRaft mode. On a node with `process.roles=broker`, only the first listed listener will be used by the broker. -controller.listener.names=CONTROLLER - -# Maps listener names to security protocols, the default is for them to be the same. 
See the config documentation for more details -listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL - -# The number of threads that the server uses for receiving requests from the network and sending responses to the network -num.network.threads=3 - -# The number of threads that the server uses for processing requests, which may include disk I/O -num.io.threads=8 - -# The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes=102400 - -# The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes=102400 - -# The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes=104857600 - - -############################# Log Basics ############################# - -# A comma separated list of directories under which to store log files -log.dirs=/tmp/kraft-broker-logs - -# The default number of log partitions per topic. More partitions allow greater -# parallelism for consumption, but this will also result in more files across -# the brokers. -num.partitions=1 - -# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. -# This value is recommended to be increased for installations with data dirs located in RAID array. -num.recovery.threads.per.data.dir=1 - -############################# Internal Topic Settings ############################# -# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -offsets.topic.replication.factor=1 -transaction.state.log.replication.factor=1 -transaction.state.log.min.isr=1 - -############################# Log Flush Policy ############################# - -# Messages are immediately written to the filesystem but by default we only fsync() to sync -# the OS cache lazily. The following configurations control the flush of data to disk. -# There are a few important trade-offs here: -# 1. Durability: Unflushed data may be lost if you are not using replication. -# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. -# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. -# The settings below allow one to configure the flush policy to flush data after a period of time or -# every N messages (or both). This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -#log.flush.interval.messages=10000 - -# The maximum amount of time a message can sit in a log before we force a flush -#log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -log.retention.hours=168 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. 
Functions independently of log.retention.hours. -#log.retention.bytes=1073741824 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -log.segment.bytes=1073741824 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 diff --git a/learn-kafka/config/kraft/controller.properties b/learn-kafka/config/kraft/controller.properties deleted file mode 100644 index 9e8ad62..0000000 --- a/learn-kafka/config/kraft/controller.properties +++ /dev/null @@ -1,122 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# -# This configuration file is intended for use in KRaft mode, where -# Apache ZooKeeper is not present. See config/kraft/README.md for details. -# - -############################# Server Basics ############################# - -# The role of this server. Setting this puts us in KRaft mode -process.roles=controller - -# The node id associated with this instance's roles -node.id=1 - -# The connect string for the controller quorum -controller.quorum.voters=1@localhost:9093 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. -# Note that only the controller listeners are allowed here when `process.roles=controller`, and this listener should be consistent with `controller.quorum.voters` value. -# FORMAT: -# listeners = listener_name://host_name:port -# EXAMPLE: -# listeners = PLAINTEXT://your.host.name:9092 -listeners=CONTROLLER://:9093 - -# A comma-separated list of the names of the listeners used by the controller. -# This is required if running in KRaft mode. -controller.listener.names=CONTROLLER - -# Maps listener names to security protocols, the default is for them to be the same. 
See the config documentation for more details -#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL - -# The number of threads that the server uses for receiving requests from the network and sending responses to the network -num.network.threads=3 - -# The number of threads that the server uses for processing requests, which may include disk I/O -num.io.threads=8 - -# The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes=102400 - -# The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes=102400 - -# The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes=104857600 - - -############################# Log Basics ############################# - -# A comma separated list of directories under which to store log files -log.dirs=/tmp/kraft-controller-logs - -# The default number of log partitions per topic. More partitions allow greater -# parallelism for consumption, but this will also result in more files across -# the brokers. -num.partitions=1 - -# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. -# This value is recommended to be increased for installations with data dirs located in RAID array. -num.recovery.threads.per.data.dir=1 - -############################# Internal Topic Settings ############################# -# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -offsets.topic.replication.factor=1 -transaction.state.log.replication.factor=1 -transaction.state.log.min.isr=1 - -############################# Log Flush Policy ############################# - -# Messages are immediately written to the filesystem but by default we only fsync() to sync -# the OS cache lazily. The following configurations control the flush of data to disk. -# There are a few important trade-offs here: -# 1. Durability: Unflushed data may be lost if you are not using replication. -# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. -# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. -# The settings below allow one to configure the flush policy to flush data after a period of time or -# every N messages (or both). This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -#log.flush.interval.messages=10000 - -# The maximum amount of time a message can sit in a log before we force a flush -#log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -log.retention.hours=168 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. 
Functions independently of log.retention.hours. -#log.retention.bytes=1073741824 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -log.segment.bytes=1073741824 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 diff --git a/learn-kafka/config/kraft/server.properties b/learn-kafka/config/kraft/server.properties deleted file mode 100644 index ea84818..0000000 --- a/learn-kafka/config/kraft/server.properties +++ /dev/null @@ -1,132 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# -# This configuration file is intended for use in KRaft mode, where -# Apache ZooKeeper is not present. See config/kraft/README.md for details. -# - -############################# Server Basics ############################# - -# The role of this server. Setting this puts us in KRaft mode -process.roles=broker,controller - -# The node id associated with this instance's roles -node.id=1 - -# The connect string for the controller quorum -controller.quorum.voters=1@localhost:9093 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. -# Combined nodes (i.e. those with `process.roles=broker,controller`) must list the controller listener here at a minimum. -# If the broker listener is not defined, the default listener will use a host name that is equal to the value of java.net.InetAddress.getCanonicalHostName(), -# with PLAINTEXT listener name, and port 9092. -# FORMAT: -# listeners = listener_name://host_name:port -# EXAMPLE: -# listeners = PLAINTEXT://your.host.name:9092 -listeners=PLAINTEXT://:9092,CONTROLLER://:9093 - -# Name of listener used for communication between brokers. -inter.broker.listener.name=PLAINTEXT - -# Listener name, hostname and port the broker will advertise to clients. -# If not set, it uses the value for "listeners". -advertised.listeners=PLAINTEXT://localhost:9092 - -# A comma-separated list of the names of the listeners used by the controller. -# If no explicit mapping set in `listener.security.protocol.map`, default will be using PLAINTEXT protocol -# This is required if running in KRaft mode. -controller.listener.names=CONTROLLER - -# Maps listener names to security protocols, the default is for them to be the same. 
See the config documentation for more details -listener.security.protocol.map=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL - -# The number of threads that the server uses for receiving requests from the network and sending responses to the network -num.network.threads=3 - -# The number of threads that the server uses for processing requests, which may include disk I/O -num.io.threads=8 - -# The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes=102400 - -# The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes=102400 - -# The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes=104857600 - - -############################# Log Basics ############################# - -# A comma separated list of directories under which to store log files -log.dirs=/tmp/kraft-combined-logs - -# The default number of log partitions per topic. More partitions allow greater -# parallelism for consumption, but this will also result in more files across -# the brokers. -num.partitions=1 - -# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. -# This value is recommended to be increased for installations with data dirs located in RAID array. -num.recovery.threads.per.data.dir=1 - -############################# Internal Topic Settings ############################# -# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -offsets.topic.replication.factor=1 -transaction.state.log.replication.factor=1 -transaction.state.log.min.isr=1 - -############################# Log Flush Policy ############################# - -# Messages are immediately written to the filesystem but by default we only fsync() to sync -# the OS cache lazily. The following configurations control the flush of data to disk. -# There are a few important trade-offs here: -# 1. Durability: Unflushed data may be lost if you are not using replication. -# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. -# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. -# The settings below allow one to configure the flush policy to flush data after a period of time or -# every N messages (or both). This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -#log.flush.interval.messages=10000 - -# The maximum amount of time a message can sit in a log before we force a flush -#log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -log.retention.hours=168 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. 
Functions independently of log.retention.hours. -#log.retention.bytes=1073741824 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -log.segment.bytes=1073741824 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 diff --git a/learn-kafka/config/log4j.properties b/learn-kafka/config/log4j.properties deleted file mode 100644 index 4dbdd83..0000000 --- a/learn-kafka/config/log4j.properties +++ /dev/null @@ -1,96 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Unspecified loggers and loggers with additivity=true output to server.log and stdout -# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise -log4j.rootLogger=INFO, stdout, kafkaAppender - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log -log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log -log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log -log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log -log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log -log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout 
-log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log -log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -# Change the line below to adjust ZK client logging -log4j.logger.org.apache.zookeeper=INFO - -# Change the two lines below to adjust the general broker logging level (output to server.log and stdout) -log4j.logger.kafka=INFO -log4j.logger.org.apache.kafka=INFO - -# Change to DEBUG or TRACE to enable request logging -log4j.logger.kafka.request.logger=WARN, requestAppender -log4j.additivity.kafka.request.logger=false - -# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output -# related to the handling of requests -#log4j.logger.kafka.network.Processor=TRACE, requestAppender -#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender -#log4j.additivity.kafka.server.KafkaApis=false -log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender -log4j.additivity.kafka.network.RequestChannel$=false - -# Change the line below to adjust KRaft mode controller logging -log4j.logger.org.apache.kafka.controller=INFO, controllerAppender -log4j.additivity.org.apache.kafka.controller=false - -# Change the line below to adjust ZK mode controller logging -log4j.logger.kafka.controller=TRACE, controllerAppender -log4j.additivity.kafka.controller=false - -log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender -log4j.additivity.kafka.log.LogCleaner=false - -log4j.logger.state.change.logger=INFO, stateChangeAppender -log4j.additivity.state.change.logger=false - -# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses -log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender -log4j.additivity.kafka.authorizer.logger=false - diff --git a/learn-kafka/config/producer.properties b/learn-kafka/config/producer.properties deleted file mode 100644 index 3a999e7..0000000 --- a/learn-kafka/config/producer.properties +++ /dev/null @@ -1,46 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# see org.apache.kafka.clients.producer.ProducerConfig for more details - -############################# Producer Basics ############################# - -# list of brokers used for bootstrapping knowledge about the rest of the cluster -# format: host1:port1,host2:port2 ... 
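The producer settings that follow (compression.type, linger.ms, batch.size, buffer.memory) map one-to-one onto KafkaProducer constructor arguments in kafka-python. A minimal sketch, assuming a broker on localhost:9092 and the quickstart-events topic used by the consumer script later in this diff:

from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers=["localhost:9092"],
    compression_type="gzip",  # compression.type: none, gzip, snappy, lz4, zstd
    linger_ms=10,             # linger.ms: wait up to 10 ms to fill a batch
    batch_size=32768,         # batch.size: per-partition batch size in bytes
    buffer_memory=33554432,   # buffer.memory: total bytes buffered client-side
)
producer.send("quickstart-events", b"hello from the producer sketch")
producer.flush()  # block until buffered records are sent
producer.close()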
-bootstrap.servers=localhost:9092 - -# specify the compression codec for all data generated: none, gzip, snappy, lz4, zstd -compression.type=none - -# name of the partitioner class for partitioning records; -# The default uses "sticky" partitioning logic which spreads the load evenly between partitions, but improves throughput by attempting to fill the batches sent to each partition. -#partitioner.class= - -# the maximum amount of time the client will wait for the response of a request -#request.timeout.ms= - -# how long `KafkaProducer.send` and `KafkaProducer.partitionsFor` will block for -#max.block.ms= - -# the producer will wait for up to the given delay to allow other records to be sent so that the sends can be batched together -#linger.ms= - -# the maximum size of a request in bytes -#max.request.size= - -# the default batch size in bytes when batching multiple records sent to a partition -#batch.size= - -# the total bytes of memory the producer can use to buffer records waiting to be sent to the server -#buffer.memory= diff --git a/learn-kafka/config/server.properties b/learn-kafka/config/server.properties deleted file mode 100644 index 21ba1c7..0000000 --- a/learn-kafka/config/server.properties +++ /dev/null @@ -1,138 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# -# This configuration file is intended for use in ZK-based mode, where Apache ZooKeeper is required. -# See kafka.server.KafkaConfig for additional details and defaults -# - -############################# Server Basics ############################# - -# The id of the broker. This must be set to a unique integer for each broker. -broker.id=0 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. If not configured, the host name will be equal to the value of -# java.net.InetAddress.getCanonicalHostName(), with PLAINTEXT listener name, and port 9092. -# FORMAT: -# listeners = listener_name://host_name:port -# EXAMPLE: -# listeners = PLAINTEXT://your.host.name:9092 -#listeners=PLAINTEXT://:9092 - -# Listener name, hostname and port the broker will advertise to clients. -# If not set, it uses the value for "listeners". -#advertised.listeners=PLAINTEXT://your.host.name:9092 - -# Maps listener names to security protocols, the default is for them to be the same. 
See the config documentation for more details -#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL - -# The number of threads that the server uses for receiving requests from the network and sending responses to the network -num.network.threads=3 - -# The number of threads that the server uses for processing requests, which may include disk I/O -num.io.threads=8 - -# The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes=102400 - -# The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes=102400 - -# The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes=104857600 - - -############################# Log Basics ############################# - -# A comma separated list of directories under which to store log files -log.dirs=/tmp/kafka-logs - -# The default number of log partitions per topic. More partitions allow greater -# parallelism for consumption, but this will also result in more files across -# the brokers. -num.partitions=1 - -# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. -# This value is recommended to be increased for installations with data dirs located in RAID array. -num.recovery.threads.per.data.dir=1 - -############################# Internal Topic Settings ############################# -# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -offsets.topic.replication.factor=1 -transaction.state.log.replication.factor=1 -transaction.state.log.min.isr=1 - -############################# Log Flush Policy ############################# - -# Messages are immediately written to the filesystem but by default we only fsync() to sync -# the OS cache lazily. The following configurations control the flush of data to disk. -# There are a few important trade-offs here: -# 1. Durability: Unflushed data may be lost if you are not using replication. -# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. -# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. -# The settings below allow one to configure the flush policy to flush data after a period of time or -# every N messages (or both). This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -#log.flush.interval.messages=10000 - -# The maximum amount of time a message can sit in a log before we force a flush -#log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -log.retention.hours=168 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. Functions independently of log.retention.hours. 
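These retention limits are broker-wide defaults; a topic can carry its own limits from the moment it is created. A sketch using kafka-python's admin client, assuming a single-broker dev cluster on localhost:9092 (the topic name is illustrative):

from kafka.admin import KafkaAdminClient, NewTopic

admin = KafkaAdminClient(bootstrap_servers="localhost:9092")

# Whichever limit is hit first triggers segment deletion, mirroring the
# broker-level log.retention.hours / log.retention.bytes pair above.
topic = NewTopic(
    name="events-short-retention",  # illustrative name
    num_partitions=1,
    replication_factor=1,
    topic_configs={
        "retention.ms": str(24 * 60 * 60 * 1000),   # 1 day instead of 168 h
        "retention.bytes": str(256 * 1024 * 1024),  # or at most 256 MiB
    },
)
admin.create_topics([topic])
admin.close()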
-#log.retention.bytes=1073741824 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -#log.segment.bytes=1073741824 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 - -############################# Zookeeper ############################# - -# Zookeeper connection string (see zookeeper docs for details). -# This is a comma separated host:port pairs, each corresponding to a zk -# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002". -# You can also append an optional chroot string to the urls to specify the -# root directory for all kafka znodes. -zookeeper.connect=localhost:2181 - -# Timeout in ms for connecting to zookeeper -zookeeper.connection.timeout.ms=18000 - - -############################# Group Coordinator Settings ############################# - -# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance. -# The rebalance will be further delayed by the value of group.initial.rebalance.delay.ms as new members join the group, up to a maximum of max.poll.interval.ms. -# The default value for this is 3 seconds. -# We override this to 0 here as it makes for a better out-of-the-box experience for development and testing. -# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup. -group.initial.rebalance.delay.ms=0 diff --git a/learn-kafka/config/tools-log4j.properties b/learn-kafka/config/tools-log4j.properties deleted file mode 100644 index b19e343..0000000 --- a/learn-kafka/config/tools-log4j.properties +++ /dev/null @@ -1,21 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -log4j.rootLogger=WARN, stderr - -log4j.appender.stderr=org.apache.log4j.ConsoleAppender -log4j.appender.stderr.layout=org.apache.log4j.PatternLayout -log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.stderr.Target=System.err diff --git a/learn-kafka/config/trogdor.conf b/learn-kafka/config/trogdor.conf deleted file mode 100644 index 320cbe7..0000000 --- a/learn-kafka/config/trogdor.conf +++ /dev/null @@ -1,25 +0,0 @@ -{ - "_comment": [ - "Licensed to the Apache Software Foundation (ASF) under one or more", - "contributor license agreements. See the NOTICE file distributed with", - "this work for additional information regarding copyright ownership.", - "The ASF licenses this file to You under the Apache License, Version 2.0", - "(the \"License\"); you may not use this file except in compliance with", - "the License. 
You may obtain a copy of the License at", - "", - "http://www.apache.org/licenses/LICENSE-2.0", - "", - "Unless required by applicable law or agreed to in writing, software", - "distributed under the License is distributed on an \"AS IS\" BASIS,", - "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", - "See the License for the specific language governing permissions and", - "limitations under the License." - ], - "platform": "org.apache.kafka.trogdor.basic.BasicPlatform", "nodes": { - "node0": { - "hostname": "localhost", - "trogdor.agent.port": 8888, - "trogdor.coordinator.port": 8889 - } - } -} diff --git a/learn-kafka/config/zookeeper.properties b/learn-kafka/config/zookeeper.properties deleted file mode 100644 index 90f4332..0000000 --- a/learn-kafka/config/zookeeper.properties +++ /dev/null @@ -1,24 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# the directory where the snapshot is stored. -dataDir=/tmp/zookeeper -# the port at which the clients will connect -clientPort=2181 -# disable the per-ip limit on the number of connections since this is a non-production config -maxClientCnxns=0 -# Disable the adminserver by default to avoid port conflicts. 
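To check that the ZooKeeper instance configured here is actually listening on clientPort, its four-letter-word protocol is enough; no client library is needed. A sketch assuming localhost:2181 (note that "srvr" is whitelisted by default, while "ruok" may require 4lw.commands.whitelist):

import socket

# Send the "srvr" four-letter command and print ZooKeeper's status reply
# (version, latency stats, and whether the node is standalone or a leader).
with socket.create_connection(("localhost", 2181), timeout=5) as sock:
    sock.sendall(b"srvr")
    print(sock.recv(4096).decode())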
-# Set the port to something non-conflicting if choosing to enable this -admin.enableServer=false -# admin.serverPort=8080 diff --git a/learn-kafka/consumer.py b/learn-kafka/consumer.py deleted file mode 100644 index 1065cff..0000000 --- a/learn-kafka/consumer.py +++ /dev/null @@ -1,8 +0,0 @@ -from kafka import KafkaConsumer - - -consumer = KafkaConsumer(bootstrap_servers=['localhost:9092'], auto_offset_reset='earliest') -consumer.subscribe(['quickstart-events']) - -for event in consumer: - print("Got event: ", event.value) diff --git a/learn-kafka/docker-compose.yml b/learn-kafka/docker-compose.yml deleted file mode 100644 index 61dcee4..0000000 --- a/learn-kafka/docker-compose.yml +++ /dev/null @@ -1,25 +0,0 @@ -version: "2" -name: kafka-ecosystem - -services: - zookeeper: - image: docker.io/bitnami/zookeeper:3.8 - container_name: zookeeper - ports: - - "2181:2181" - volumes: - - ./.docker/data/zookeeper:/bitnami - environment: - - ALLOW_ANONYMOUS_LOGIN=yes - - kafka: - image: docker.io/bitnami/kafka:3.4 - container_name: kafka - ports: - - "9092:9092" - volumes: - - ./.docker/data/kafka:/bitnami - environment: - - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181 - depends_on: - - zookeeper diff --git a/learn-kafka/inputfile.txt b/learn-kafka/inputfile.txt deleted file mode 100644 index 8255858..0000000 --- a/learn-kafka/inputfile.txt +++ /dev/null @@ -1,10 +0,0 @@ -mot -hai -ba -bon -nam -sau -bay -tam -chin -muoi diff --git a/learn-kafka/libs/activation-1.1.1.jar b/learn-kafka/libs/activation-1.1.1.jar deleted file mode 100644 index 1b703ab..0000000 Binary files a/learn-kafka/libs/activation-1.1.1.jar and /dev/null differ diff --git a/learn-kafka/libs/aopalliance-repackaged-2.6.1.jar b/learn-kafka/libs/aopalliance-repackaged-2.6.1.jar deleted file mode 100644 index 35502f0..0000000 Binary files a/learn-kafka/libs/aopalliance-repackaged-2.6.1.jar and /dev/null differ diff --git a/learn-kafka/libs/argparse4j-0.7.0.jar b/learn-kafka/libs/argparse4j-0.7.0.jar deleted file mode 100644 index b1865dd..0000000 Binary files a/learn-kafka/libs/argparse4j-0.7.0.jar and /dev/null differ diff --git a/learn-kafka/libs/audience-annotations-0.13.0.jar b/learn-kafka/libs/audience-annotations-0.13.0.jar deleted file mode 100644 index d7c55f2..0000000 Binary files a/learn-kafka/libs/audience-annotations-0.13.0.jar and /dev/null differ diff --git a/learn-kafka/libs/commons-cli-1.4.jar b/learn-kafka/libs/commons-cli-1.4.jar deleted file mode 100644 index 22deb30..0000000 Binary files a/learn-kafka/libs/commons-cli-1.4.jar and /dev/null differ diff --git a/learn-kafka/libs/commons-lang3-3.8.1.jar b/learn-kafka/libs/commons-lang3-3.8.1.jar deleted file mode 100644 index 2c65ce6..0000000 Binary files a/learn-kafka/libs/commons-lang3-3.8.1.jar and /dev/null differ diff --git a/learn-kafka/libs/connect-api-3.5.1.jar b/learn-kafka/libs/connect-api-3.5.1.jar deleted file mode 100644 index e68f395..0000000 Binary files a/learn-kafka/libs/connect-api-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/connect-basic-auth-extension-3.5.1.jar b/learn-kafka/libs/connect-basic-auth-extension-3.5.1.jar deleted file mode 100644 index 0d70aa6..0000000 Binary files a/learn-kafka/libs/connect-basic-auth-extension-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/connect-file-3.5.1.jar b/learn-kafka/libs/connect-file-3.5.1.jar deleted file mode 100644 index b6655e1..0000000 Binary files a/learn-kafka/libs/connect-file-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/connect-json-3.5.1.jar 
b/learn-kafka/libs/connect-json-3.5.1.jar deleted file mode 100644 index 17cb848..0000000 Binary files a/learn-kafka/libs/connect-json-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/connect-mirror-3.5.1.jar b/learn-kafka/libs/connect-mirror-3.5.1.jar deleted file mode 100644 index b0b1af6..0000000 Binary files a/learn-kafka/libs/connect-mirror-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/connect-mirror-client-3.5.1.jar b/learn-kafka/libs/connect-mirror-client-3.5.1.jar deleted file mode 100644 index 2bce736..0000000 Binary files a/learn-kafka/libs/connect-mirror-client-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/connect-runtime-3.5.1.jar b/learn-kafka/libs/connect-runtime-3.5.1.jar deleted file mode 100644 index cdf2325..0000000 Binary files a/learn-kafka/libs/connect-runtime-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/connect-transforms-3.5.1.jar b/learn-kafka/libs/connect-transforms-3.5.1.jar deleted file mode 100644 index 5667dc7..0000000 Binary files a/learn-kafka/libs/connect-transforms-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/hk2-api-2.6.1.jar b/learn-kafka/libs/hk2-api-2.6.1.jar deleted file mode 100644 index 03d6eb0..0000000 Binary files a/learn-kafka/libs/hk2-api-2.6.1.jar and /dev/null differ diff --git a/learn-kafka/libs/hk2-locator-2.6.1.jar b/learn-kafka/libs/hk2-locator-2.6.1.jar deleted file mode 100644 index 0906bd1..0000000 Binary files a/learn-kafka/libs/hk2-locator-2.6.1.jar and /dev/null differ diff --git a/learn-kafka/libs/hk2-utils-2.6.1.jar b/learn-kafka/libs/hk2-utils-2.6.1.jar deleted file mode 100644 index 768bc48..0000000 Binary files a/learn-kafka/libs/hk2-utils-2.6.1.jar and /dev/null differ diff --git a/learn-kafka/libs/jackson-annotations-2.13.5.jar b/learn-kafka/libs/jackson-annotations-2.13.5.jar deleted file mode 100644 index 20ecaed..0000000 Binary files a/learn-kafka/libs/jackson-annotations-2.13.5.jar and /dev/null differ diff --git a/learn-kafka/libs/jackson-core-2.13.5.jar b/learn-kafka/libs/jackson-core-2.13.5.jar deleted file mode 100644 index 401dee3..0000000 Binary files a/learn-kafka/libs/jackson-core-2.13.5.jar and /dev/null differ diff --git a/learn-kafka/libs/jackson-databind-2.13.5.jar b/learn-kafka/libs/jackson-databind-2.13.5.jar deleted file mode 100644 index fde442b..0000000 Binary files a/learn-kafka/libs/jackson-databind-2.13.5.jar and /dev/null differ diff --git a/learn-kafka/libs/jackson-dataformat-csv-2.13.5.jar b/learn-kafka/libs/jackson-dataformat-csv-2.13.5.jar deleted file mode 100644 index 08569aa..0000000 Binary files a/learn-kafka/libs/jackson-dataformat-csv-2.13.5.jar and /dev/null differ diff --git a/learn-kafka/libs/jackson-datatype-jdk8-2.13.5.jar b/learn-kafka/libs/jackson-datatype-jdk8-2.13.5.jar deleted file mode 100644 index b002723..0000000 Binary files a/learn-kafka/libs/jackson-datatype-jdk8-2.13.5.jar and /dev/null differ diff --git a/learn-kafka/libs/jackson-jaxrs-base-2.13.5.jar b/learn-kafka/libs/jackson-jaxrs-base-2.13.5.jar deleted file mode 100644 index 7876a40..0000000 Binary files a/learn-kafka/libs/jackson-jaxrs-base-2.13.5.jar and /dev/null differ diff --git a/learn-kafka/libs/jackson-jaxrs-json-provider-2.13.5.jar b/learn-kafka/libs/jackson-jaxrs-json-provider-2.13.5.jar deleted file mode 100644 index 334b418..0000000 Binary files a/learn-kafka/libs/jackson-jaxrs-json-provider-2.13.5.jar and /dev/null differ diff --git a/learn-kafka/libs/jackson-module-jaxb-annotations-2.13.5.jar 
b/learn-kafka/libs/jackson-module-jaxb-annotations-2.13.5.jar deleted file mode 100644 index de7c0f3..0000000 Binary files a/learn-kafka/libs/jackson-module-jaxb-annotations-2.13.5.jar and /dev/null differ diff --git a/learn-kafka/libs/jackson-module-scala_2.13-2.13.5.jar b/learn-kafka/libs/jackson-module-scala_2.13-2.13.5.jar deleted file mode 100644 index fd42a69..0000000 Binary files a/learn-kafka/libs/jackson-module-scala_2.13-2.13.5.jar and /dev/null differ diff --git a/learn-kafka/libs/jakarta.activation-api-1.2.2.jar b/learn-kafka/libs/jakarta.activation-api-1.2.2.jar deleted file mode 100644 index 3cc969d..0000000 Binary files a/learn-kafka/libs/jakarta.activation-api-1.2.2.jar and /dev/null differ diff --git a/learn-kafka/libs/jakarta.annotation-api-1.3.5.jar b/learn-kafka/libs/jakarta.annotation-api-1.3.5.jar deleted file mode 100644 index 606d992..0000000 Binary files a/learn-kafka/libs/jakarta.annotation-api-1.3.5.jar and /dev/null differ diff --git a/learn-kafka/libs/jakarta.inject-2.6.1.jar b/learn-kafka/libs/jakarta.inject-2.6.1.jar deleted file mode 100644 index cee6acd..0000000 Binary files a/learn-kafka/libs/jakarta.inject-2.6.1.jar and /dev/null differ diff --git a/learn-kafka/libs/jakarta.validation-api-2.0.2.jar b/learn-kafka/libs/jakarta.validation-api-2.0.2.jar deleted file mode 100644 index d68c9f7..0000000 Binary files a/learn-kafka/libs/jakarta.validation-api-2.0.2.jar and /dev/null differ diff --git a/learn-kafka/libs/jakarta.ws.rs-api-2.1.6.jar b/learn-kafka/libs/jakarta.ws.rs-api-2.1.6.jar deleted file mode 100644 index 4850659..0000000 Binary files a/learn-kafka/libs/jakarta.ws.rs-api-2.1.6.jar and /dev/null differ diff --git a/learn-kafka/libs/jakarta.xml.bind-api-2.3.3.jar b/learn-kafka/libs/jakarta.xml.bind-api-2.3.3.jar deleted file mode 100644 index b8c7dc1..0000000 Binary files a/learn-kafka/libs/jakarta.xml.bind-api-2.3.3.jar and /dev/null differ diff --git a/learn-kafka/libs/javassist-3.29.2-GA.jar b/learn-kafka/libs/javassist-3.29.2-GA.jar deleted file mode 100644 index 68fc301..0000000 Binary files a/learn-kafka/libs/javassist-3.29.2-GA.jar and /dev/null differ diff --git a/learn-kafka/libs/javax.activation-api-1.2.0.jar b/learn-kafka/libs/javax.activation-api-1.2.0.jar deleted file mode 100644 index 986c365..0000000 Binary files a/learn-kafka/libs/javax.activation-api-1.2.0.jar and /dev/null differ diff --git a/learn-kafka/libs/javax.annotation-api-1.3.2.jar b/learn-kafka/libs/javax.annotation-api-1.3.2.jar deleted file mode 100644 index a8a470a..0000000 Binary files a/learn-kafka/libs/javax.annotation-api-1.3.2.jar and /dev/null differ diff --git a/learn-kafka/libs/javax.servlet-api-3.1.0.jar b/learn-kafka/libs/javax.servlet-api-3.1.0.jar deleted file mode 100644 index 6b14c3d..0000000 Binary files a/learn-kafka/libs/javax.servlet-api-3.1.0.jar and /dev/null differ diff --git a/learn-kafka/libs/javax.ws.rs-api-2.1.1.jar b/learn-kafka/libs/javax.ws.rs-api-2.1.1.jar deleted file mode 100644 index 3eabbf0..0000000 Binary files a/learn-kafka/libs/javax.ws.rs-api-2.1.1.jar and /dev/null differ diff --git a/learn-kafka/libs/jaxb-api-2.3.1.jar b/learn-kafka/libs/jaxb-api-2.3.1.jar deleted file mode 100644 index 4565865..0000000 Binary files a/learn-kafka/libs/jaxb-api-2.3.1.jar and /dev/null differ diff --git a/learn-kafka/libs/jersey-client-2.39.1.jar b/learn-kafka/libs/jersey-client-2.39.1.jar deleted file mode 100644 index ebe07a1..0000000 Binary files a/learn-kafka/libs/jersey-client-2.39.1.jar and /dev/null differ diff --git 
a/learn-kafka/libs/jersey-common-2.39.1.jar b/learn-kafka/libs/jersey-common-2.39.1.jar deleted file mode 100644 index 6ef0176..0000000 Binary files a/learn-kafka/libs/jersey-common-2.39.1.jar and /dev/null differ diff --git a/learn-kafka/libs/jersey-container-servlet-2.39.1.jar b/learn-kafka/libs/jersey-container-servlet-2.39.1.jar deleted file mode 100644 index 451d721..0000000 Binary files a/learn-kafka/libs/jersey-container-servlet-2.39.1.jar and /dev/null differ diff --git a/learn-kafka/libs/jersey-container-servlet-core-2.39.1.jar b/learn-kafka/libs/jersey-container-servlet-core-2.39.1.jar deleted file mode 100644 index af3e491..0000000 Binary files a/learn-kafka/libs/jersey-container-servlet-core-2.39.1.jar and /dev/null differ diff --git a/learn-kafka/libs/jersey-hk2-2.39.1.jar b/learn-kafka/libs/jersey-hk2-2.39.1.jar deleted file mode 100644 index ff3596f..0000000 Binary files a/learn-kafka/libs/jersey-hk2-2.39.1.jar and /dev/null differ diff --git a/learn-kafka/libs/jersey-server-2.39.1.jar b/learn-kafka/libs/jersey-server-2.39.1.jar deleted file mode 100644 index b9240a2..0000000 Binary files a/learn-kafka/libs/jersey-server-2.39.1.jar and /dev/null differ diff --git a/learn-kafka/libs/jetty-client-9.4.51.v20230217.jar b/learn-kafka/libs/jetty-client-9.4.51.v20230217.jar deleted file mode 100644 index 7f53c19..0000000 Binary files a/learn-kafka/libs/jetty-client-9.4.51.v20230217.jar and /dev/null differ diff --git a/learn-kafka/libs/jetty-continuation-9.4.51.v20230217.jar b/learn-kafka/libs/jetty-continuation-9.4.51.v20230217.jar deleted file mode 100644 index e08647e..0000000 Binary files a/learn-kafka/libs/jetty-continuation-9.4.51.v20230217.jar and /dev/null differ diff --git a/learn-kafka/libs/jetty-http-9.4.51.v20230217.jar b/learn-kafka/libs/jetty-http-9.4.51.v20230217.jar deleted file mode 100644 index bbfccd6..0000000 Binary files a/learn-kafka/libs/jetty-http-9.4.51.v20230217.jar and /dev/null differ diff --git a/learn-kafka/libs/jetty-io-9.4.51.v20230217.jar b/learn-kafka/libs/jetty-io-9.4.51.v20230217.jar deleted file mode 100644 index e78675b..0000000 Binary files a/learn-kafka/libs/jetty-io-9.4.51.v20230217.jar and /dev/null differ diff --git a/learn-kafka/libs/jetty-security-9.4.51.v20230217.jar b/learn-kafka/libs/jetty-security-9.4.51.v20230217.jar deleted file mode 100644 index 98a823f..0000000 Binary files a/learn-kafka/libs/jetty-security-9.4.51.v20230217.jar and /dev/null differ diff --git a/learn-kafka/libs/jetty-server-9.4.51.v20230217.jar b/learn-kafka/libs/jetty-server-9.4.51.v20230217.jar deleted file mode 100644 index 3955431..0000000 Binary files a/learn-kafka/libs/jetty-server-9.4.51.v20230217.jar and /dev/null differ diff --git a/learn-kafka/libs/jetty-servlet-9.4.51.v20230217.jar b/learn-kafka/libs/jetty-servlet-9.4.51.v20230217.jar deleted file mode 100644 index cc3aaa3..0000000 Binary files a/learn-kafka/libs/jetty-servlet-9.4.51.v20230217.jar and /dev/null differ diff --git a/learn-kafka/libs/jetty-servlets-9.4.51.v20230217.jar b/learn-kafka/libs/jetty-servlets-9.4.51.v20230217.jar deleted file mode 100644 index e9f98be..0000000 Binary files a/learn-kafka/libs/jetty-servlets-9.4.51.v20230217.jar and /dev/null differ diff --git a/learn-kafka/libs/jetty-util-9.4.51.v20230217.jar b/learn-kafka/libs/jetty-util-9.4.51.v20230217.jar deleted file mode 100644 index bf21612..0000000 Binary files a/learn-kafka/libs/jetty-util-9.4.51.v20230217.jar and /dev/null differ diff --git a/learn-kafka/libs/jetty-util-ajax-9.4.51.v20230217.jar 
b/learn-kafka/libs/jetty-util-ajax-9.4.51.v20230217.jar deleted file mode 100644 index febc039..0000000 Binary files a/learn-kafka/libs/jetty-util-ajax-9.4.51.v20230217.jar and /dev/null differ diff --git a/learn-kafka/libs/jline-3.22.0.jar b/learn-kafka/libs/jline-3.22.0.jar deleted file mode 100644 index b016252..0000000 Binary files a/learn-kafka/libs/jline-3.22.0.jar and /dev/null differ diff --git a/learn-kafka/libs/jopt-simple-5.0.4.jar b/learn-kafka/libs/jopt-simple-5.0.4.jar deleted file mode 100644 index 317b2b0..0000000 Binary files a/learn-kafka/libs/jopt-simple-5.0.4.jar and /dev/null differ diff --git a/learn-kafka/libs/jose4j-0.9.3.jar b/learn-kafka/libs/jose4j-0.9.3.jar deleted file mode 100644 index e073555..0000000 Binary files a/learn-kafka/libs/jose4j-0.9.3.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-clients-3.5.1.jar b/learn-kafka/libs/kafka-clients-3.5.1.jar deleted file mode 100644 index 6f2fc13..0000000 Binary files a/learn-kafka/libs/kafka-clients-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-group-coordinator-3.5.1.jar b/learn-kafka/libs/kafka-group-coordinator-3.5.1.jar deleted file mode 100644 index cfa6cc8..0000000 Binary files a/learn-kafka/libs/kafka-group-coordinator-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-log4j-appender-3.5.1.jar b/learn-kafka/libs/kafka-log4j-appender-3.5.1.jar deleted file mode 100644 index f353a09..0000000 Binary files a/learn-kafka/libs/kafka-log4j-appender-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-metadata-3.5.1.jar b/learn-kafka/libs/kafka-metadata-3.5.1.jar deleted file mode 100644 index f91ffdf..0000000 Binary files a/learn-kafka/libs/kafka-metadata-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-raft-3.5.1.jar b/learn-kafka/libs/kafka-raft-3.5.1.jar deleted file mode 100644 index 14b3eb7..0000000 Binary files a/learn-kafka/libs/kafka-raft-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-server-common-3.5.1.jar b/learn-kafka/libs/kafka-server-common-3.5.1.jar deleted file mode 100644 index d35d7c0..0000000 Binary files a/learn-kafka/libs/kafka-server-common-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-shell-3.5.1.jar b/learn-kafka/libs/kafka-shell-3.5.1.jar deleted file mode 100644 index 107f485..0000000 Binary files a/learn-kafka/libs/kafka-shell-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-storage-3.5.1.jar b/learn-kafka/libs/kafka-storage-3.5.1.jar deleted file mode 100644 index 3f8a7c9..0000000 Binary files a/learn-kafka/libs/kafka-storage-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-storage-api-3.5.1.jar b/learn-kafka/libs/kafka-storage-api-3.5.1.jar deleted file mode 100644 index 1c333c0..0000000 Binary files a/learn-kafka/libs/kafka-storage-api-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-streams-3.5.1.jar b/learn-kafka/libs/kafka-streams-3.5.1.jar deleted file mode 100644 index a8956b0..0000000 Binary files a/learn-kafka/libs/kafka-streams-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-streams-examples-3.5.1.jar b/learn-kafka/libs/kafka-streams-examples-3.5.1.jar deleted file mode 100644 index d9df4bf..0000000 Binary files a/learn-kafka/libs/kafka-streams-examples-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-streams-scala_2.13-3.5.1.jar b/learn-kafka/libs/kafka-streams-scala_2.13-3.5.1.jar deleted file mode 100644 index 8926d17..0000000 Binary files 
a/learn-kafka/libs/kafka-streams-scala_2.13-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-streams-test-utils-3.5.1.jar b/learn-kafka/libs/kafka-streams-test-utils-3.5.1.jar deleted file mode 100644 index 248a547..0000000 Binary files a/learn-kafka/libs/kafka-streams-test-utils-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-tools-3.5.1.jar b/learn-kafka/libs/kafka-tools-3.5.1.jar deleted file mode 100644 index 9ba4a77..0000000 Binary files a/learn-kafka/libs/kafka-tools-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka-tools-api-3.5.1.jar b/learn-kafka/libs/kafka-tools-api-3.5.1.jar deleted file mode 100644 index 07f1b25..0000000 Binary files a/learn-kafka/libs/kafka-tools-api-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/kafka_2.13-3.5.1.jar b/learn-kafka/libs/kafka_2.13-3.5.1.jar deleted file mode 100644 index a39193a..0000000 Binary files a/learn-kafka/libs/kafka_2.13-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/lz4-java-1.8.0.jar b/learn-kafka/libs/lz4-java-1.8.0.jar deleted file mode 100644 index 89c644b..0000000 Binary files a/learn-kafka/libs/lz4-java-1.8.0.jar and /dev/null differ diff --git a/learn-kafka/libs/maven-artifact-3.8.8.jar b/learn-kafka/libs/maven-artifact-3.8.8.jar deleted file mode 100644 index 17ee3c2..0000000 Binary files a/learn-kafka/libs/maven-artifact-3.8.8.jar and /dev/null differ diff --git a/learn-kafka/libs/metrics-core-2.2.0.jar b/learn-kafka/libs/metrics-core-2.2.0.jar deleted file mode 100644 index 0f6d1cb..0000000 Binary files a/learn-kafka/libs/metrics-core-2.2.0.jar and /dev/null differ diff --git a/learn-kafka/libs/metrics-core-4.1.12.1.jar b/learn-kafka/libs/metrics-core-4.1.12.1.jar deleted file mode 100644 index 94fc834..0000000 Binary files a/learn-kafka/libs/metrics-core-4.1.12.1.jar and /dev/null differ diff --git a/learn-kafka/libs/netty-buffer-4.1.94.Final.jar b/learn-kafka/libs/netty-buffer-4.1.94.Final.jar deleted file mode 100644 index b7ca7dd..0000000 Binary files a/learn-kafka/libs/netty-buffer-4.1.94.Final.jar and /dev/null differ diff --git a/learn-kafka/libs/netty-codec-4.1.94.Final.jar b/learn-kafka/libs/netty-codec-4.1.94.Final.jar deleted file mode 100644 index a3f989c..0000000 Binary files a/learn-kafka/libs/netty-codec-4.1.94.Final.jar and /dev/null differ diff --git a/learn-kafka/libs/netty-common-4.1.94.Final.jar b/learn-kafka/libs/netty-common-4.1.94.Final.jar deleted file mode 100644 index 98d8abd..0000000 Binary files a/learn-kafka/libs/netty-common-4.1.94.Final.jar and /dev/null differ diff --git a/learn-kafka/libs/netty-handler-4.1.94.Final.jar b/learn-kafka/libs/netty-handler-4.1.94.Final.jar deleted file mode 100644 index 716799a..0000000 Binary files a/learn-kafka/libs/netty-handler-4.1.94.Final.jar and /dev/null differ diff --git a/learn-kafka/libs/netty-resolver-4.1.94.Final.jar b/learn-kafka/libs/netty-resolver-4.1.94.Final.jar deleted file mode 100644 index e915955..0000000 Binary files a/learn-kafka/libs/netty-resolver-4.1.94.Final.jar and /dev/null differ diff --git a/learn-kafka/libs/netty-transport-4.1.94.Final.jar b/learn-kafka/libs/netty-transport-4.1.94.Final.jar deleted file mode 100644 index 6f40952..0000000 Binary files a/learn-kafka/libs/netty-transport-4.1.94.Final.jar and /dev/null differ diff --git a/learn-kafka/libs/netty-transport-classes-epoll-4.1.94.Final.jar b/learn-kafka/libs/netty-transport-classes-epoll-4.1.94.Final.jar deleted file mode 100644 index 5ed85e6..0000000 Binary files 
a/learn-kafka/libs/netty-transport-classes-epoll-4.1.94.Final.jar and /dev/null differ diff --git a/learn-kafka/libs/netty-transport-native-epoll-4.1.94.Final.jar b/learn-kafka/libs/netty-transport-native-epoll-4.1.94.Final.jar deleted file mode 100644 index e46a4c8..0000000 Binary files a/learn-kafka/libs/netty-transport-native-epoll-4.1.94.Final.jar and /dev/null differ diff --git a/learn-kafka/libs/netty-transport-native-unix-common-4.1.94.Final.jar b/learn-kafka/libs/netty-transport-native-unix-common-4.1.94.Final.jar deleted file mode 100644 index a0fa922..0000000 Binary files a/learn-kafka/libs/netty-transport-native-unix-common-4.1.94.Final.jar and /dev/null differ diff --git a/learn-kafka/libs/osgi-resource-locator-1.0.3.jar b/learn-kafka/libs/osgi-resource-locator-1.0.3.jar deleted file mode 100644 index 0f3c386..0000000 Binary files a/learn-kafka/libs/osgi-resource-locator-1.0.3.jar and /dev/null differ diff --git a/learn-kafka/libs/paranamer-2.8.jar b/learn-kafka/libs/paranamer-2.8.jar deleted file mode 100644 index 0bf659b..0000000 Binary files a/learn-kafka/libs/paranamer-2.8.jar and /dev/null differ diff --git a/learn-kafka/libs/plexus-utils-3.3.1.jar b/learn-kafka/libs/plexus-utils-3.3.1.jar deleted file mode 100644 index 956c653..0000000 Binary files a/learn-kafka/libs/plexus-utils-3.3.1.jar and /dev/null differ diff --git a/learn-kafka/libs/reflections-0.9.12.jar b/learn-kafka/libs/reflections-0.9.12.jar deleted file mode 100644 index 0f176b9..0000000 Binary files a/learn-kafka/libs/reflections-0.9.12.jar and /dev/null differ diff --git a/learn-kafka/libs/reload4j-1.2.25.jar b/learn-kafka/libs/reload4j-1.2.25.jar deleted file mode 100644 index 1b51d62..0000000 Binary files a/learn-kafka/libs/reload4j-1.2.25.jar and /dev/null differ diff --git a/learn-kafka/libs/rocksdbjni-7.1.2.jar b/learn-kafka/libs/rocksdbjni-7.1.2.jar deleted file mode 100644 index f1cbf3e..0000000 Binary files a/learn-kafka/libs/rocksdbjni-7.1.2.jar and /dev/null differ diff --git a/learn-kafka/libs/scala-collection-compat_2.13-2.10.0.jar b/learn-kafka/libs/scala-collection-compat_2.13-2.10.0.jar deleted file mode 100644 index 82c221d..0000000 Binary files a/learn-kafka/libs/scala-collection-compat_2.13-2.10.0.jar and /dev/null differ diff --git a/learn-kafka/libs/scala-java8-compat_2.13-1.0.2.jar b/learn-kafka/libs/scala-java8-compat_2.13-1.0.2.jar deleted file mode 100644 index 11bc17e..0000000 Binary files a/learn-kafka/libs/scala-java8-compat_2.13-1.0.2.jar and /dev/null differ diff --git a/learn-kafka/libs/scala-library-2.13.10.jar b/learn-kafka/libs/scala-library-2.13.10.jar deleted file mode 100644 index ad931ed..0000000 Binary files a/learn-kafka/libs/scala-library-2.13.10.jar and /dev/null differ diff --git a/learn-kafka/libs/scala-logging_2.13-3.9.4.jar b/learn-kafka/libs/scala-logging_2.13-3.9.4.jar deleted file mode 100644 index 107e741..0000000 Binary files a/learn-kafka/libs/scala-logging_2.13-3.9.4.jar and /dev/null differ diff --git a/learn-kafka/libs/scala-reflect-2.13.10.jar b/learn-kafka/libs/scala-reflect-2.13.10.jar deleted file mode 100644 index 14e5f1c..0000000 Binary files a/learn-kafka/libs/scala-reflect-2.13.10.jar and /dev/null differ diff --git a/learn-kafka/libs/slf4j-api-1.7.36.jar b/learn-kafka/libs/slf4j-api-1.7.36.jar deleted file mode 100644 index 7d3ce68..0000000 Binary files a/learn-kafka/libs/slf4j-api-1.7.36.jar and /dev/null differ diff --git a/learn-kafka/libs/slf4j-reload4j-1.7.36.jar b/learn-kafka/libs/slf4j-reload4j-1.7.36.jar deleted file mode 100644 index 
b007cc7..0000000 Binary files a/learn-kafka/libs/slf4j-reload4j-1.7.36.jar and /dev/null differ diff --git a/learn-kafka/libs/snappy-java-1.1.10.1.jar b/learn-kafka/libs/snappy-java-1.1.10.1.jar deleted file mode 100644 index eccf180..0000000 Binary files a/learn-kafka/libs/snappy-java-1.1.10.1.jar and /dev/null differ diff --git a/learn-kafka/libs/swagger-annotations-2.2.8.jar b/learn-kafka/libs/swagger-annotations-2.2.8.jar deleted file mode 100644 index 9f71ba0..0000000 Binary files a/learn-kafka/libs/swagger-annotations-2.2.8.jar and /dev/null differ diff --git a/learn-kafka/libs/trogdor-3.5.1.jar b/learn-kafka/libs/trogdor-3.5.1.jar deleted file mode 100644 index 1fca158..0000000 Binary files a/learn-kafka/libs/trogdor-3.5.1.jar and /dev/null differ diff --git a/learn-kafka/libs/zookeeper-3.6.4.jar b/learn-kafka/libs/zookeeper-3.6.4.jar deleted file mode 100644 index e8d6a15..0000000 Binary files a/learn-kafka/libs/zookeeper-3.6.4.jar and /dev/null differ diff --git a/learn-kafka/libs/zookeeper-jute-3.6.4.jar b/learn-kafka/libs/zookeeper-jute-3.6.4.jar deleted file mode 100644 index df1d402..0000000 Binary files a/learn-kafka/libs/zookeeper-jute-3.6.4.jar and /dev/null differ diff --git a/learn-kafka/libs/zstd-jni-1.5.5-1.jar b/learn-kafka/libs/zstd-jni-1.5.5-1.jar deleted file mode 100644 index 40b44c0..0000000 Binary files a/learn-kafka/libs/zstd-jni-1.5.5-1.jar and /dev/null differ diff --git a/learn-kafka/licenses/CDDL+GPL-1.1 b/learn-kafka/licenses/CDDL+GPL-1.1 deleted file mode 100644 index 4b156e6..0000000 --- a/learn-kafka/licenses/CDDL+GPL-1.1 +++ /dev/null @@ -1,760 +0,0 @@ -COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.1 - -1. Definitions. - - 1.1. "Contributor" means each individual or entity that creates or - contributes to the creation of Modifications. - - 1.2. "Contributor Version" means the combination of the Original - Software, prior Modifications used by a Contributor (if any), and - the Modifications made by that particular Contributor. - - 1.3. "Covered Software" means (a) the Original Software, or (b) - Modifications, or (c) the combination of files containing Original - Software with files containing Modifications, in each case including - portions thereof. - - 1.4. "Executable" means the Covered Software in any form other than - Source Code. - - 1.5. "Initial Developer" means the individual or entity that first - makes Original Software available under this License. - - 1.6. "Larger Work" means a work which combines Covered Software or - portions thereof with code not governed by the terms of this License. - - 1.7. "License" means this document. - - 1.8. "Licensable" means having the right to grant, to the maximum - extent possible, whether at the time of the initial grant or - subsequently acquired, any and all of the rights conveyed herein. - - 1.9. "Modifications" means the Source Code and Executable form of - any of the following: - - A. Any file that results from an addition to, deletion from or - modification of the contents of a file containing Original Software - or previous Modifications; - - B. Any new file that contains any part of the Original Software or - previous Modification; or - - C. Any new file that is contributed or otherwise made available - under the terms of this License. - - 1.10. "Original Software" means the Source Code and Executable form - of computer software code that is originally released under this - License. - - 1.11. 
"Patent Claims" means any patent claim(s), now owned or - hereafter acquired, including without limitation, method, process, - and apparatus claims, in any patent Licensable by grantor. - - 1.12. "Source Code" means (a) the common form of computer software - code in which modifications are made and (b) associated - documentation included in or with such code. - - 1.13. "You" (or "Your") means an individual or a legal entity - exercising rights under, and complying with all of the terms of, - this License. For legal entities, "You" includes any entity which - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants. - - 2.1. The Initial Developer Grant. - - Conditioned upon Your compliance with Section 3.1 below and subject - to third party intellectual property claims, the Initial Developer - hereby grants You a world-wide, royalty-free, non-exclusive license: - - (a) under intellectual property rights (other than patent or - trademark) Licensable by Initial Developer, to use, reproduce, - modify, display, perform, sublicense and distribute the Original - Software (or portions thereof), with or without Modifications, - and/or as part of a Larger Work; and - - (b) under Patent Claims infringed by the making, using or selling of - Original Software, to make, have made, use, practice, sell, and - offer for sale, and/or otherwise dispose of the Original Software - (or portions thereof). - - (c) The licenses granted in Sections 2.1(a) and (b) are effective on - the date Initial Developer first distributes or otherwise makes the - Original Software available to a third party under the terms of this - License. - - (d) Notwithstanding Section 2.1(b) above, no patent license is - granted: (1) for code that You delete from the Original Software, or - (2) for infringements caused by: (i) the modification of the - Original Software, or (ii) the combination of the Original Software - with other software or devices. - - 2.2. Contributor Grant. - - Conditioned upon Your compliance with Section 3.1 below and subject - to third party intellectual property claims, each Contributor hereby - grants You a world-wide, royalty-free, non-exclusive license: - - (a) under intellectual property rights (other than patent or - trademark) Licensable by Contributor to use, reproduce, modify, - display, perform, sublicense and distribute the Modifications - created by such Contributor (or portions thereof), either on an - unmodified basis, with other Modifications, as Covered Software - and/or as part of a Larger Work; and - - (b) under Patent Claims infringed by the making, using, or selling - of Modifications made by that Contributor either alone and/or in - combination with its Contributor Version (or portions of such - combination), to make, use, sell, offer for sale, have made, and/or - otherwise dispose of: (1) Modifications made by that Contributor (or - portions thereof); and (2) the combination of Modifications made by - that Contributor with its Contributor Version (or portions of such - combination). - - (c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective - on the date Contributor first distributes or otherwise makes the - Modifications available to a third party. 
- - (d) Notwithstanding Section 2.2(b) above, no patent license is - granted: (1) for any code that Contributor has deleted from the - Contributor Version; (2) for infringements caused by: (i) third - party modifications of Contributor Version, or (ii) the combination - of Modifications made by that Contributor with other software - (except as part of the Contributor Version) or other devices; or (3) - under Patent Claims infringed by Covered Software in the absence of - Modifications made by that Contributor. - -3. Distribution Obligations. - - 3.1. Availability of Source Code. - - Any Covered Software that You distribute or otherwise make available - in Executable form must also be made available in Source Code form - and that Source Code form must be distributed only under the terms - of this License. You must include a copy of this License with every - copy of the Source Code form of the Covered Software You distribute - or otherwise make available. You must inform recipients of any such - Covered Software in Executable form as to how they can obtain such - Covered Software in Source Code form in a reasonable manner on or - through a medium customarily used for software exchange. - - 3.2. Modifications. - - The Modifications that You create or to which You contribute are - governed by the terms of this License. You represent that You - believe Your Modifications are Your original creation(s) and/or You - have sufficient rights to grant the rights conveyed by this License. - - 3.3. Required Notices. - - You must include a notice in each of Your Modifications that - identifies You as the Contributor of the Modification. You may not - remove or alter any copyright, patent or trademark notices contained - within the Covered Software, or any notices of licensing or any - descriptive text giving attribution to any Contributor or the - Initial Developer. - - 3.4. Application of Additional Terms. - - You may not offer or impose any terms on any Covered Software in - Source Code form that alters or restricts the applicable version of - this License or the recipients' rights hereunder. You may choose to - offer, and to charge a fee for, warranty, support, indemnity or - liability obligations to one or more recipients of Covered Software. - However, you may do so only on Your own behalf, and not on behalf of - the Initial Developer or any Contributor. You must make it - absolutely clear that any such warranty, support, indemnity or - liability obligation is offered by You alone, and You hereby agree - to indemnify the Initial Developer and every Contributor for any - liability incurred by the Initial Developer or such Contributor as a - result of warranty, support, indemnity or liability terms You offer. - - 3.5. Distribution of Executable Versions. - - You may distribute the Executable form of the Covered Software under - the terms of this License or under the terms of a license of Your - choice, which may contain terms different from this License, - provided that You are in compliance with the terms of this License - and that the license for the Executable form does not attempt to - limit or alter the recipient's rights in the Source Code form from - the rights set forth in this License. If You distribute the Covered - Software in Executable form under a different license, You must make - it absolutely clear that any terms which differ from this License - are offered by You alone, not by the Initial Developer or - Contributor. 
You hereby agree to indemnify the Initial Developer and - every Contributor for any liability incurred by the Initial - Developer or such Contributor as a result of any such terms You offer. - - 3.6. Larger Works. - - You may create a Larger Work by combining Covered Software with - other code not governed by the terms of this License and distribute - the Larger Work as a single product. In such a case, You must make - sure the requirements of this License are fulfilled for the Covered - Software. - -4. Versions of the License. - - 4.1. New Versions. - - Oracle is the initial license steward and may publish revised and/or - new versions of this License from time to time. Each version will be - given a distinguishing version number. Except as provided in Section - 4.3, no one other than the license steward has the right to modify - this License. - - 4.2. Effect of New Versions. - - You may always continue to use, distribute or otherwise make the - Covered Software available under the terms of the version of the - License under which You originally received the Covered Software. If - the Initial Developer includes a notice in the Original Software - prohibiting it from being distributed or otherwise made available - under any subsequent version of the License, You must distribute and - make the Covered Software available under the terms of the version - of the License under which You originally received the Covered - Software. Otherwise, You may also choose to use, distribute or - otherwise make the Covered Software available under the terms of any - subsequent version of the License published by the license steward. - - 4.3. Modified Versions. - - When You are an Initial Developer and You want to create a new - license for Your Original Software, You may create and use a - modified version of this License if You: (a) rename the license and - remove any references to the name of the license steward (except to - note that the license differs from this License); and (b) otherwise - make it clear that the license contains terms which differ from this - License. - -5. DISCLAIMER OF WARRANTY. - - COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS, - WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, - INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE - IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR - NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF - THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE - DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY - OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, - REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN - ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS - AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. - -6. TERMINATION. - - 6.1. This License and the rights granted hereunder will terminate - automatically if You fail to comply with terms herein and fail to - cure such breach within 30 days of becoming aware of the breach. - Provisions which, by their nature, must remain in effect beyond the - termination of this License shall survive. - - 6.2. 
If You assert a patent infringement claim (excluding - declaratory judgment actions) against Initial Developer or a - Contributor (the Initial Developer or Contributor against whom You - assert such claim is referred to as "Participant") alleging that the - Participant Software (meaning the Contributor Version where the - Participant is a Contributor or the Original Software where the - Participant is the Initial Developer) directly or indirectly - infringes any patent, then any and all rights granted directly or - indirectly to You by such Participant, the Initial Developer (if the - Initial Developer is not the Participant) and all Contributors under - Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice - from Participant terminate prospectively and automatically at the - expiration of such 60 day notice period, unless if within such 60 - day period You withdraw Your claim with respect to the Participant - Software against such Participant either unilaterally or pursuant to - a written agreement with Participant. - - 6.3. If You assert a patent infringement claim against Participant - alleging that the Participant Software directly or indirectly - infringes any patent where such claim is resolved (such as by - license or settlement) prior to the initiation of patent - infringement litigation, then the reasonable value of the licenses - granted by such Participant under Sections 2.1 or 2.2 shall be taken - into account in determining the amount or value of any payment or - license. - - 6.4. In the event of termination under Sections 6.1 or 6.2 above, - all end user licenses that have been validly granted by You or any - distributor hereunder prior to termination (excluding licenses - granted to You by any distributor) shall survive termination. - -7. LIMITATION OF LIABILITY. - - UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT - (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE - INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF - COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE - TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR - CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT - LIMITATION, DAMAGES FOR LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER - FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR - LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE - POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT - APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH - PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH - LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR - LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION - AND LIMITATION MAY NOT APPLY TO YOU. - -8. U.S. GOVERNMENT END USERS. - - The Covered Software is a "commercial item," as that term is defined - in 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer - software" (as that term is defined at 48 C.F.R. § - 252.227-7014(a)(1)) and "commercial computer software documentation" - as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent - with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 - (June 1995), all U.S. Government End Users acquire Covered Software - with only those rights set forth herein. This U.S. Government Rights - clause is in lieu of, and supersedes, any other FAR, DFAR, or other - clause or provision that addresses Government rights in computer - software under this License. - -9. MISCELLANEOUS. 
- - This License represents the complete agreement concerning subject - matter hereof. If any provision of this License is held to be - unenforceable, such provision shall be reformed only to the extent - necessary to make it enforceable. This License shall be governed by - the law of the jurisdiction specified in a notice contained within - the Original Software (except to the extent applicable law, if any, - provides otherwise), excluding such jurisdiction's conflict-of-law - provisions. Any litigation relating to this License shall be subject - to the jurisdiction of the courts located in the jurisdiction and - venue specified in a notice contained within the Original Software, - with the losing party responsible for costs, including, without - limitation, court costs and reasonable attorneys' fees and expenses. - The application of the United Nations Convention on Contracts for - the International Sale of Goods is expressly excluded. Any law or - regulation which provides that the language of a contract shall be - construed against the drafter shall not apply to this License. You - agree that You alone are responsible for compliance with the United - States export administration regulations (and the export control - laws and regulation of any other countries) when You use, distribute - or otherwise make available any Covered Software. - -10. RESPONSIBILITY FOR CLAIMS. - - As between Initial Developer and the Contributors, each party is - responsible for claims and damages arising, directly or indirectly, - out of its utilization of rights under this License and You agree to - work with Initial Developer and Contributors to distribute such - responsibility on an equitable basis. Nothing herein is intended or - shall be deemed to constitute any admission of liability. - ------------------------------------------------------------------------- - -NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION -LICENSE (CDDL) - -The code released under the CDDL shall be governed by the laws of the -State of California (excluding conflict-of-law provisions). Any -litigation relating to this License shall be subject to the jurisdiction -of the Federal Courts of the Northern District of California and the -state courts of the State of California, with venue lying in Santa Clara -County, California. - - - - The GNU General Public License (GPL) Version 2, June 1991 - -Copyright (C) 1989, 1991 Free Software Foundation, Inc. -51 Franklin Street, Fifth Floor -Boston, MA 02110-1335 -USA - -Everyone is permitted to copy and distribute verbatim copies -of this license document, but changing it is not allowed. - -Preamble - -The licenses for most software are designed to take away your freedom to -share and change it. By contrast, the GNU General Public License is -intended to guarantee your freedom to share and change free software--to -make sure the software is free for all its users. This General Public -License applies to most of the Free Software Foundation's software and -to any other program whose authors commit to using it. (Some other Free -Software Foundation software is covered by the GNU Library General -Public License instead.) You can apply it to your programs, too. - -When we speak of free software, we are referring to freedom, not price. 
-Our General Public Licenses are designed to make sure that you have the -freedom to distribute copies of free software (and charge for this -service if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs; and that you know you can do these things. - -To protect your rights, we need to make restrictions that forbid anyone -to deny you these rights or to ask you to surrender the rights. These -restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - -For example, if you distribute copies of such a program, whether gratis -or for a fee, you must give the recipients all the rights that you have. -You must make sure that they, too, receive or can get the source code. -And you must show them these terms so they know their rights. - -We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - -Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - -Finally, any free program is threatened constantly by software patents. -We wish to avoid the danger that redistributors of a free program will -individually obtain patent licenses, in effect making the program -proprietary. To prevent this, we have made it clear that any patent must -be licensed for everyone's free use or not licensed at all. - -The precise terms and conditions for copying, distribution and -modification follow. - -TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - -0. This License applies to any program or other work which contains a -notice placed by the copyright holder saying it may be distributed under -the terms of this General Public License. The "Program", below, refers -to any such program or work, and a "work based on the Program" means -either the Program or any derivative work under copyright law: that is -to say, a work containing the Program or a portion of it, either -verbatim or with modifications and/or translated into another language. -(Hereinafter, translation is included without limitation in the term -"modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of running -the Program is not restricted, and the output from the Program is -covered only if its contents constitute a work based on the Program -(independent of having been made by running the Program). Whether that -is true depends on what the Program does. - -1. You may copy and distribute verbatim copies of the Program's source -code as you receive it, in any medium, provided that you conspicuously -and appropriately publish on each copy an appropriate copyright notice -and disclaimer of warranty; keep intact all the notices that refer to -this License and to the absence of any warranty; and give any other -recipients of the Program a copy of this License along with the Program. 
- -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - -2. You may modify your copy or copies of the Program or any portion of -it, thus forming a work based on the Program, and copy and distribute -such modifications or work under the terms of Section 1 above, provided -that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any part - thereof, to be licensed as a whole at no charge to all third parties - under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a notice - that there is no warranty (or else, saying that you provide a - warranty) and that users may redistribute the program under these - conditions, and telling the user how to view a copy of this License. - (Exception: if the Program itself is interactive but does not - normally print such an announcement, your work based on the Program - is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, and -can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based on -the Program, the distribution of the whole must be on the terms of this -License, whose permissions for other licensees extend to the entire -whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of a -storage or distribution medium does not bring the other work under the -scope of this License. - -3. You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections 1 - and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your cost - of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer to - distribute corresponding source code. 
(This alternative is allowed - only for noncommercial distribution and only if you received the - program in object code or executable form with such an offer, in - accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source code -means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to control -compilation and installation of the executable. However, as a special -exception, the source code distributed need not include anything that is -normally distributed (in either source or binary form) with the major -components (compiler, kernel, and so on) of the operating system on -which the executable runs, unless that component itself accompanies the -executable. - -If distribution of executable or object code is made by offering access -to copy from a designated place, then offering equivalent access to copy -the source code from the same place counts as distribution of the source -code, even though third parties are not compelled to copy the source -along with the object code. - -4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt otherwise -to copy, modify, sublicense or distribute the Program is void, and will -automatically terminate your rights under this License. However, parties -who have received copies, or rights, from you under this License will -not have their licenses terminated so long as such parties remain in -full compliance. - -5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and all -its terms and conditions for copying, distributing or modifying the -Program or works based on it. - -6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further restrictions -on the recipients' exercise of the rights granted herein. You are not -responsible for enforcing compliance by third parties to this License. - -7. If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot distribute -so as to satisfy simultaneously your obligations under this License and -any other pertinent obligations, then as a consequence you may not -distribute the Program at all. For example, if a patent license would -not permit royalty-free redistribution of the Program by all those who -receive copies directly or indirectly through you, then the only way you -could satisfy both it and this License would be to refrain entirely from -distribution of the Program. 
- -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is implemented -by public license practices. Many people have made generous -contributions to the wide range of software distributed through that -system in reliance on consistent application of that system; it is up to -the author/donor to decide if he or she is willing to distribute -software through any other system and a licensee cannot impose that choice. - -This section is intended to make thoroughly clear what is believed to be -a consequence of the rest of this License. - -8. If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License may -add an explicit geographical distribution limitation excluding those -countries, so that distribution is permitted only in or among countries -not thus excluded. In such case, this License incorporates the -limitation as if written in the body of this License. - -9. The Free Software Foundation may publish revised and/or new -versions of the General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and -conditions either of that version or of any later version published by -the Free Software Foundation. If the Program does not specify a version -number of this License, you may choose any version ever published by the -Free Software Foundation. - -10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the -author to ask for permission. For software which is copyrighted by the -Free Software Foundation, write to the Free Software Foundation; we -sometimes make exceptions for this. Our decision will be guided by the -two goals of preserving the free status of all derivatives of our free -software and of promoting the sharing and reuse of software generally. - -NO WARRANTY - -11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO -WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. -EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR -OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, -EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE -ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH -YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL -NECESSARY SERVICING, REPAIR OR CORRECTION. - -12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
-WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
-AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR
-DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL
-DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM
-(INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED
-INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF
-THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR
-OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
-
-END OF TERMS AND CONDITIONS
-
-How to Apply These Terms to Your New Programs
-
-If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
-To do so, attach the following notices to the program. It is safest to
-attach them to the start of each source file to most effectively convey
-the exclusion of warranty; and each file should have at least the
-"copyright" line and a pointer to where the full notice is found.
-
- One line to give the program's name and a brief idea of what it does.
- Copyright (C) <year> <name of author>
-
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 2 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful, but
- WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335 USA
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program is interactive, make it output a short notice like this
-when it starts in an interactive mode:
-
- Gnomovision version 69, Copyright (C) year name of author
- Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type
- `show w'. This is free software, and you are welcome to redistribute
- it under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the
-appropriate parts of the General Public License. Of course, the commands
-you use may be called something other than `show w' and `show c'; they
-could even be mouse-clicks or menu items--whatever suits your program.
-
-You should also get your employer (if you work as a programmer) or your
-school, if any, to sign a "copyright disclaimer" for the program, if
-necessary. Here is a sample; alter the names:
-
- Yoyodyne, Inc., hereby disclaims all copyright interest in the
- program `Gnomovision' (which makes passes at compilers) written by
- James Hacker.
-
- signature of Ty Coon, 1 April 1989
- Ty Coon, President of Vice
-
-This General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications
-with the library. If this is what you want to do, use the GNU Library
-General Public License instead of this License.
-
-#
-
-Certain source files distributed by Oracle America, Inc. 
and/or its -affiliates are subject to the following clarification and special -exception to the GPLv2, based on the GNU Project exception for its -Classpath libraries, known as the GNU Classpath Exception, but only -where Oracle has expressly included in the particular source file's -header the words "Oracle designates this particular file as subject to -the "Classpath" exception as provided by Oracle in the LICENSE file -that accompanied this code." - -You should also note that Oracle includes multiple, independent -programs in this software package. Some of those programs are provided -under licenses deemed incompatible with the GPLv2 by the Free Software -Foundation and others. For example, the package includes programs -licensed under the Apache License, Version 2.0. Such programs are -licensed to you under their original licenses. - -Oracle facilitates your further distribution of this package by adding -the Classpath Exception to the necessary parts of its GPLv2 code, which -permits you to use that code in combination with other independent -modules not licensed under the GPLv2. However, note that this would -not permit you to commingle code under an incompatible license with -Oracle's GPLv2 licensed code by, for example, cutting and pasting such -code into a file also containing Oracle's GPLv2 licensed code and then -distributing the result. Additionally, if you were to remove the -Classpath Exception from any of the files to which it applies and -distribute the result, you would likely be required to license some or -all of the other code in that distribution under the GPLv2 as well, and -since the GPLv2 is incompatible with the license terms of some items -included in the distribution by Oracle, removing the Classpath -Exception could therefore effectively compromise your ability to -further distribute the package. - -Proceed with caution and we recommend that you obtain the advice of a -lawyer skilled in open source matters before removing the Classpath -Exception or making modifications to this package which may -subsequently be redistributed and/or involve the use of third party -software. - -CLASSPATH EXCEPTION -Linking this library statically or dynamically with other modules is -making a combined work based on this library. Thus, the terms and -conditions of the GNU General Public License version 2 cover the whole -combination. - -As a special exception, the copyright holders of this library give you -permission to link this library with independent modules to produce an -executable, regardless of the license terms of these independent -modules, and to copy and distribute the resulting executable under -terms of your choice, provided that you also meet, for each linked -independent module, the terms and conditions of the license of that -module. An independent module is a module which is not derived from or -based on this library. If you modify this library, you may extend this -exception to your version of the library, but you are not obligated to -do so. If you do not wish to do so, delete this exception statement -from your version. 
-
diff --git a/learn-kafka/licenses/DWTFYWTPL b/learn-kafka/licenses/DWTFYWTPL
deleted file mode 100644
index 5a8e332..0000000
--- a/learn-kafka/licenses/DWTFYWTPL
+++ /dev/null
@@ -1,14 +0,0 @@
- DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
- Version 2, December 2004
-
- Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
-
- Everyone is permitted to copy and distribute verbatim or modified
- copies of this license document, and changing it is allowed as long
- as the name is changed.
-
- DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
- TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
-
- 0. You just DO WHAT THE FUCK YOU WANT TO.
-
diff --git a/learn-kafka/licenses/argparse-MIT b/learn-kafka/licenses/argparse-MIT
deleted file mode 100644
index 773b0df..0000000
--- a/learn-kafka/licenses/argparse-MIT
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright (C) 2011-2017 Tatsuhiro Tsujikawa
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
diff --git a/learn-kafka/licenses/eclipse-distribution-license-1.0 b/learn-kafka/licenses/eclipse-distribution-license-1.0
deleted file mode 100644
index 5f06513..0000000
--- a/learn-kafka/licenses/eclipse-distribution-license-1.0
+++ /dev/null
@@ -1,13 +0,0 @@
-Eclipse Distribution License - v 1.0
-
-Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors.
-
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-* Neither the name of the Eclipse Foundation, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/learn-kafka/licenses/eclipse-public-license-2.0 b/learn-kafka/licenses/eclipse-public-license-2.0 deleted file mode 100644 index c9f1425..0000000 --- a/learn-kafka/licenses/eclipse-public-license-2.0 +++ /dev/null @@ -1,87 +0,0 @@ -Eclipse Public License - v 2.0 - -THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. -1. DEFINITIONS - -“Contribution” means: - - a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and - b) in the case of each subsequent Contributor: - i) changes to the Program, and - ii) additions to the Program; - where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works. - -“Contributor” means any person or entity that Distributes the Program. - -“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. - -“Program” means the Contributions Distributed in accordance with this Agreement. - -“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors. - -“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. - -“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof. - -“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy. - -“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files. - -“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor. -2. 
GRANT OF RIGHTS - - a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works. - b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. - c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. - d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. - e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3). - -3. 
REQUIREMENTS - -3.1 If a Contributor Distributes the Program in any form, then: - - a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and - b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license: - i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; - ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; - iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and - iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3. - -3.2 When the Program is Distributed as Source Code: - - a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and - b) a copy of this Agreement must be included with each copy of the Program. - -3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices. -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. 
The Indemnified Contributor may participate in any such claim at its own expense. - -For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. -5. NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. -7. GENERAL - -If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. - -If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. - -All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. 
- -Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version. - -Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement. -Exhibit A – Form of Secondary Licenses Notice - -“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.” - - Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses. - - If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. - - You may add additional accurate notices of copyright ownership. - diff --git a/learn-kafka/licenses/jline-BSD-3-clause b/learn-kafka/licenses/jline-BSD-3-clause deleted file mode 100644 index 7e11b67..0000000 --- a/learn-kafka/licenses/jline-BSD-3-clause +++ /dev/null @@ -1,35 +0,0 @@ -Copyright (c) 2002-2018, the original author or authors. -All rights reserved. - -https://opensource.org/licenses/BSD-3-Clause - -Redistribution and use in source and binary forms, with or -without modification, are permitted provided that the following -conditions are met: - -Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with -the distribution. - -Neither the name of JLine nor the names of its contributors -may be used to endorse or promote products derived from this -software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, -BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO -EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, -OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING -IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED -OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/learn-kafka/licenses/jopt-simple-MIT b/learn-kafka/licenses/jopt-simple-MIT deleted file mode 100644 index 54b2732..0000000 --- a/learn-kafka/licenses/jopt-simple-MIT +++ /dev/null @@ -1,24 +0,0 @@ -/* - The MIT License - - Copyright (c) 2004-2016 Paul R. Holser, Jr. - - Permission is hereby granted, free of charge, to any person obtaining - a copy of this software and associated documentation files (the - "Software"), to deal in the Software without restriction, including - without limitation the rights to use, copy, modify, merge, publish, - distribute, sublicense, and/or sell copies of the Software, and to - permit persons to whom the Software is furnished to do so, subject to - the following conditions: - - The above copyright notice and this permission notice shall be - included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE - LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION - WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -*/ diff --git a/learn-kafka/licenses/paranamer-BSD-3-clause b/learn-kafka/licenses/paranamer-BSD-3-clause deleted file mode 100644 index 9eab879..0000000 --- a/learn-kafka/licenses/paranamer-BSD-3-clause +++ /dev/null @@ -1,29 +0,0 @@ -[ ParaNamer used to be 'Pubic Domain', but since it includes a small piece of ASM it is now the same license as that: BSD ] - - Portions copyright (c) 2006-2018 Paul Hammant & ThoughtWorks Inc - Portions copyright (c) 2000-2007 INRIA, France Telecom - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - 1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - 3. Neither the name of the copyright holders nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE - LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF - THE POSSIBILITY OF SUCH DAMAGE. diff --git a/learn-kafka/licenses/slf4j-MIT b/learn-kafka/licenses/slf4j-MIT deleted file mode 100644 index 315bd49..0000000 --- a/learn-kafka/licenses/slf4j-MIT +++ /dev/null @@ -1,24 +0,0 @@ -Copyright (c) 2004-2017 QOS.ch -All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - - diff --git a/learn-kafka/licenses/zstd-jni-BSD-2-clause b/learn-kafka/licenses/zstd-jni-BSD-2-clause deleted file mode 100644 index 66abb8a..0000000 --- a/learn-kafka/licenses/zstd-jni-BSD-2-clause +++ /dev/null @@ -1,26 +0,0 @@ -Zstd-jni: JNI bindings to Zstd Library - -Copyright (c) 2015-present, Luben Karavelov/ All rights reserved. - -BSD License - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, this - list of conditions and the following disclaimer in the documentation and/or - other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/learn-kafka/producer.py b/learn-kafka/producer.py deleted file mode 100644 index 2070219..0000000 --- a/learn-kafka/producer.py +++ /dev/null @@ -1,11 +0,0 @@ -from kafka import KafkaProducer -import json - - -producer = KafkaProducer(bootstrap_servers=['localhost:9092']) -with open('inputfile.txt') as f: - lines = f.readlines() - -for line in lines: - print("Printing", line) - producer.send('quickstart-events', json.dumps({"Content": line}).encode('utf-8')) diff --git a/learn-kafka/site-docs/kafka_2.13-3.5.1-site-docs.tgz b/learn-kafka/site-docs/kafka_2.13-3.5.1-site-docs.tgz deleted file mode 100644 index abf1d38..0000000 Binary files a/learn-kafka/site-docs/kafka_2.13-3.5.1-site-docs.tgz and /dev/null differ diff --git a/learn-kafka/test.txt b/learn-kafka/test.txt deleted file mode 100644 index 1052f66..0000000 --- a/learn-kafka/test.txt +++ /dev/null @@ -1,5 +0,0 @@ -foo -bar -foobar -Another line -Another another line diff --git a/learn-sqlserver/.gitignore b/learn-sqlserver/.gitignore deleted file mode 100644 index 8692412..0000000 --- a/learn-sqlserver/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -1_Setup/data -1_Setup/log -1_Setup/backup diff --git a/learn-sqlserver/1_Setup/Makefile b/learn-sqlserver/1_Setup/Makefile deleted file mode 100644 index 7e66503..0000000 --- a/learn-sqlserver/1_Setup/Makefile +++ /dev/null @@ -1,11 +0,0 @@ -build: - docker compose build - -up: - docker compose up -d - -down: - docker compose down - -restart: - docker compose down && docker compose up -d diff --git a/learn-sqlserver/1_Setup/docker-compose.yml b/learn-sqlserver/1_Setup/docker-compose.yml deleted file mode 100644 index 6c880cc..0000000 --- a/learn-sqlserver/1_Setup/docker-compose.yml +++ /dev/null @@ -1,28 +0,0 @@ -version: "3.9" -services: - sql-server: - container_name: sql-server - hostname: sqlserver - image: "mcr.microsoft.com/mssql/server:2022-latest" - user: root - ports: - - 5533:1433 - environment: - - ACCEPT_EULA=Y - - MSSQL_PID=Developer - - MSSQL_USER=SA - - MSSQL_SA_PASSWORD=Ubunchuu@2022 - volumes: - - ./data:/var/opt/mssql/data - - ./log:/var/opt/mssql/log - - ./backup:/var/opt/mssql/backup - restart: on-failure - healthcheck: - test: /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P "$$MSSQL_SA_PASSWORD" -Q "SELECT 1" || exit - interval: 30s - timeout: 30s - retries: 3 -volumes: - data: {} - log: {} - backup: {} diff --git a/learn-sqlserver/2_TSQL/CreateTable_Complete.sql b/learn-sqlserver/2_TSQL/CreateTable_Complete.sql deleted file mode 100644 index ae58c35..0000000 --- a/learn-sqlserver/2_TSQL/CreateTable_Complete.sql +++ /dev/null @@ -1,5 +0,0 @@ -CREATE TABLE ProductCategories ( - CategoryID int IDENTITY(1,1) NOT NULL, - CategoryName varchar(25) NOT NULL, - CategoryAbbreviation char(2) NOT NULL -); \ No newline at end of file diff --git a/learn-sqlserver/2_TSQL/DeleteRecords_Complete.sql b/learn-sqlserver/2_TSQL/DeleteRecords_Complete.sql deleted file mode 100644 index ea861a7..0000000 --- a/learn-sqlserver/2_TSQL/DeleteRecords_Complete.sql +++ /dev/null @@ -1,8 +0,0 @@ -SELECT * -FROM Customers; - -DELETE FROM Customers -WHERE CustomerID = 1001; - -SELECT * -FROM Orders; \ No newline at end of file diff --git a/learn-sqlserver/2_TSQL/DeleteTable_Complete.sql b/learn-sqlserver/2_TSQL/DeleteTable_Complete.sql deleted file mode 100644 index 48684dc..0000000 --- a/learn-sqlserver/2_TSQL/DeleteTable_Complete.sql +++ /dev/null @@ -1,26 +0,0 @@ --- This script creates a fresh copy of the Red30Tech --- ProductCategories table - - -USE Red30Tech; -GO - -DROP TABLE IF 
diff --git a/learn-sqlserver/2_TSQL/DeleteTable_Complete.sql b/learn-sqlserver/2_TSQL/DeleteTable_Complete.sql
deleted file mode 100644
index 48684dc..0000000
--- a/learn-sqlserver/2_TSQL/DeleteTable_Complete.sql
+++ /dev/null
@@ -1,26 +0,0 @@
--- This script creates a fresh copy of the Red30Tech
--- ProductCategories table
-
-
-USE Red30Tech;
-GO
-
-DROP TABLE IF EXISTS ProductCategories;
-
-CREATE TABLE ProductCategories (
-    CategoryID int IDENTITY(1,1) NOT NULL,
-    CategoryName varchar(25) NOT NULL,
-    CategoryAbbreviation char(2) NOT NULL
-);
-
-INSERT INTO ProductCategories (CategoryName, CategoryAbbreviation)
-    VALUES ('Blueprints', 'BP'),
-        ('Drone Kits', 'DK'),
-        ('Drones', 'DS'),
-        ('eBooks', 'EB'),
-        ('Robot Kits', 'RK'),
-        ('Robots', 'RS'),
-        ('Training Videos', 'TV')
-;
-
-SELECT * FROM ProductCategories;
\ No newline at end of file
diff --git a/learn-sqlserver/2_TSQL/DeleteTable_Start.sql b/learn-sqlserver/2_TSQL/DeleteTable_Start.sql
deleted file mode 100644
index aad60b2..0000000
--- a/learn-sqlserver/2_TSQL/DeleteTable_Start.sql
+++ /dev/null
@@ -1,15 +0,0 @@
-CREATE TABLE ProductCategories (
-    CategoryID int IDENTITY(1,1) NOT NULL,
-    CategoryName varchar(25) NOT NULL,
-    CategoryAbbreviation char(2) NOT NULL
-);
-
-INSERT INTO ProductCategories (CategoryName, CategoryAbbreviation)
-    VALUES ('Blueprints', 'BP'),
-        ('Drone Kits', 'DK'),
-        ('Drones', 'DS'),
-        ('eBooks', 'EB'),
-        ('Robot Kits', 'RK'),
-        ('Robots', 'RS'),
-        ('Training Videos', 'TV')
-;
\ No newline at end of file
diff --git a/learn-sqlserver/2_TSQL/Insert_Complete.sql b/learn-sqlserver/2_TSQL/Insert_Complete.sql
deleted file mode 100644
index aad60b2..0000000
--- a/learn-sqlserver/2_TSQL/Insert_Complete.sql
+++ /dev/null
@@ -1,15 +0,0 @@
-CREATE TABLE ProductCategories (
-    CategoryID int IDENTITY(1,1) NOT NULL,
-    CategoryName varchar(25) NOT NULL,
-    CategoryAbbreviation char(2) NOT NULL
-);
-
-INSERT INTO ProductCategories (CategoryName, CategoryAbbreviation)
-    VALUES ('Blueprints', 'BP'),
-        ('Drone Kits', 'DK'),
-        ('Drones', 'DS'),
-        ('eBooks', 'EB'),
-        ('Robot Kits', 'RK'),
-        ('Robots', 'RS'),
-        ('Training Videos', 'TV')
-;
\ No newline at end of file
diff --git a/learn-sqlserver/2_TSQL/Insert_Start.sql b/learn-sqlserver/2_TSQL/Insert_Start.sql
deleted file mode 100644
index ae58c35..0000000
--- a/learn-sqlserver/2_TSQL/Insert_Start.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-CREATE TABLE ProductCategories (
-    CategoryID int IDENTITY(1,1) NOT NULL,
-    CategoryName varchar(25) NOT NULL,
-    CategoryAbbreviation char(2) NOT NULL
-);
\ No newline at end of file
diff --git a/learn-sqlserver/2_TSQL/Joins_Complete.sql b/learn-sqlserver/2_TSQL/Joins_Complete.sql
deleted file mode 100644
index 329296b..0000000
--- a/learn-sqlserver/2_TSQL/Joins_Complete.sql
+++ /dev/null
@@ -1,15 +0,0 @@
-SELECT * FROM Customers;
-SELECT * FROM Orders;
-SELECT * FROM Products;
-
-SELECT Customers.FirstName,
-    Customers.LastName,
-    Orders.OrderDate,
-    Orders.Quantity,
-    Products.Name,
-    Products.RetailPrice
-FROM Customers INNER JOIN Orders
-    ON Customers.CustomerID = Orders.CustomerID
-    INNER JOIN Products
-    ON Orders.ProductID = Products.ProductID
-;
diff --git a/learn-sqlserver/2_TSQL/Select_Complete.sql b/learn-sqlserver/2_TSQL/Select_Complete.sql
deleted file mode 100644
index ebbd988..0000000
--- a/learn-sqlserver/2_TSQL/Select_Complete.sql
+++ /dev/null
@@ -1,14 +0,0 @@
-SELECT CustomerID, FirstName, LastName, Address, City, State
-FROM Red30Tech.dbo.Customers;
-
-SELECT FirstName, LastName
-FROM dbo.Customers;
-
-SELECT State, City, FirstName, LastName
-FROM dbo.Customers;
-
-SELECT TOP (3) State, City, FirstName, LastName
-FROM dbo.Customers;
-
-SELECT *
-FROM dbo.Customers;
\ No newline at end of file
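Editor's note: Joins_Complete.sql above uses INNER JOIN, which silently drops customers that have no orders. A short sketch of the OUTER-join variant over the same (assumed) Customers/Orders/Products schema keeps those customers, returning NULL in the order and product columns:

```sql
-- Sketch: LEFT JOIN variant of Joins_Complete.sql (assumes the same schema).
-- Customers without orders survive; their order/product columns come back NULL.
SELECT Customers.FirstName,
    Customers.LastName,
    Orders.OrderDate,
    Orders.Quantity,
    Products.Name,
    Products.RetailPrice
FROM Customers LEFT JOIN Orders
    ON Customers.CustomerID = Orders.CustomerID
    LEFT JOIN Products
    ON Orders.ProductID = Products.ProductID
;
```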
diff --git a/learn-sqlserver/2_TSQL/Sort_Complete.sql b/learn-sqlserver/2_TSQL/Sort_Complete.sql
deleted file mode 100644
index ee619ac..0000000
--- a/learn-sqlserver/2_TSQL/Sort_Complete.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-SELECT CustomerID, FirstName, LastName, Address, City, State
-FROM dbo.Customers
-WHERE State = 'CA'
-ORDER BY FirstName DESC, LastName
-;
\ No newline at end of file
diff --git a/learn-sqlserver/2_TSQL/Sort_Start.sql b/learn-sqlserver/2_TSQL/Sort_Start.sql
deleted file mode 100644
index 7d3427f..0000000
--- a/learn-sqlserver/2_TSQL/Sort_Start.sql
+++ /dev/null
@@ -1,3 +0,0 @@
-SELECT CustomerID, FirstName, LastName, Address, City, State
-FROM dbo.Customers
-;
\ No newline at end of file
diff --git a/learn-sqlserver/2_TSQL/Update_Complete.sql b/learn-sqlserver/2_TSQL/Update_Complete.sql
deleted file mode 100644
index 4dadef5..0000000
--- a/learn-sqlserver/2_TSQL/Update_Complete.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-SELECT * FROM Orders;
-
-UPDATE Orders
-SET Quantity = 5
-WHERE OrderID = 1
\ No newline at end of file
diff --git a/learn-sqlserver/2_TSQL/Where_Complete.sql b/learn-sqlserver/2_TSQL/Where_Complete.sql
deleted file mode 100644
index 9542300..0000000
--- a/learn-sqlserver/2_TSQL/Where_Complete.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-SELECT CustomerID, FirstName, LastName, Address, City, State
-FROM dbo.Customers
-WHERE State = 'CA' OR State = 'NY'
-;
\ No newline at end of file
diff --git a/learn-sqlserver/2_TSQL/Where_Start.sql b/learn-sqlserver/2_TSQL/Where_Start.sql
deleted file mode 100644
index 312b180..0000000
--- a/learn-sqlserver/2_TSQL/Where_Start.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-SELECT CustomerID, FirstName, LastName, Address, City, State
-FROM dbo.Customers;
\ No newline at end of file
diff --git a/learn-sqlserver/3_Agg_Procedure_Functions/Aggregate_Functions.sql b/learn-sqlserver/3_Agg_Procedure_Functions/Aggregate_Functions.sql
deleted file mode 100644
index 4cebf45..0000000
--- a/learn-sqlserver/3_Agg_Procedure_Functions/Aggregate_Functions.sql
+++ /dev/null
@@ -1,55 +0,0 @@
-USE WideWorldImporters;
-
-SELECT *
-FROM Application.StateProvinces;
-
-
--- Counting Records
-SELECT COUNT(*)
-FROM Application.StateProvinces;
-
-SELECT COUNT(*) AS CountOfStates
-FROM Application.StateProvinces;
-
-SELECT COUNT(*) AS CountOfStates
-FROM Application.StateProvinces
-WHERE SalesTerritory = 'Southwest';
-
-SELECT COUNT(*) AS CountOfStates
-FROM Application.StateProvinces
-WHERE LatestRecordedPopulation > 5000000;
-
-
--- Grouping Records
-SELECT SalesTerritory, StateProvinceName
-FROM Application.StateProvinces
-ORDER BY SalesTerritory;
-
-SELECT SalesTerritory, Count(StateProvinceName) AS NumberOfStates
-FROM Application.StateProvinces
-GROUP BY SalesTerritory
-ORDER BY SalesTerritory;
-
-
--- Maximum, Minimum, and Average
-SELECT MAX(LatestRecordedPopulation)
-FROM Application.StateProvinces;
-
-SELECT MAX(LatestRecordedPopulation) AS MaxPopulation,
-    MIN(LatestRecordedPopulation) AS MinPopulation,
-    AVG(LatestRecordedPopulation) AS AvgPopulation
-FROM Application.StateProvinces;
-
-
--- Subquery
-SELECT StateProvinceName, LatestRecordedPopulation
-FROM Application.StateProvinces
-WHERE LatestRecordedPopulation =
-    (SELECT MAX(LatestRecordedPopulation) FROM Application.StateProvinces)
-;
-
-SELECT StateProvinceName, LatestRecordedPopulation
-FROM Application.StateProvinces
-WHERE LatestRecordedPopulation >
-    (SELECT AVG(LatestRecordedPopulation) FROM Application.StateProvinces)
-;
\ No newline at end of file
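Editor's note: Aggregate_Functions.sql stops at GROUP BY; the natural next step, filtering on an aggregate, requires HAVING rather than WHERE. A short sketch against the same Application.StateProvinces table (the threshold of five is arbitrary, chosen only for illustration):

```sql
-- Sketch: WHERE filters rows before grouping; HAVING filters the groups after.
-- Only sales territories containing more than five states/provinces survive.
SELECT SalesTerritory, COUNT(StateProvinceName) AS NumberOfStates
FROM Application.StateProvinces
GROUP BY SalesTerritory
HAVING COUNT(StateProvinceName) > 5
ORDER BY SalesTerritory;
```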
diff --git a/learn-sqlserver/3_Agg_Procedure_Functions/Create_Index_Complete.sql b/learn-sqlserver/3_Agg_Procedure_Functions/Create_Index_Complete.sql
deleted file mode 100644
index 628beaf..0000000
--- a/learn-sqlserver/3_Agg_Procedure_Functions/Create_Index_Complete.sql
+++ /dev/null
@@ -1,7 +0,0 @@
--- Creates a nonclustered index on the LastName column of the Red30Tech Customers table.
-
-USE Red30Tech;
-GO
-
-CREATE NONCLUSTERED INDEX IX_Customers_LastName
-ON dbo.Customers (LastName ASC);
diff --git a/learn-sqlserver/3_Agg_Procedure_Functions/Parameters.sql b/learn-sqlserver/3_Agg_Procedure_Functions/Parameters.sql
deleted file mode 100644
index b738aeb..0000000
--- a/learn-sqlserver/3_Agg_Procedure_Functions/Parameters.sql
+++ /dev/null
@@ -1,57 +0,0 @@
--- switch to the WideWorldImporters database
-USE WideWorldImporters;
-GO
-
--- create procedure with parameter
-CREATE PROCEDURE Warehouse.uspSelectProductsByColor
-    @paramColor char(20)
-AS
-SELECT Warehouse.StockItems.StockItemID,
-    Warehouse.StockItems.StockItemName,
-    Warehouse.StockItemHoldings.QuantityOnHand,
-    Warehouse.StockItems.RecommendedRetailPrice,
-    Warehouse.Colors.ColorName
-FROM Warehouse.Colors INNER JOIN
-    Warehouse.StockItems ON Warehouse.Colors.ColorID = Warehouse.StockItems.ColorID INNER JOIN
-    Warehouse.StockItemHoldings ON Warehouse.StockItems.StockItemID = Warehouse.StockItemHoldings.StockItemID
-WHERE ColorName = @paramColor
-;
-GO
-
--- execute the stored procedure with various parameters
-EXEC Warehouse.uspSelectProductsByColor 'Black';
-GO
-EXEC Warehouse.uspSelectProductsByColor 'Blue';
-GO
-EXEC Warehouse.uspSelectProductsByColor;
-GO
-
--- alter the procedure to include a default value and error handling
-ALTER PROCEDURE Warehouse.uspSelectProductsByColor
-    @paramColor char(20) = NULL
-AS
-IF @paramColor IS NULL
-BEGIN
-    PRINT 'A valid product color is required.'
-    RETURN
-END
-SELECT Warehouse.StockItems.StockItemID,
-    Warehouse.StockItems.StockItemName,
-    Warehouse.StockItemHoldings.QuantityOnHand,
-    Warehouse.StockItems.RecommendedRetailPrice,
-    Warehouse.Colors.ColorName
-FROM Warehouse.Colors INNER JOIN
-    Warehouse.StockItems ON Warehouse.Colors.ColorID = Warehouse.StockItems.ColorID INNER JOIN
-    Warehouse.StockItemHoldings ON Warehouse.StockItems.StockItemID = Warehouse.StockItemHoldings.StockItemID
-WHERE ColorName = @paramColor
-;
-GO
-
-EXEC Warehouse.uspSelectProductsByColor;
-GO
-EXEC Warehouse.uspSelectProductsByColor 'Red';
-GO
-
--- clean up the WideWorldImporters database
-DROP PROCEDURE Warehouse.uspSelectProductsByColor;
-GO
\ No newline at end of file
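Editor's note: Parameters.sql validates its parameter with PRINT/RETURN, which only writes to the messages pane and does not raise an error a caller can trap. A hedged sketch of two common refinements, named-parameter invocation and THROW-based validation; the error number 50001 is an arbitrary choice, not from the removed script:

```sql
-- Sketch: invoking the procedure with a named parameter is self-documenting.
EXEC Warehouse.uspSelectProductsByColor @paramColor = 'Red';
GO

-- Sketch: THROW raises a real, catchable error instead of printing a message.
-- Error number 50001 is arbitrary (user-defined errors start at 50000).
ALTER PROCEDURE Warehouse.uspSelectProductsByColor
    @paramColor char(20) = NULL
AS
IF @paramColor IS NULL
    THROW 50001, 'A valid product color is required.', 1;
SELECT StockItemID, StockItemName
FROM Warehouse.StockItems
WHERE ColorID = (SELECT ColorID FROM Warehouse.Colors WHERE ColorName = @paramColor);
GO
```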
diff --git a/learn-sqlserver/3_Agg_Procedure_Functions/Stored_Procedure.sql b/learn-sqlserver/3_Agg_Procedure_Functions/Stored_Procedure.sql
deleted file mode 100644
index 3689e9d..0000000
--- a/learn-sqlserver/3_Agg_Procedure_Functions/Stored_Procedure.sql
+++ /dev/null
@@ -1,46 +0,0 @@
--- switch to the WideWorldImporters database
-USE WideWorldImporters;
-GO
-
--- view all stored procedures in current database
-SELECT SCHEMA_NAME(schema_id) AS SchemaName,
-    name AS ProcedureName
-FROM sys.procedures
-ORDER BY SchemaName
-;
-GO
-
--- create a stored procedure to identify inventory
-CREATE PROCEDURE Warehouse.uspLowInventory
-AS
-SELECT Warehouse.StockItems.StockItemID AS ID,
-    Warehouse.StockItems.StockItemName AS 'Item Name',
-    Warehouse.StockItemHoldings.QuantityOnHand AS 'On Hand',
-    Warehouse.StockItemHoldings.ReorderLevel AS 'Reorder Level'
-FROM Warehouse.StockItems INNER JOIN
-    Warehouse.StockItemHoldings ON Warehouse.StockItems.StockItemID = Warehouse.StockItemHoldings.StockItemID
-ORDER BY 'On Hand';
-GO
-
--- execute the stored procedure
-EXECUTE Warehouse.uspLowInventory
-
--- alter the procedure to locate low inventory
-ALTER PROCEDURE Warehouse.uspLowInventory
-AS
-SELECT Warehouse.StockItems.StockItemID AS ID,
-    Warehouse.StockItems.StockItemName AS 'Item Name',
-    Warehouse.StockItemHoldings.QuantityOnHand AS 'On Hand',
-    Warehouse.StockItemHoldings.ReorderLevel AS 'Reorder Level'
-FROM Warehouse.StockItems INNER JOIN
-    Warehouse.StockItemHoldings ON Warehouse.StockItems.StockItemID = Warehouse.StockItemHoldings.StockItemID
-WHERE ReorderLevel > QuantityOnHand
-ORDER BY 'On Hand';
-GO
-
--- execute the stored procedure
-EXECUTE Warehouse.uspLowInventory
-
--- clean up the WideWorldImporters database
-DROP PROCEDURE Warehouse.uspLowInventory;
-GO
\ No newline at end of file
diff --git a/learn-sqlserver/3_Agg_Procedure_Functions/User_Functions_Complete.sql b/learn-sqlserver/3_Agg_Procedure_Functions/User_Functions_Complete.sql
deleted file mode 100644
index 2c2e87a..0000000
--- a/learn-sqlserver/3_Agg_Procedure_Functions/User_Functions_Complete.sql
+++ /dev/null
@@ -1,17 +0,0 @@
-USE WideWorldImporters;
-
--- create a custom function to convert degrees celsius into degrees fahrenheit
-CREATE FUNCTION Warehouse.ToFahrenheit (@Celsius decimal(10,2))
-RETURNS decimal(10,2)
-AS
-BEGIN
-    DECLARE @Fahrenheit decimal(10,2);
-    SET @Fahrenheit = (@Celsius * 1.8 + 32);
-    RETURN @Fahrenheit
-END;
-
--- use the custom function in a select statement
-SELECT TOP 100 VehicleTemperatureID,
-    Temperature AS Celsius,
-    Warehouse.ToFahrenheit(Temperature) AS Fahrenheit
-FROM Warehouse.VehicleTemperatures;
\ No newline at end of file
diff --git a/learn-sqlserver/4_Mask/Dynamic_Mask.sql b/learn-sqlserver/4_Mask/Dynamic_Mask.sql
deleted file mode 100644
index 1cce6fb..0000000
--- a/learn-sqlserver/4_Mask/Dynamic_Mask.sql
+++ /dev/null
@@ -1,22 +0,0 @@
-USE Red30Tech;
-GO
-
--- view the data
-SELECT * FROM sales.Customers;
-GO
-
--- add data masking to the Address field
-ALTER TABLE sales.Customers
-ALTER COLUMN Address ADD MASKED WITH (Function = 'default()');
-    -- (Function = 'email()');
-    -- (Function = 'random([start range], [end range])');
-    -- (Function = 'partial(prefix, [padding], suffix)');
-    -- (Function = 'partial(2, "-----", 1)');
-
--- view the data again
-SELECT * FROM sales.Customers;
-GO
-
--- remove data masking from Address column
-ALTER TABLE sales.Customers
-ALTER COLUMN Address DROP MASKED;
\ No newline at end of file
diff --git a/learn-sqlserver/4_Mask/Schema_Complete.sql b/learn-sqlserver/4_Mask/Schema_Complete.sql
deleted file mode 100644
index 27eb75d..0000000
--- a/learn-sqlserver/4_Mask/Schema_Complete.sql
+++ /dev/null
@@ -1,16 +0,0 @@
-USE Red30Tech;
-GO
-
--- create a new schema in the database
-CREATE SCHEMA sales;
-GO
-
-
--- move an existing table into the new schema
-ALTER SCHEMA sales TRANSFER dbo.Customers;
-GO
-
-
--- elevate Octavia's permissions within the schema
-GRANT INSERT ON SCHEMA :: sales TO Octavia; -- can also GRANT UPDATE or GRANT DELETE
-GO
\ No newline at end of file
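Editor's note: when Dynamic_Mask.sql is run as an administrator, the "view the data again" step still shows unmasked values, because db_owner members implicitly hold the UNMASK permission. A sketch of how to actually see the mask take effect, impersonating a hypothetical low-privilege user (MaskTester is an illustrative name, not part of the removed script):

```sql
-- Sketch: masks only apply to principals without the UNMASK permission.
-- MaskTester is a hypothetical user created purely for this demonstration.
CREATE USER MaskTester WITHOUT LOGIN;
GRANT SELECT ON sales.Customers TO MaskTester;

EXECUTE AS USER = 'MaskTester';
SELECT * FROM sales.Customers;  -- Address now comes back masked (e.g. 'xxxx')
REVERT;

DROP USER MaskTester;
```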