# OpenShift Commands
## Tasks Performed
## Terminal
```cmd=
# Log in via the terminal
oc login --token=sha256~hj47ma2LjzYfCSftNZ2xV692EhBEVo1Xu58g20R3Yd4 --server=https://api.ccr-rosapoc.yx4i.p3.openshiftapps.com:443
# Create the project
oc new-project projetojavakafka
# ok
oc whoami
dev1
mkdir kafkanode
cd kafkanode
oc explain pods
# Linux strategy: I set up Linux inside Windows (WSL)
edson@caad-001649:~/kafka/zookeeper/bin$ zkServer.sh start
ZooKeeper JMX enabled by default
Using config: /mnt/c/apache-zookeeper-3.9.1/bin/../conf/zoo.cfg
Starting zookeeper ... STARTED
edson@caad-001649:~/kafka/zookeeper/bin$
tar -xzf jdk-17_linux-x64_bin.tar.gz
sudo mv jdk-17.0.2 /usr/lib/jvm/jdk-17   # extract first, then move; name matches JAVA_HOME below
# Configuring Java 17 on Linux
export JAVA_HOME=/usr/lib/jvm/jdk-17
export PATH=$JAVA_HOME/bin:$PATH
nano ~/.bashrc
source ~/.bashrc
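# Quick check (assuming the two exports above were added to ~/.bashrc): confirm which JDK is active
echo $JAVA_HOME
java -version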
ls -lt | head -n 6
# (run from /mnt/c/Users/kaad-001649/Downloads)
## Configuring Scala and the other tools (via SDKMAN)
sdk install grails
sdk install groovy
sdk install gradle
sdk install jmeter
sdk install karaf
tar -xvf spring-tool-suite-4-4.21.1.RELEASE-e4.30.0-linux.gtk.x86_64.tar.gz
./SpringToolSuite4
# Everything is being installed on Windows via Linux (WSL)
# Attention: everything will be driven from Linux
## Switching the workflow (Windows to Linux/WSL)
# ZooKeeper installed
# Java installed (Java 17)
sudo adduser kafka
sudo adduser kafka sudo
su -l kafka
sudo chmod 700 /home/edson
sudo chown edson:edson /home/edson
sudo chown kafka:kafka /home/kafka
sudo -u edson /bin/bash
sudo ./gradlew generateProtocolErrorDocs
sudo su - kafka
sudo passwd kafka -u
sudo snap install --beta kafka
sudo snap start kafka
snap info kafka
sudo snap refresh kafka
kafka.kafka-topics --create --topic test --partitions 1 --replication-factor 1 --if-not-exists --zookeeper localhost:2181
su kafka
cd kafka
./gradlew jar -PscalaVersion=2.13.12
sudo CFLAGS=-Wno-error=format-overflow gem install kafkat
sudo ./gradlew jar -PscalaVersion=2.13.12
# Using the Linux environment to create the consumers and producers
sudo update-alternatives --config java
```
#### sudo ./gradlew jar -PscalaVersion=2.13.12

```cmd=
sudo update-alternatives --config java
sudo update-alternatives --config javac
sudo update-alternatives --config jar
export JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64
export PATH=$JAVA_HOME/bin:$PATH
# Switching to Java 11
sudo cp -r ~/kafka/kafka3.7.0 /usr/local/kafka
sudo ./gradlew jar -PscalaVersion=2.13.12
zkServer.sh restart
bin/kafka-topics.sh --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic topicCcr
sudo apt-get install zookeeperd
sudo adduser kafka
sudo adduser kafka sudo
su -l kafka
$ nano ~/kafka/config/server.properties
delete.topic.enable = true
log.dirs=/home/kafka/logs
$ sudo nano /etc/systemd/system/zookeeper.service
[Unit]
Requires=network.target remote-fs.target
After=network.target remote-fs.target
[Service]
Type=simple
User=kafka
ExecStart=/home/kafka/kafka/bin/zookeeper-server-start.sh /home/kafka/kafka/config/zookeeper.properties
ExecStop=/home/kafka/kafka/bin/zookeeper-server-stop.sh
Restart=on-abnormal
[Install]
WantedBy=multi-user.target
#
$ sudo nano /etc/systemd/system/kafka.service
[Unit]
Requires=zookeeper.service
After=zookeeper.service
[Service]
Type=simple
User=kafka
ExecStart=/bin/sh -c '/home/kafka/kafka/bin/kafka-server-start.sh /home/kafka/kafka/config/server.properties > /home/kafka/kafka/kafka.log 2>&1'
ExecStop=/home/kafka/kafka/bin/kafka-server-stop.sh
Restart=on-abnormal
[Install]
WantedBy=multi-user.target
$ sudo systemctl start kafka
sudo systemctl status kafka
$ sudo systemctl enable zookeeper
$ sudo systemctl enable kafka
# Kafka 3.x: --zookeeper was removed and --broker-list deprecated; use --bootstrap-server
$ ~/kafka/bin/kafka-topics.sh --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic TutorialTopic
$ echo "Hello, World" | ~/kafka/bin/kafka-console-producer.sh --bootstrap-server localhost:9092 --topic TutorialTopic > /dev/null
$ sudo apt install ruby ruby-dev build-essential
$ sudo CFLAGS=-Wno-error=format-overflow gem install kafkat
$ nano ~/.kafkatcfg
{
"kafka_path": "~/kafka",
"log_path": "/home/kafka/logs",
"zk_path": "localhost:2181"
}
kafkat partitions
```
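To double-check the broker end to end, a minimal sketch assuming Kafka is installed under `~/kafka` and listening on `localhost:9092` as configured above: read back the message produced into `TutorialTopic` and list the broker's topics.
```cmd=
# consume the single test message, then list the topics on the broker
~/kafka/bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic TutorialTopic --from-beginning --max-messages 1
~/kafka/bin/kafka-topics.sh --bootstrap-server localhost:9092 --list
```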

## Additional install steps
```cmd=
sudo apt-get update
sudo apt install default-jdk
ln -s kafka_2.12-3.2.1 kafka
# Step 4: set the PATH for Kafka
sudo nano .profile
# Add the following line to .profile:
export PATH=/home/user/kafka/bin:$PATH
source .profile
sudo apt-get install -y zookeeperd
sudo nano /etc/systemd/system/zookeeper.service
[Unit]
Description=Apache Zookeeper server
Documentation=http://zookeeper.apache.org
Requires=network.target remote-fs.target
After=network.target remote-fs.target
[Service]
Type=simple
ExecStart=/home/user/kafka/bin/zookeeper-server-start.sh /home/user/kafka/config/zookeeper.properties
ExecStop=/home/user/kafka/bin/zookeeper-server-stop.sh
Restart=on-abnormal
[Install]
WantedBy=multi-user.target
#
sudo nano /etc/systemd/system/kafka.service
[Unit]
Description=Apache Kafka Server
Documentation=http://kafka.apache.org/documentation.html
Requires=zookeeper.service
[Service]
Type=simple
Environment="JAVA_HOME=/usr/lib/jvm/java-1.11.0-openjdk-amd64"
ExecStart=/home/user/kafka/bin/kafka-server-start.sh /home/user/kafka/config/server.properties
ExecStop=/home/user/kafka/bin/kafka-server-stop.sh
[Install]
WantedBy=multi-user.target
sudo systemctl enable zookeeper
sudo systemctl start zookeeper
sudo systemctl status zookeeper
sudo systemctl enable kafka
sudo systemctl start kafka
sudo systemctl status kafka
sudo systemctl enable kafka
sudo lsof -i :2181
sudo netstat -tuln | grep 2181
kill -9 pid
taskkill /PID 9232 /F
sudo snap remove kafka
```
# Kubernetes
## Minikube install
```cmd=
New-Item -Path 'c:\' -Name 'minikube' -ItemType Directory -Force
Invoke-WebRequest -OutFile 'c:\minikube\minikube.exe' -Uri 'https://github.com/kubernetes/minikube/releases/latest/download/minikube-windows-amd64.exe' -UseBasicParsing
```
### Minikube part 2: add minikube to PATH
```cmd=
$oldPath = [Environment]::GetEnvironmentVariable('Path', [EnvironmentVariableTarget]::Machine)
if ($oldPath.Split(';') -inotcontains 'C:\minikube'){
[Environment]::SetEnvironmentVariable('Path', $('{0};C:\minikube' -f $oldPath), [EnvironmentVariableTarget]::Machine)
}
```
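After reopening the terminal so the new PATH is picked up, a quick sanity check (a minimal sketch; the Docker driver and an already-installed kubectl are assumptions, see the Install kubectl section further down):
```cmd=
minikube version
minikube start --driver=docker
kubectl get nodes
```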
## Install Kubernetes (Windows)
## https://minikube.sigs.k8s.io/docs/handbook/controls/
## Python install (pandas, Flask, PyMongo)
```cmd=
python -m venv venv
source venv/bin/activate     # Linux/WSL
venv\Scripts\activate        # Windows
pip install flask flask-mysqldb
pip install pandas
pip install flask
pip install flask_pymongo
pip install werkzeug         # werkzeug.security is a module of the Werkzeug package, not a separate package
python.exe -m pip install --upgrade pip
minikube start
```

```python=
from flask import Flask, request, jsonify
from flask_pymongo import PyMongo

app = Flask(__name__)
app.config["MONGO_URI"] = "mongodb://localhost:27017/databasepy"
mongo = PyMongo(app)


class User:
    def __init__(self, name, email, password, status):
        self.name = name
        self.email = email
        self.password = password
        self.status = status

    def toString(self):
        return f"Name: {self.name}, email: {self.email}, password: {self.password}, status: {self.status}"

    def toDict(self):
        return {
            "name": self.name,
            "email": self.email,
            "password": self.password
        }


@app.route('/api/register', methods=['POST'])
def register():
    data = request.get_json()
    name = data['name']
    email = data['email']
    password = data['password']
    status = data['status']
    user = User(name, email, password, status)
    mongo.db.users.insert_one(user.toDict())
    return jsonify(user.toDict())


if __name__ == '__main__':
    app.run(debug=True)
```
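A quick local test of the `/api/register` endpoint, assuming the app is running on Flask's default port 5000 and MongoDB is listening on localhost:27017 (run from WSL/bash):
```cmd=
curl -X POST http://localhost:5000/api/register \
  -H "Content-Type: application/json" \
  -d '{"name":"test","email":"test@example.com","password":"secret","status":"active"}'
```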
```cmd=
kubectl version --client
kubectl version
oc status
# oc status returned an error here
```
## Minikube
```cmd=
minikube start
kubectl get po -A
minikube stop
minikube delete
```
https://developers.redhat.com/blog/2019/06/10/accessing-apache-kafka-in-strimzi-part-3-red-hat-openshift-routes?p=601277#customizations
https://github.com/strimzi/strimzi-kafka-operator
https://strimzi.io/docs/operators/latest/configuri
## Install kubectl
```cmd=
curl.exe -LO "https://dl.k8s.io/release/v1.29.2/bin/windows/amd64/kubectl.exe"
CertUtil -hashfile kubectl.exe SHA256
New-Item -Path 'c:\' -Name 'minikube' -ItemType Directory -Force
Invoke-WebRequest -OutFile 'c:\minikube\minikube.exe' -Uri 'https://github.com/kubernetes/minikube/releases/latest/download/minikube-windows-amd64.exe' -UseBasicParsing
$oldPath = [Environment]::GetEnvironmentVariable('Path', [EnvironmentVariableTarget]::Machine)
if ($oldPath.Split(';') -inotcontains 'C:\minikube'){
[Environment]::SetEnvironmentVariable('Path', $('{0};C:\minikube' -f $oldPath), [EnvironmentVariableTarget]::Machine)
}
minikube start
```
## OpenShift
```cmd
oc explain pods
```

```cmd=
oc new-project my-project
```
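To confirm the project was created and is the active one (assuming the `oc login` above is still valid):
```cmd=
oc project my-project
oc get pods
```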
### https://www.mongodb.com/docs/manual/tutorial/install-mongodb-on-windows/
https://www.scala-lang.org/download/
```cmd=
pip install spylon-kernel
python -m spylon_kernel install
cd spark-2.3.1-bin-hadoop2.7/bin
./spark-shell
minikube stop
minikube delete
minikube start
```
## OpenShift ROSA
```cmd
rosa login --token="eyJhbGciOiJIUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJhZDUyMjdhMy1iY2ZkLTRjZjAtYTdiNi0zOTk4MzVhMDg1NjYifQ.eyJpYXQiOjE3MTAxNjIzMjYsImp0aSI6Ijk5MTc2ZGVkLTc4YTMtNDhkYS1hODJlLWU0NGFiODkwYTBlMCIsImlzcyI6Imh0dHBzOi8vc3NvLnJlZGhhdC5jb20vYXV0aC9yZWFsbXMvcmVkaGF0LWV4dGVybmFsIiwiYXVkIjoiaHR0cHM6Ly9zc28ucmVkaGF0LmNvbS9hdXRoL3JlYWxtcy9yZWRoYXQtZXh0ZXJuYWwiLCJzdWIiOiJmOjUyOGQ3NmZmLWY3MDgtNDNlZC04Y2Q1LWZlMTZmNGZlMGNlNjplZHNvbmp1bmlvci5rYWFkQGdydXBvY2NyLmNvbS5iciIsInR5cCI6Ik9mZmxpbmUiLCJhenAiOiJjbG91ZC1zZXJ2aWNlcyIsIm5vbmNlIjoiZGY3OGRlM2EtYjQ1OC00ODRiLTg4MDYtODM2NjFkYjAyZTNjIiwic2Vzc2lvbl9zdGF0ZSI6IjUyOWI5OTAxLTlkOWYtNDQyNi05MWE0LTNlMzNmNTUxN2UwYSIsInNjb3BlIjoib3BlbmlkIGFwaS5pYW0uc2VydmljZV9hY2NvdW50cyBvZmZsaW5lX2FjY2VzcyIsInNpZCI6IjUyOWI5OTAxLTlkOWYtNDQyNi05MWE0LTNlMzNmNTUxN2UwYSJ9.ALNplqUWmhA98yPZin7tCJU_W1mck68jz1kxI-rUOto"
```
https://docs.openshift.com/container-platform/4.4/cli_reference/openshift_cli/getting-started-cli.html?extIdCarryOver=true&intcmp=7013a000003SvYkAAK&sc_cid=7015Y000003ssZtQAI#creating-a-project
```cmd=
oc get nodes
```
## https://www.sqlite.org/download.html
## Maven module projects
```cmd=
cd C:\Users\kaad-001649\projetos
mkdir modulos
cd modulos
# Main project
## Create the main project (projetomain)
mvn archetype:generate -DgroupId=br.com.arq -DartifactId=projetomain -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false
mvn archetype:generate -DgroupId=com.exemplo -DartifactId=projeto-principal -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false
mkdir kafka-producer-consumer
cd kafka-producer-consumer
# C:\Users\kaad-001649\projetos\modulos\projetomain\kafka-consumer-producer
mvn archetype:generate -DgroupId=br.com.arq.kafka -DartifactId=kafka-consumer-producer -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false
```
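A quick build to confirm the generated module compiles, assuming Maven is on the PATH and the command is run from the directory where `archetype:generate` was executed:
```cmd=
cd kafka-consumer-producer
mvn -q clean package
```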
## Repeating the module pattern
```cmd=
mvn archetype:generate -DgroupId=br.com.arq -DartifactId=projetomain -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false
mkdir kafka
cd kafka
mvn archetype:generate -DgroupId=br.com.arq.kafka -DartifactId=kafka-consumer-producer -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false
mkdir oracle
cd oracle
mvn archetype:generate -DgroupId=br.com.arq.oracle -DartifactId=oracle-module -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false
cd ..
mkdir grafana
cd grafana
mvn archetype:generate -DgroupId=br.com.arq.grafana -DartifactId=grafana-module -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false
```