
Commit 2e2d612

Author: dushixiang (committed)

- Remove the legacy frontend pages
- Fix "replica count displayed incorrectly" (fixed #50)

1 parent dec1de5, commit 2e2d612


50 files changed (+68, -4443 lines)

Dockerfile

Lines changed: 34 additions & 3 deletions
@@ -1,21 +1,52 @@
 #
 # Build stage
 #
-FROM maven:3.6.0-jdk-11-slim AS build
+FROM node:16 AS front-build
+
+WORKDIR /app
+
+COPY web .
+
+RUN yarn && yarn build
+
+FROM maven:3.8.7-amazoncorretto-17 AS build
 
 WORKDIR /app
 
 COPY src src
 COPY pom.xml pom.xml
 COPY LICENSE LICENSE
+COPY --from=front-build /app/dist src/main/resources/static
 
 RUN mvn -f pom.xml clean package -Dmaven.test.skip=true
 
 
+# base image to build a JRE
+FROM amazoncorretto:17.0.6-alpine as corretto-jdk
+
+# required for strip-debug to work
+RUN apk add --no-cache binutils
+
+# Build small JRE image
+RUN $JAVA_HOME/bin/jlink \
+    --verbose \
+    --add-modules ALL-MODULE-PATH \
+    --strip-debug \
+    --no-man-pages \
+    --no-header-files \
+    --compress=2 \
+    --output /customjre
+
 #
 # Package stage
 #
-FROM openjdk:11-jre-slim
+FROM alpine:latest
+
+ENV JAVA_HOME=/jre
+ENV PATH="${JAVA_HOME}/bin:${PATH}"
+
+# copy JRE from the base image
+COPY --from=corretto-jdk /customjre $JAVA_HOME
 
 ENV SERVER_PORT 8080
 ENV DEFAULT_USERNAME admin
@@ -28,4 +59,4 @@ COPY --from=build /app/LICENSE LICENSE
 
 EXPOSE $SERVER_PORT
 
-ENTRYPOINT ["java", "-Djava.security.egd=file:/dev/./urandom", "-jar", "/usr/local/kafka-map/kafka-map.jar", "--server.port=${SERVER_PORT}", "--default.username=${DEFAULT_USERNAME}", "--default.password=${DEFAULT_PASSWORD}"]
+ENTRYPOINT ["/jre/bin/java", "-Djava.security.egd=file:/dev/./urandom", "-jar", "/usr/local/kafka-map/kafka-map.jar", "--server.port=${SERVER_PORT}", "--default.username=${DEFAULT_USERNAME}", "--default.password=${DEFAULT_PASSWORD}"]

build.sh

Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+cd web
+yarn && yarn build | exit
+mv dist ../src/main/resources/static
+
+echo "build frontend success"
+
+cd ../
+mvn -f pom.xml clean package -Dmaven.test.skip=true | exit
+
+echo "build kafka-map success"

graalvm/reflect-config.json

Lines changed: 4 additions & 0 deletions
@@ -26,5 +26,9 @@
   {
     "name": "com.github.benmanes.caffeine.cache.PSAMS",
     "allDeclaredConstructors": true
+  },
+  {
+    "name": "org.hibernate.community.dialect.SQLiteDialect",
+    "allDeclaredConstructors": true
   }
 ]

pom.xml

Lines changed: 2 additions & 1 deletion
@@ -6,7 +6,7 @@
     <parent>
         <groupId>org.springframework.boot</groupId>
         <artifactId>spring-boot-starter-parent</artifactId>
-        <version>3.0.0</version>
+        <version>3.0.1</version>
         <relativePath/> <!-- lookup parent from repository -->
     </parent>
 
@@ -35,6 +35,7 @@
             <artifactId>lombok</artifactId>
             <optional>true</optional>
         </dependency>
+
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>

src/main/java/cn/typesafe/km/service/TopicService.java

Lines changed: 13 additions & 7 deletions
@@ -7,9 +7,11 @@
 import org.apache.kafka.common.*;
 import org.apache.kafka.common.config.ConfigResource;
 import org.springframework.stereotype.Service;
+import org.springframework.util.CollectionUtils;
 import org.springframework.util.StringUtils;
 
 import jakarta.annotation.Resource;
+
 import java.util.*;
 import java.util.concurrent.ExecutionException;
 import java.util.stream.Collectors;
@@ -59,6 +61,10 @@ public List<Topic> topics(String clusterId, Set<String> topicNames) throws Inter
                     topic.setPartitionsCount(e.getValue().partitions().size());
                     topic.setTotalLogSize(0L);
                     topic.setReplicaCount(0);
+                    List<TopicPartitionInfo> partitions = e.getValue().partitions();
+                    if (!CollectionUtils.isEmpty(partitions)) {
+                        topic.setReplicaCount(partitions.get(0).replicas().size());
+                    }
                     return topic;
                 })
                 .collect(Collectors.toList());
@@ -85,7 +91,6 @@ public List<Topic> topics(String clusterId, Set<String> topicNames) throws Inter
                 }
                 ReplicaInfo replicaInfo = replicaInfoEntry.getValue();
                 long size = replicaInfo.size();
-                topic.setReplicaCount(topic.getReplicaCount() + 1);
                 topic.setTotalLogSize(topic.getTotalLogSize() + size);
             }
         }
@@ -99,17 +104,18 @@ public List<Topic> topics(String clusterId, Set<String> topicNames) throws Inter
     public TopicInfo info(String clusterId, String topicName) throws ExecutionException, InterruptedException {
         AdminClient adminClient = clusterService.getAdminClient(clusterId);
         try (KafkaConsumer<String, String> kafkaConsumer = clusterService.createConsumer(clusterId)) {
-            TopicDescription topicDescription = adminClient.describeTopics(Collections.singletonList(topicName)).all().get().get(topicName);
+            TopicDescription topicDescription = adminClient.describeTopics(Collections.singletonList(topicName)).allTopicNames().get().get(topicName);
             TopicInfo topicInfo = new TopicInfo();
             topicInfo.setClusterId(clusterId);
             topicInfo.setName(topicName);
 
             List<TopicPartitionInfo> partitionInfos = topicDescription.partitions();
-            int replicaCount = 0;
-            for (TopicPartitionInfo topicPartitionInfo : partitionInfos) {
-                replicaCount += topicPartitionInfo.replicas().size();
+            if (!CollectionUtils.isEmpty(partitionInfos)) {
+                int replicaCount = partitionInfos.get(0).replicas().size();
+                topicInfo.setReplicaCount(replicaCount);
+            } else {
+                topicInfo.setReplicaCount(0);
             }
-            topicInfo.setReplicaCount(replicaCount);
 
             List<TopicPartition> topicPartitions = partitionInfos.stream().map(x -> new TopicPartition(topicName, x.partition())).collect(Collectors.toList());
             Map<TopicPartition, Long> beginningOffsets = kafkaConsumer.beginningOffsets(topicPartitions);
@@ -293,7 +299,7 @@ public void setConfigs(String topic, String clusterId, Map<String, String> confi
         adminClient.incrementalAlterConfigs(data).all().get();
     }
 
-    public void deleteDelayMessageTopics(String clusterId){
+    public void deleteDelayMessageTopics(String clusterId) {
         List<String> topics = List.of(
                 "delay-message",
                 "__delay-seconds-1",

src/main/resources/application.yml

Lines changed: 4 additions & 0 deletions
@@ -9,6 +9,10 @@ spring:
     properties:
       hibernate:
         dialect: org.hibernate.community.dialect.SQLiteDialect
+    defer-datasource-initialization: true
+  sql:
+    init:
+      mode: always
 server:
   port: 8080

src/main/web/.env

Lines changed: 0 additions & 1 deletion
This file was deleted.

src/main/web/.gitignore

Lines changed: 0 additions & 23 deletions
This file was deleted.

src/main/web/README.md

Lines changed: 0 additions & 70 deletions
This file was deleted.

src/main/web/package.json

Lines changed: 0 additions & 48 deletions
This file was deleted.
