commit 71179324c8
Date: 2025-06-17 17:08:14 +08:00
1205 changed files with 798946 additions and 0 deletions


@@ -0,0 +1,23 @@
# BellSoft Liberica JDK, the base image officially recommended by Spring; downloads: https://bell-sw.com/pages/downloads/
FROM bellsoft/liberica-openjdk-debian:17.0.11-cds
#FROM bellsoft/liberica-openjdk-debian:21.0.5-cds
#FROM findepi/graalvm:java17-native
LABEL maintainer="Lion Li"
RUN mkdir -p /ruoyi/snailjob/logs
WORKDIR /ruoyi/snailjob
ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 JAVA_OPTS="-Xms512m -Xmx1024m"
EXPOSE 8800
EXPOSE 17888
ADD ./target/ruoyi-snailjob-server.jar ./app.jar
SHELL ["/bin/bash", "-c"]
ENTRYPOINT java -Djava.security.egd=file:/dev/./urandom \
-XX:+HeapDumpOnOutOfMemoryError -XX:+UseZGC ${JAVA_OPTS} \
-jar app.jar


@@ -0,0 +1,63 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <groupId>org.dromara</groupId>
        <artifactId>ruoyi-visual</artifactId>
        <version>${revision}</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <packaging>jar</packaging>
    <artifactId>ruoyi-snailjob-server</artifactId>

    <dependencies>
        <dependency>
            <groupId>com.aizuda</groupId>
            <artifactId>snail-job-server-starter</artifactId>
            <version>${snailjob.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.scala-lang</groupId>
                    <artifactId>scala-library</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.13.9</version>
        </dependency>
        <dependency>
            <groupId>org.dromara</groupId>
            <artifactId>ruoyi-common-nacos</artifactId>
        </dependency>
        <dependency>
            <groupId>de.codecentric</groupId>
            <artifactId>spring-boot-admin-starter-client</artifactId>
            <version>${spring-boot-admin.version}</version>
        </dependency>
    </dependencies>

    <build>
        <finalName>${project.artifactId}</finalName>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <version>${spring-boot.version}</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>repackage</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>


@@ -0,0 +1,64 @@
package com.aizuda.snailjob.server.starter.config;

import jakarta.servlet.*;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class ActuatorAuthFilter implements Filter {

    private final String username;
    private final String password;

    public ActuatorAuthFilter(String username, String password) {
        this.username = username;
        this.password = password;
    }

    @Override
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
        HttpServletRequest request = (HttpServletRequest) servletRequest;
        HttpServletResponse response = (HttpServletResponse) servletResponse;
        // Read the Authorization header
        String authHeader = request.getHeader("Authorization");
        if (authHeader == null || !authHeader.startsWith("Basic ")) {
            // No Authorization header, or not Basic auth: answer 401 with a challenge
            response.setHeader("WWW-Authenticate", "Basic realm=\"realm\"");
            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
            return;
        }
        // Decode the Base64-encoded "username:password" pair
        String base64Credentials = authHeader.substring("Basic ".length());
        byte[] credDecoded = Base64.getDecoder().decode(base64Credentials);
        String credentials = new String(credDecoded, StandardCharsets.UTF_8);
        String[] split = credentials.split(":");
        if (split.length != 2) {
            response.setHeader("WWW-Authenticate", "Basic realm=\"realm\"");
            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
            return;
        }
        // Verify the username and password
        if (!username.equals(split[0]) || !password.equals(split[1])) {
            response.setHeader("WWW-Authenticate", "Basic realm=\"realm\"");
            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
            return;
        }
        // Authentication succeeded; continue processing the request
        filterChain.doFilter(request, response);
    }

    @Override
    public void init(FilterConfig filterConfig) {
    }

    @Override
    public void destroy() {
    }
}
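A minimal sketch of the filter's decision points, assuming spring-test's servlet mocks (MockHttpServletRequest, MockHttpServletResponse, MockFilterChain) are available on the classpath; the ruoyi/secret credentials are placeholders for illustration, not values from this commit:

import com.aizuda.snailjob.server.starter.config.ActuatorAuthFilter;
import org.springframework.mock.web.MockFilterChain;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class ActuatorAuthFilterSketch {

    public static void main(String[] args) throws Exception {
        // Placeholder credentials for illustration only
        ActuatorAuthFilter filter = new ActuatorAuthFilter("ruoyi", "secret");

        // 1) No Authorization header: the filter replies 401 plus a WWW-Authenticate challenge
        MockHttpServletRequest anonymous = new MockHttpServletRequest("GET", "/actuator/health");
        MockHttpServletResponse rejected = new MockHttpServletResponse();
        filter.doFilter(anonymous, rejected, new MockFilterChain());
        System.out.println(rejected.getStatus()); // 401

        // 2) Correct "username:password" pair, Base64-encoded: the request continues down the chain
        MockHttpServletRequest authed = new MockHttpServletRequest("GET", "/actuator/health");
        String token = Base64.getEncoder().encodeToString("ruoyi:secret".getBytes(StandardCharsets.UTF_8));
        authed.addHeader("Authorization", "Basic " + token);
        MockHttpServletResponse accepted = new MockHttpServletResponse();
        filter.doFilter(authed, accepted, new MockFilterChain());
        System.out.println(accepted.getStatus()); // 200, untouched by the filter
    }
}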


@@ -0,0 +1,29 @@
package com.aizuda.snailjob.server.starter.config;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Access security configuration
 *
 * @author Lion Li
 */
@Configuration
public class SecurityConfig {

    @Value("${spring.cloud.nacos.discovery.metadata.username}")
    private String username;

    @Value("${spring.cloud.nacos.discovery.metadata.userpassword}")
    private String password;

    @Bean
    public FilterRegistrationBean<ActuatorAuthFilter> actuatorFilterRegistrationBean() {
        FilterRegistrationBean<ActuatorAuthFilter> registrationBean = new FilterRegistrationBean<>();
        registrationBean.setFilter(new ActuatorAuthFilter(username, password));
        registrationBean.addUrlPatterns("/actuator", "/actuator/*");
        return registrationBean;
    }
}
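For comparison, a hedged sketch of the request a monitoring client would send to pass the registered filter, using the JDK's built-in java.net.http client. Port 8800 and the /snail-job context path match the application.yml further below; localhost and the ruoyi/secret credentials are placeholders (the real values are injected from the Nacos discovery metadata):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class ActuatorClientSketch {

    public static void main(String[] args) throws Exception {
        // Placeholder credentials; in this setup SecurityConfig reads the real
        // username/userpassword pair from the Nacos discovery metadata.
        String token = Base64.getEncoder().encodeToString("ruoyi:secret".getBytes(StandardCharsets.UTF_8));

        // Build a Basic-authenticated call to a protected actuator endpoint
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8800/snail-job/actuator/health"))
            .header("Authorization", "Basic " + token)
            .GET()
            .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}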


@@ -0,0 +1,19 @@
package org.dromara.snailjob;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * SnailJob Server startup class
 *
 * @author opensnail
 * @date 2024-05-17
 */
@SpringBootApplication
public class SnailJobServerApplication {

    public static void main(String[] args) {
        // Boots the upstream SnailJob server application class; the fully qualified
        // name avoids the clash with this class's own name
        SpringApplication.run(com.aizuda.snailjob.server.SnailJobServerApplication.class, args);
    }
}


@@ -0,0 +1,60 @@
server:
  port: 8800
  servlet:
    context-path: /snail-job

spring:
  application:
    name: ruoyi-snailjob-server
  profiles:
    active: @profiles.active@
  web:
    resources:
      static-locations: classpath:admin/

mybatis-plus:
  typeAliasesPackage: com.aizuda.snailjob.template.datasource.persistence.po
  global-config:
    db-config:
      where-strategy: NOT_EMPTY
      capital-mode: false
      logic-delete-value: 1
      logic-not-delete-value: 0
  configuration:
    map-underscore-to-camel-case: true
    cache-enabled: true

logging:
  config: classpath:logback-plus.xml

management:
  endpoints:
    web:
      exposure:
        include: '*'
  endpoint:
    health:
      show-details: ALWAYS
    logfile:
      external-file: ./logs/${spring.application.name}/console.log

--- # Nacos configuration
spring:
  cloud:
    nacos:
      # Nacos server address
      server-addr: @nacos.server@
      username: @nacos.username@
      password: @nacos.password@
      discovery:
        # registration group
        group: @nacos.discovery.group@
        namespace: ${spring.profiles.active}
      config:
        # configuration group
        group: @nacos.config.group@
        namespace: ${spring.profiles.active}
  config:
    import:
      - optional:nacos:datasource.yml
      - optional:nacos:${spring.application.name}.yml


@@ -0,0 +1,10 @@
Application Version: ${revision}
Spring Boot Version: ${spring-boot.version}
_ _ _ _
(_) (_) | |
___ _ __ __ _ _| |_ ___ | |__ ______ ___ ___ _ ____ _____ _ __
/ __| '_ \ / _` | | | |/ _ \| '_ \______/ __|/ _ \ '__\ \ / / _ \ '__|
\__ \ | | | (_| | | | | (_) | |_) | \__ \ __/ | \ V / __/ |
|___/_| |_|\__,_|_|_| |\___/|_.__/ |___/\___|_| \_/ \___|_|
_/ |
|__/


@@ -0,0 +1,97 @@
<?xml version="1.0" encoding="UTF-8"?>
<included>
    <property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n"/>

    <!-- Console log, mirrored to a rolling file -->
    <appender name="file_console" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.path}/console.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Log file name pattern -->
            <fileNamePattern>${log.path}/console.%d{yyyy-MM-dd}.log</fileNamePattern>
            <!-- Keep at most 1 day of history -->
            <maxHistory>1</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>${log.pattern}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <!-- Minimum level to log -->
            <level>INFO</level>
        </filter>
    </appender>

    <!-- System info log -->
    <appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.path}/info.log</file>
        <!-- Rolling policy: create log files based on time -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Log file name pattern -->
            <fileNamePattern>${log.path}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
            <!-- Keep at most 60 days of history -->
            <maxHistory>60</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>${log.pattern}</pattern>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- Level to match -->
            <level>INFO</level>
            <!-- On match: accept (log the event) -->
            <onMatch>ACCEPT</onMatch>
            <!-- On mismatch: deny (drop the event) -->
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- Error log -->
    <appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.path}/error.log</file>
        <!-- Rolling policy: create log files based on time -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Log file name pattern -->
            <fileNamePattern>${log.path}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
            <!-- Keep at most 60 days of history -->
            <maxHistory>60</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>${log.pattern}</pattern>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- Level to match -->
            <level>ERROR</level>
            <!-- On match: accept (log the event) -->
            <onMatch>ACCEPT</onMatch>
            <!-- On mismatch: deny (drop the event) -->
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- Async wrapper for the info log -->
    <appender name="async_info" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Never discard events; by default TRACE/DEBUG/INFO are dropped once the queue is 80% full -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Queue depth, which affects performance; the default is 256 -->
        <queueSize>512</queueSize>
        <!-- Attach the target appender; at most one can be added -->
        <appender-ref ref="file_info"/>
    </appender>

    <!-- Async wrapper for the error log -->
    <appender name="async_error" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Never discard events; by default TRACE/DEBUG/INFO are dropped once the queue is 80% full -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Queue depth, which affects performance; the default is 256 -->
        <queueSize>512</queueSize>
        <!-- Attach the target appender; at most one can be added -->
        <appender-ref ref="file_error"/>
    </appender>

    <!-- System operation logs -->
    <root level="info">
        <appender-ref ref="async_info"/>
        <appender-ref ref="async_error"/>
        <appender-ref ref="file_console"/>
    </root>
</included>


@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
    <!-- Log storage path -->
    <property name="log.path" value="logs/${project.artifactId}"/>
    <!-- Console log pattern -->
    <property name="console.log.pattern"
              value="%cyan(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>

    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${console.log.pattern}</pattern>
            <charset>utf-8</charset>
        </encoder>
    </appender>

    <include resource="logback-common.xml"/>
    <include resource="logback-logstash.xml"/>
    <!-- Enable SkyWalking log collection -->
    <include resource="logback-skylog.xml"/>

    <!-- SnailJob appender -->
    <appender name="snail_log_server_appender"
              class="com.aizuda.snailjob.server.common.appender.SnailJobServerLogbackAppender">
    </appender>

    <!-- System operation logs -->
    <root level="info">
        <appender-ref ref="console"/>
        <appender-ref ref="snail_log_server_appender"/>
    </root>
</configuration>