first commit

This commit is contained in:
starrySky
2024-03-20 09:28:04 +08:00
commit 989f0210f2
286 changed files with 25129 additions and 0 deletions

com/starry/common/config/CosConfig.java

@@ -0,0 +1,56 @@
package com.starry.common.config;
import com.qcloud.cos.COSClient;
import com.qcloud.cos.ClientConfig;
import com.qcloud.cos.auth.BasicCOSCredentials;
import com.qcloud.cos.auth.COSCredentials;
import com.qcloud.cos.region.Region;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * Tencent Cloud COS object storage configuration.
 *
 * @author admin
 * @since 2022/10/28
 */
@Data
@Component
@ConfigurationProperties(prefix = "cos")
public class CosConfig {
    /**
     * Base URL of the bucket
     **/
    private String baseUrl;
    /**
     * Tencent Cloud account SecretId
     **/
    private String secretId;
    /**
     * Tencent Cloud account SecretKey
     **/
    private String secretKey;
    /**
     * Bucket region
     **/
    private String regionName;
    /**
     * Bucket name
     **/
    private String bucketName;
    /**
     * Root directory (prefix) for uploads
     **/
    private String folderPrefix;
    public COSClient getCosClient() {
        // Initialize the credentials
        COSCredentials cosCredentials = new BasicCOSCredentials(this.secretId, this.secretKey);
        // Configure the region
        Region region = new Region(this.regionName);
        ClientConfig config = new ClientConfig(region);
        // Build and return the COS client
        return new COSClient(cosCredentials, config);
    }
}
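
A minimal usage sketch of CosConfig (not part of the commit): in the running application the fields are bound from cos.* properties, so the hard-coded values, bucket name, object key, and local file below are placeholders.

import com.qcloud.cos.COSClient;
import com.starry.common.config.CosConfig;

import java.io.File;

public class CosUploadSketch {
    public static void main(String[] args) {
        // Normally populated by @ConfigurationProperties(prefix = "cos"); hard-coded here for illustration.
        CosConfig cosConfig = new CosConfig();
        cosConfig.setSecretId("your-secret-id");
        cosConfig.setSecretKey("your-secret-key");
        cosConfig.setRegionName("ap-guangzhou");
        cosConfig.setBucketName("example-bucket-1250000000");
        cosConfig.setFolderPrefix("upload");

        COSClient client = cosConfig.getCosClient();
        try {
            // Upload a local file under the configured root directory (placeholder key and file).
            client.putObject(cosConfig.getBucketName(),
                    cosConfig.getFolderPrefix() + "/demo.txt",
                    new File("demo.txt"));
        } finally {
            client.shutdown();
        }
    }
}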

com/starry/common/config/FastJson2JsonRedisSerializer.java

@@ -0,0 +1,44 @@
package com.starry.common.config;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONReader;
import com.alibaba.fastjson2.JSONWriter;
import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.data.redis.serializer.SerializationException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
/**
 * FastJSON2-based serializer for Redis values.
 *
 * @author ruoyi
 */
public class FastJson2JsonRedisSerializer<T> implements RedisSerializer<T> {
public static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
private final Class<T> clazz;
public FastJson2JsonRedisSerializer(Class<T> clazz) {
super();
this.clazz = clazz;
}
@Override
public byte[] serialize(T t) throws SerializationException {
if (t == null) {
return new byte[0];
}
return JSON.toJSONString(t, JSONWriter.Feature.WriteClassName).getBytes(DEFAULT_CHARSET);
}
@Override
public T deserialize(byte[] bytes) throws SerializationException {
if (bytes == null || bytes.length <= 0) {
return null;
}
String str = new String(bytes, DEFAULT_CHARSET);
return JSON.parseObject(str, clazz, JSONReader.Feature.SupportAutoType);
}
}
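
A round-trip sketch of the serializer (not part of the commit); the map content is illustrative. WriteClassName embeds an "@type" field in the payload, which SupportAutoType uses to restore the concrete type on read.

import com.starry.common.config.FastJson2JsonRedisSerializer;

import java.util.HashMap;
import java.util.Map;

public class SerializerSketch {
    public static void main(String[] args) {
        FastJson2JsonRedisSerializer<Object> serializer =
                new FastJson2JsonRedisSerializer<>(Object.class);

        Map<String, Object> value = new HashMap<>();
        value.put("id", 1L);
        value.put("name", "starry");

        // The serialized bytes are readable JSON containing an "@type" marker.
        byte[] bytes = serializer.serialize(value);
        System.out.println(new String(bytes, FastJson2JsonRedisSerializer.DEFAULT_CHARSET));

        // SupportAutoType reads the "@type" marker back and rebuilds the original structure.
        Object restored = serializer.deserialize(bytes);
        System.out.println(restored);
    }
}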

com/starry/common/config/RedisConfig.java

@@ -0,0 +1,36 @@
package com.starry.common.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.StringRedisSerializer;
/**
 * Redis configuration.
 *
 * @author admin
 * @since 2022/10/18
 */
@Configuration
public class RedisConfig {
@Bean
public RedisTemplate<Object, Object> redisTemplate(RedisConnectionFactory connectionFactory) {
RedisTemplate<Object, Object> template = new RedisTemplate<>();
template.setConnectionFactory(connectionFactory);
        FastJson2JsonRedisSerializer<Object> serializer = new FastJson2JsonRedisSerializer<>(Object.class);
        // Serialize and deserialize Redis keys with StringRedisSerializer
        template.setKeySerializer(new StringRedisSerializer());
        template.setValueSerializer(serializer);
        // Hash keys also use StringRedisSerializer; hash values use the FastJSON2 serializer
        template.setHashKeySerializer(new StringRedisSerializer());
        template.setHashValueSerializer(serializer);
template.afterPropertiesSet();
return template;
}
}
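
A usage sketch of the template defined above (not part of the commit); the service name, key, and value are placeholders and a reachable Redis instance is assumed.

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;

import java.util.concurrent.TimeUnit;

@Service
public class CacheDemoService {

    @Autowired
    private RedisTemplate<Object, Object> redisTemplate;

    public void cacheAndRead() {
        // Keys go through StringRedisSerializer; values are stored as FastJSON2 text with an "@type" marker.
        redisTemplate.opsForValue().set("demo:user:1", "starry", 30, TimeUnit.MINUTES);
        Object cached = redisTemplate.opsForValue().get("demo:user:1");
        System.out.println(cached);
    }
}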

com/starry/common/config/Swagger2Config.java

@@ -0,0 +1,53 @@
package com.starry.common.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiKey;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2WebMvc;
import java.util.ArrayList;
import java.util.List;
/**
 * Swagger configuration.
 *
 * @author admin
 * @since 2021/9/1
 */
@Configuration
@EnableSwagger2WebMvc
public class Swagger2Config {
@Bean(value = "defaultApi2")
public Docket defaultApi2() {
Docket docket = new Docket(DocumentationType.SWAGGER_2)
.apiInfo(new ApiInfoBuilder()
.title("接口文档")
.description("# 接口文档")
.termsOfServiceUrl("http://www.xx.com/")
.contact("277769738@qq.com")
.version("1.0")
.build())
                // Group name
                .groupName("2.X版本")
                .select()
                // Base package scanned for controller endpoints
                .apis(RequestHandlerSelectors.basePackage("com.java.admin"))
.paths(PathSelectors.any())
.build();
return docket;
}
    private List<ApiKey> securitySchemes() {
        // Configure the request-header based API key
        List<ApiKey> result = new ArrayList<>();
ApiKey apiKey = new ApiKey("Authorization", "Authorization", "header");
result.add(apiKey);
return result;
}
}
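
Two hedged notes on the class above: the scanned base package is com.java.admin, which may need to match the project's actual controller package, and securitySchemes() is never attached to the Docket, so the header scheme is not exposed in Swagger UI. Below is a sketch of how the scheme would typically be wired in; method signatures can vary slightly between springfox versions.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiKey;
import springfox.documentation.service.SecurityScheme;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;

import java.util.ArrayList;
import java.util.List;

@Configuration
public class SwaggerSecuritySketch {

    @Bean
    public Docket securedApi() {
        // Same header-based token scheme as securitySchemes() in Swagger2Config.
        List<SecurityScheme> schemes = new ArrayList<>();
        schemes.add(new ApiKey("Authorization", "Authorization", "header"));

        return new Docket(DocumentationType.SWAGGER_2)
                .groupName("secured")
                // Attaching the scheme lets Swagger UI send the Authorization header
                // (usually combined with a SecurityContext to apply it per operation).
                .securitySchemes(schemes)
                .select()
                .apis(RequestHandlerSelectors.basePackage("com.java.admin"))
                .paths(PathSelectors.any())
                .build();
    }
}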

com/starry/common/config/ThreadPoolConfig.java

@@ -0,0 +1,67 @@
package com.starry.common.config;
import com.starry.common.utils.ThreadsUtils;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor;
/**
 * Thread pool configuration.
 *
 * @author admin
 * @since 2022/7/25
 */
@Configuration
public class ThreadPoolConfig {
    /**
     * Core pool size
     **/
    private final int corePoolSize = 50;
    /**
     * Maximum number of threads that can be created
     **/
    private final int maxPoolSize = 200;
    /**
     * Maximum queue capacity
     **/
    private final int queueCapacity = 1000;
    /**
     * Idle time (seconds) allowed for pool threads
     **/
    private final int keepAliveSeconds = 300;
@Bean(name = "threadPoolTaskExecutor")
public ThreadPoolTaskExecutor threadPoolTaskExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setMaxPoolSize(maxPoolSize);
executor.setCorePoolSize(corePoolSize);
executor.setQueueCapacity(queueCapacity);
executor.setKeepAliveSeconds(keepAliveSeconds);
        // Rejection policy when no thread is available: run the task on the caller's thread
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
return executor;
}
    /**
     * Executes periodic or scheduled tasks.
     */
@Bean(name = "scheduledExecutorService")
protected ScheduledExecutorService scheduledExecutorService() {
return new ScheduledThreadPoolExecutor(corePoolSize, new BasicThreadFactory.Builder().namingPattern("schedule-pool-%d").daemon(true).build(),
new ThreadPoolExecutor.CallerRunsPolicy()) {
@Override
protected void afterExecute(Runnable r, Throwable t) {
super.afterExecute(r, t);
ThreadsUtils.printException(r, t);
}
};
}
}
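
A usage sketch of the two beans above (not part of the commit); the service and task bodies are placeholders.

import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

@Service
public class ThreadPoolDemoService {

    @Resource(name = "threadPoolTaskExecutor")
    private ThreadPoolTaskExecutor taskExecutor;

    @Resource(name = "scheduledExecutorService")
    private ScheduledExecutorService scheduledExecutorService;

    public void submitTasks() {
        // One-off task; with CallerRunsPolicy a saturated pool runs it on the submitting thread.
        taskExecutor.execute(() ->
                System.out.println("async task on " + Thread.currentThread().getName()));

        // Periodic task; failures are reported through the afterExecute hook defined above.
        scheduledExecutorService.scheduleAtFixedRate(
                () -> System.out.println("scheduled tick"), 1, 10, TimeUnit.SECONDS);
    }
}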

com/starry/common/config/typehandler/ArrayLongTypeHandler.java

@@ -0,0 +1,48 @@
package com.starry.common.config.typehandler;
import cn.hutool.json.JSONUtil;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import org.apache.ibatis.type.MappedTypes;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Persists a Long[] as a JSON string and converts the stored
 * string back into a Long[] when reading from the database.
 */
@MappedTypes({Long[].class})
@MappedJdbcTypes({JdbcType.VARCHAR})
public class ArrayLongTypeHandler extends BaseTypeHandler<Long[]> {
private static final Long[] l = new Long[]{};
@Override
public void setNonNullParameter(PreparedStatement ps, int i,
Long[] parameter, JdbcType jdbcType) throws SQLException {
ps.setString(i, JSONUtil.toJsonStr(parameter));
}
@Override
public Long[] getNullableResult(ResultSet rs, String columnName)
throws SQLException {
return JSONUtil.parseArray(rs.getString(columnName)).toArray(l);
}
@Override
public Long[] getNullableResult(ResultSet rs, int columnIndex)
throws SQLException {
return JSONUtil.parseArray(rs.getString(columnIndex)).toArray(l);
}
@Override
public Long[] getNullableResult(CallableStatement cs, int columnIndex)
throws SQLException {
return JSONUtil.parseArray(cs.getString(columnIndex)).toArray(l);
}
}

com/starry/common/config/typehandler/ArrayStringTypeHandler.java

@@ -0,0 +1,48 @@
package com.starry.common.config.typehandler;
import cn.hutool.json.JSONUtil;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import org.apache.ibatis.type.MappedTypes;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Persists a String[] as a JSON string and converts the stored
 * string back into a String[] when reading from the database.
 */
@MappedTypes({String[].class})
@MappedJdbcTypes({JdbcType.VARCHAR})
public class ArrayStringTypeHandler extends BaseTypeHandler<String[]> {
private static final String[] l = new String[]{};
@Override
public void setNonNullParameter(PreparedStatement ps, int i,
String[] parameter, JdbcType jdbcType) throws SQLException {
ps.setString(i, JSONUtil.toJsonStr(parameter));
}
@Override
public String[] getNullableResult(ResultSet rs, String columnName)
throws SQLException {
return JSONUtil.parseArray(rs.getString(columnName)).toArray(l);
}
@Override
public String[] getNullableResult(ResultSet rs, int columnIndex)
throws SQLException {
return JSONUtil.parseArray(rs.getString(columnIndex)).toArray(l);
}
@Override
public String[] getNullableResult(CallableStatement cs, int columnIndex)
throws SQLException {
return JSONUtil.parseArray(cs.getString(columnIndex)).toArray(l);
}
}

com/starry/common/config/typehandler/JsonTypeHandler.java

@@ -0,0 +1,50 @@
package com.starry.common.config.typehandler;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import org.apache.ibatis.type.MappedTypes;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Persists a JSONObject as a JSON string and converts the stored
 * string back into a JSONObject when reading from the database.
 */
@MappedTypes({JSONObject.class})
@MappedJdbcTypes({JdbcType.VARCHAR})
public class JsonTypeHandler extends BaseTypeHandler<JSONObject> {
@Override
public void setNonNullParameter(PreparedStatement ps, int i, JSONObject parameter,
JdbcType jdbcType) throws SQLException {
ps.setString(i, JSONUtil.toJsonStr(parameter));
}
@Override
public JSONObject getNullableResult(ResultSet rs, String columnName)
throws SQLException {
return JSONUtil.parseObj(rs.getString(columnName)).toBean(JSONObject.class);
}
@Override
public JSONObject getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
return JSONUtil.parseObj(rs.getString(columnIndex)).toBean(JSONObject.class);
}
@Override
public JSONObject getNullableResult(CallableStatement cs, int columnIndex)
throws SQLException {
return JSONUtil.parseObj(cs.getString(columnIndex)).toBean(JSONObject.class);
}
}
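
A sketch of how the three handlers above are typically referenced from an annotation-based MyBatis mapper (not part of the commit); the table, columns, and DemoConfig entity are hypothetical. For writes, a handler is referenced inline, e.g. #{menuIds, typeHandler=com.starry.common.config.typehandler.ArrayLongTypeHandler}, or the handlers can be registered globally via the type-handlers-package setting of MyBatis / MyBatis-Plus.

import cn.hutool.json.JSONObject;
import com.starry.common.config.typehandler.ArrayLongTypeHandler;
import com.starry.common.config.typehandler.ArrayStringTypeHandler;
import com.starry.common.config.typehandler.JsonTypeHandler;
import lombok.Data;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Result;
import org.apache.ibatis.annotations.Results;
import org.apache.ibatis.annotations.Select;

@Mapper
public interface DemoConfigMapper {

    @Select("SELECT id, menu_ids, tags, extra FROM demo_config WHERE id = #{id}")
    @Results({
            @Result(column = "id", property = "id"),
            // VARCHAR "[1,2,3]" -> Long[]
            @Result(column = "menu_ids", property = "menuIds", typeHandler = ArrayLongTypeHandler.class),
            // VARCHAR "[\"a\",\"b\"]" -> String[]
            @Result(column = "tags", property = "tags", typeHandler = ArrayStringTypeHandler.class),
            // VARCHAR "{\"theme\":\"dark\"}" -> JSONObject
            @Result(column = "extra", property = "extra", typeHandler = JsonTypeHandler.class)
    })
    DemoConfig selectById(Long id);

    /**
     * Hypothetical result entity used only for this sketch.
     */
    @Data
    class DemoConfig {
        private Long id;
        private Long[] menuIds;
        private String[] tags;
        private JSONObject extra;
    }
}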