lei / fzm-joying · Commits

Commit e63db94a, authored Jul 14, 2021 by tangtuo

    Merge branch 'dev_1.0.0' into test_v1.0.0

Parents: 673b7140, a6c5c0d9

Showing 14 changed files with 82 additions and 304 deletions (+82, -304)
joying-admin/src/main/resources/application-dev.yml                           +0   -4
joying-admin/src/main/resources/application-local.yml                         +1   -4
joying-admin/src/main/resources/application-test.yml                          +0   -4
joying-admin/src/main/resources/logback-spring.xml                            +25  -93
joying-common/src/main/java/com/fzm/common/service/impl/NftServiceImpl.java   +1   -0
joying-common/src/main/resources/mapper/NftMapper.xml                         +3   -3
joying-portal/pom.xml                                                         +0   -4
joying-portal/src/main/java/com/fzm/portal/config/RabbitMQConfig.java         +0   -45
joying-portal/src/main/resources/application-dev.yml                          +2   -13
joying-portal/src/main/resources/application-local.yml                        +13  -13
joying-portal/src/main/resources/application-test.yml                         +11  -13
joying-portal/src/main/resources/application.yml                              +1   -0
joying-portal/src/main/resources/logback-spring.xml                           +25  -95
pom.xml                                                                       +0   -13
joying-admin/src/main/resources/application-dev.yml

@@ -30,10 +30,6 @@ spring:
      master-data-source-name: write
      # List of slave data source names; separate multiple entries with commas
      slave-data-source-names: read
    props:
      # Enable SQL logging (default false)
      sql:
        show: true
  redis:
    host: 172.16.101.135
    port: 6379
joying-admin/src/main/resources/application-local.yml

@@ -30,10 +30,7 @@ spring:
      master-data-source-name: write
      # List of slave data source names; separate multiple entries with commas
      slave-data-source-names: read
    props:
      # Enable SQL logging (default false)
      sql:
        show: true
  redis:
    host: 172.16.101.135
    port: 6379
joying-admin/src/main/resources/application-test.yml

@@ -30,10 +30,6 @@ spring:
      master-data-source-name: write
      # List of slave data source names; separate multiple entries with commas
      slave-data-source-names: read
    props:
      # Enable SQL logging (default false)
      sql:
        show: true
  redis:
    host: 172.16.101.136
    port: 6379
joying-admin/src/main/resources/logback-spring.xml

@@ -3,127 +3,59 @@
<!-- scan: when set to true, the configuration file is reloaded whenever it changes; default true -->
<!-- scanPeriod: interval for checking whether the configuration file has been modified; milliseconds if no unit is given; only effective when scan is true; default 1 minute -->
<!-- debug: when set to true, logback prints its internal status messages so its runtime state can be inspected; default false -->
<configuration scan="true" scanPeriod="10 seconds">
    <!--<include resource="org/springframework/boot/logging/logback/base.xml" />-->
<configuration scan="true" scanPeriod="60 seconds" debug="false">
    <contextName>logback</contextName>

    <!-- name is the variable name and value its value; the value is inserted into the logger context and can then be referenced with "${}" -->
    <property name="LOG_HOME" value="logs"/>
    <springProfile name="dev">
        <property name="LOG_HOME" value="logs"/>
    </springProfile>
    <springProfile name="test">
        <property name="LOG_HOME" value="logs"/>
    </springProfile>

    <!-- Colored logs -->
    <!-- Converter classes required for colored output -->
    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter"/>
    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter"/>
    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
    <!-- Colored log pattern -->
    <property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>

    <!-- Log path -->
    <property name="LOG_HOME" value="logs"/>
    <!-- Log output format: %d is the date, %thread the thread name, %-5level the level left-aligned to 5 characters, %logger{50} the logger name up to 50 characters (otherwise abbreviated at package dots), %msg the log message, %n a newline -->
    <property name="LOG_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}-%msg%n"/>

    <!-- Output to the console -->
    <!-- Console -->
    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <!-- This appender is intended for development; only the lowest level is configured, and the console outputs events at or above that level -->
        <!-- Log format -->
        <encoder>
            <Pattern>${CONSOLE_LOG_PATTERN}</Pattern>
            <!-- Character set -->
            <pattern>${LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- Output to the console -->
    <appender name="DOCKER_LOGS" class="ch.qos.logback.core.ConsoleAppender">
        <!-- This appender is intended for development; only the lowest level is configured, and the console outputs events at or above that level -->
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
            <level>DEBUG</level>
        </filter>
        <encoder>
            <Pattern>${CONSOLE_LOG_PATTERN}</Pattern>
            <!-- Character set -->
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- Output to file -->
    <!-- Rolling setup -->
    <!-- 2. Time-based rolling output of all logs -->
    <appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
        <!-- Roll when both the size and the time conditions are met -->
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
        <!-- rollover daily -->
        <!-- Rolling policy: create log files based on time -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- File naming: once a single file exceeds 50MB, a new log file is created from the date plus an incrementing index %i -->
            <fileNamePattern>${LOG_HOME}/logback.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
            <!-- each file should be at most 100MB, keep 20 days worth of history, but at most 3GB -->
            <maxFileSize>100MB</maxFileSize>
            <maxHistory>60</maxHistory>
            <totalSizeCap>20GB</totalSizeCap>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>100MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <maxHistory>180</maxHistory>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}-%msg%n</pattern>
        <append>true</append>
        <encoder>
            <pattern>${LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <appender name="ASYNC_LOG" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Do not drop logs; the default is 80, meaning that once the queue is 80% full, TRACE, DEBUG and INFO events are discarded -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Change the default queue depth; this value affects performance; default 256 -->
        <queueSize>256</queueSize>
        <!-- Attach an additional appender; at most one can be added -->
        <appender-ref ref="INFO_FILE"/>
    </appender>

    <!--
        <logger> sets the log level of a specific package or class and assigns <appender>s to it.
        <logger> has a single name attribute, an optional level and an optional additivity attribute.
        name: the package or class constrained by this logger.
        level: the log level, case-insensitive: TRACE, DEBUG, INFO, WARN, ERROR, ALL and OFF,
        plus the special value INHERITED (synonym NULL), which forces the parent's level.
        If this attribute is not set, the logger inherits its parent's level.
        additivity: whether events are also passed on to the parent logger; default true.
    -->
    <!--<logger name="org.springframework.web" level="info"/>-->
    <!--<logger name="org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor" level="INFO"/>-->
    <!--
        With MyBatis, SQL statements are only printed at DEBUG, and only INFO is configured here, so to see the SQL there are two options:
        1. change <root level="info"> to <root level="DEBUG">, which prints the SQL but also floods the log with many other messages;
        2. configure DEBUG only for the dao package, as below; the SQL is then printed while everything else stays at the normal INFO level.
    -->
    <!--
        The root element is mandatory and sets the base output level; it has a single level attribute.
        level: the log level, case-insensitive: TRACE, DEBUG, INFO, WARN, ERROR, ALL and OFF;
        it cannot be set to INHERITED or its synonym NULL. The default is DEBUG.
        It may contain zero or more appender-ref elements; each referenced appender is attached to this logger.
    -->
    <logger name="com.fzm.common.mapper" level="DEBUG"/>

    <root level="INFO">
        <!-- Development and local environments: -->
        <springProfile name="local">
            <appender-ref ref="CONSOLE"/>
        </springProfile>
        <springProfile name="dev">
            <!-- <appender-ref ref="CONSOLE" />-->
            <appender-ref ref="DOCKER_LOGS"/>
            <appender-ref ref="CONSOLE"/>
            <appender-ref ref="INFO_FILE"/>
        </springProfile>
        <springProfile name="test">
            <!-- <appender-ref ref="CONSOLE" />-->
            <appender-ref ref="DOCKER_LOGS"/>
            <appender-ref ref="CONSOLE"/>
            <appender-ref ref="INFO_FILE"/>
        </springProfile>
    </root>
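Both logback-spring.xml hunks in this commit keep a dedicated DEBUG logger for com.fzm.common.mapper under an INFO root, which is what makes MyBatis SQL visible without switching the whole application to DEBUG. A small illustrative Java snippet of that behaviour (the non-mapper logger name below is hypothetical):

// Illustrative only: with <logger name="com.fzm.common.mapper" level="DEBUG"/> and <root level="INFO">,
// DEBUG events from the mapper package (MyBatis logs SQL at DEBUG) pass through,
// while DEBUG events from any other package are filtered by the INFO root level.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogLevelDemo {
    public static void main(String[] args) {
        Logger mapperLog = LoggerFactory.getLogger("com.fzm.common.mapper.NftMapper");
        Logger otherLog = LoggerFactory.getLogger("com.fzm.portal.SomeService"); // hypothetical logger name

        mapperLog.debug("printed: the mapper logger is set to DEBUG");
        otherLog.debug("suppressed: inherits the INFO root level");
        otherLog.info("printed: INFO passes the root threshold");
    }
}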
joying-common/src/main/java/com/fzm/common/service/impl/NftServiceImpl.java

@@ -198,6 +198,7 @@ public class NftServiceImpl extends ServiceImpl<NftMapper, Nft> implements NftSe
    public List<Nft> listCurrent(Integer categoryId, Integer userId) {
        QueryWrapper<Nft> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("user_id", userId);
        queryWrapper.ne("nft_hash", "");
        if (categoryId != null) {
            queryWrapper.eq("category_id", categoryId);
        }
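This hunk adds a single line (+1, -0); given the matching nft_hash != '' filters introduced in NftMapper.xml below, the added line is almost certainly the queryWrapper.ne("nft_hash", "") condition. A minimal, hypothetical sketch of how the method plausibly reads after this commit, assuming it finishes by delegating to the list(...) call inherited from the MyBatis-Plus ServiceImpl (that part lies outside the hunk):

// Sketch only, reconstructed from the hunk; names and code outside the hunk are assumptions.
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.fzm.common.entity.Nft;          // repository entity (see the resultType in NftMapper.xml)
import com.fzm.common.mapper.NftMapper;    // repository mapper (see the logback logger name)
import java.util.List;

public class NftServiceImplSketch extends ServiceImpl<NftMapper, Nft> {

    public List<Nft> listCurrent(Integer categoryId, Integer userId) {
        QueryWrapper<Nft> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("user_id", userId);
        // Inferred addition in this commit: skip NFTs whose on-chain hash is still empty
        queryWrapper.ne("nft_hash", "");
        if (categoryId != null) {
            queryWrapper.eq("category_id", categoryId);
        }
        return list(queryWrapper); // assumed: execute via the inherited IService#list
    }
}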
joying-common/src/main/resources/mapper/NftMapper.xml

@@ -4,11 +4,11 @@
     <select id="list" resultType="com.fzm.common.entity.Nft">
         select * from tb_nft
-        where status = 1
+        where status = 1 and nft_hash != ''
         <if test="categoryId != null and categoryId > 0">
             and category_id=#{categoryId}
         </if>
-        order by is_top desc , update_date desc
+        order by is_top desc , publish_time desc
         limit #{pageNum},#{pageSize}
     </select>

@@ -40,7 +40,7 @@
         tb_nft n
         LEFT JOIN tb_user u ON n.user_id = u.id
         WHERE
-        1 = 1
+        nft_hash != ''
         <if test="name != null and name != ''">
             and n.name like concat ('%',#{name},'%')
         </if>
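Both statements now exclude rows with an empty nft_hash, and the list query orders by publish_time instead of update_date. For orientation, a hypothetical sketch of the mapper method the <select id="list"> statement could bind to; the signature is inferred from the #{categoryId}, #{pageNum} and #{pageSize} placeholders and does not appear in the commit:

// Hypothetical signature, not part of the commit: the XML statement is matched by id,
// so a corresponding method on the mapper interface might look like this.
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.fzm.common.entity.Nft;
import org.apache.ibatis.annotations.Param;
import java.util.List;

public interface NftMapperSketch extends BaseMapper<Nft> {

    // #{categoryId} is optional in the SQL (guarded by <if>); #{pageNum} and #{pageSize} feed the LIMIT clause
    List<Nft> list(@Param("categoryId") Integer categoryId,
                   @Param("pageNum") Integer pageNum,
                   @Param("pageSize") Integer pageSize);
}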
joying-portal/pom.xml

@@ -19,10 +19,6 @@
         </dependency>
-        <dependency>
-            <groupId>org.springframework.boot</groupId>
-            <artifactId>spring-boot-starter-amqp</artifactId>
-        </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-test</artifactId>
             <scope>test</scope>
         </dependency>
joying-portal/src/main/java/com/fzm/portal/config/RabbitMQConfig.java
deleted 100644 → 0

package com.fzm.portal.config;

import org.springframework.amqp.core.Binding;
import org.springframework.amqp.core.Exchange;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.TopicExchange;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.util.HashMap;

/**
 * @author tangtuo
 * @date 2021/6/30 10:41
 */
@Configuration
public class RabbitMQConfig {

    @Bean
    public Exchange orderEventExchange() {
        return new TopicExchange("nft-event-exchange", true, false);
    }

    @Bean
    public Queue nftHashQueue() {
        return new Queue("nft.hash.queue", true, false, false);
    }

    @Bean
    public Binding orderCreateBinding() {
        /**
         * String destination,               destination (queue or exchange name)
         * DestinationType destinationType,  destination type (Queue, Exchange)
         * String exchange,
         * String routingKey,
         * Map<String, Object> arguments
         */
        return new Binding("nft.hash.queue", Binding.DestinationType.QUEUE, "nft-event-exchange", "nft.hash", null);
    }
}
joying-portal/src/main/resources/application-dev.yml

@@ -30,10 +30,6 @@ spring:
      master-data-source-name: write
      # List of slave data source names; separate multiple entries with commas
      slave-data-source-names: read
    props:
      # Enable SQL logging (default false)
      sql:
        show: true
  redis:
    host: 172.16.101.135
    port: 6379

@@ -51,15 +47,8 @@ spring:
   cache:
     # Integrate Spring Cache, using redis as the cache type
     type: redis
-  rabbitmq:
-    host: 172.16.101.135
-    port: 5672
-    username: admin
-    password: admin
-    listener:
-      simple:
-        #### enable manual acknowledgement
-        acknowledge-mode: manual
+    redis:
+      time-to-live: 86400000
 swagger:
   title: 乐映影视门户系统
joying-portal/src/main/resources/application-local.yml

@@ -30,10 +30,6 @@ spring:
      master-data-source-name: write
      # List of slave data source names; separate multiple entries with commas
      slave-data-source-names: read
    props:
      # Enable SQL logging (default false)
      sql:
        show: true
  redis:
    host: localhost
    port: 6379

@@ -51,15 +47,17 @@ spring:
   cache:
     # Integrate Spring Cache, using redis as the cache type
     type: redis
-  rabbitmq:
-    host: 172.16.101.135
-    port: 5672
-    username: admin
-    password: admin
-    listener:
-      simple:
-        #### enable manual acknowledgement
-        acknowledge-mode: manual
+    redis:
+      time-to-live: 86400000
+# rabbitmq:
+#   host: 172.16.101.135
+#   port: 5672
+#   username: admin
+#   password: admin
+#   listener:
+#     simple:
+#       #### enable manual acknowledgement
+#       acknowledge-mode: manual
 swagger:
   title: 乐映影视门户系统

@@ -105,3 +103,5 @@ chain:
   admin-key: 8cd19e9bf39055f95e3e33cc1e08b9f9fc2e9be48a5b3a4d401e64041c97aec7
+  contract-name: user.evm.0xd996a3a866c577596df260844a045a068ec5accd8d71ccaa3d578c9617ec5490
+  contract-address: 1iDWTHZQxPES4hLveZRcwJH6AMaMfZfZZ
joying-portal/src/main/resources/application-test.yml

@@ -30,10 +30,6 @@ spring:
      master-data-source-name: write
      # List of slave data source names; separate multiple entries with commas
      slave-data-source-names: read
    props:
      # Enable SQL logging (default false)
      sql:
        show: true
  redis:
    host: 172.16.101.136
    port: 6379

@@ -51,15 +47,17 @@ spring:
   cache:
     # Integrate Spring Cache, using redis as the cache type
     type: redis
-  rabbitmq:
-    host: 172.16.101.135
-    port: 5672
-    username: admin
-    password: admin
-    listener:
-      simple:
-        #### enable manual acknowledgement
-        acknowledge-mode: manual
+    redis:
+      time-to-live: 86400000
+# rabbitmq:
+#   host: 172.16.101.135
+#   port: 5672
+#   username: admin
+#   password: admin
+#   listener:
+#     simple:
+#       #### enable manual acknowledgement
+#       acknowledge-mode: manual
 swagger:
   title: 乐映影视门户系统
joying-portal/src/main/resources/application.yml

@@ -15,6 +15,7 @@ mybatis-plus:
  type-aliases-package: com.fzm.common.entity
  mapper-locations: classpath:mapper/*.xml
  configuration:
    #log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
    map-underscore-to-camel-case: true
logging:
joying-portal/src/main/resources/logback-spring.xml

@@ -3,129 +3,59 @@
<!-- scan: when set to true, the configuration file is reloaded whenever it changes; default true -->
<!-- scanPeriod: interval for checking whether the configuration file has been modified; milliseconds if no unit is given; only effective when scan is true; default 1 minute -->
<!-- debug: when set to true, logback prints its internal status messages so its runtime state can be inspected; default false -->
<configuration scan="true" scanPeriod="10 seconds">
    <!--<include resource="org/springframework/boot/logging/logback/base.xml" />-->
<configuration scan="true" scanPeriod="60 seconds" debug="false">
    <contextName>logback</contextName>

    <!-- name is the variable name and value its value; the value is inserted into the logger context and can then be referenced with "${}" -->
    <property name="LOG_HOME" value="logs"/>
    <springProfile name="local">
    </springProfile>
    <springProfile name="dev">
        <property name="LOG_HOME" value="logs"/>
    </springProfile>
    <springProfile name="test">
        <property name="LOG_HOME" value="logs"/>
    </springProfile>

    <!-- Colored logs -->
    <!-- Converter classes required for colored output -->
    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter"/>
    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter"/>
    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
    <!-- Colored log pattern -->
    <property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>

    <!-- Log path -->
    <property name="LOG_HOME" value="logs"/>
    <!-- Log output format: %d is the date, %thread the thread name, %-5level the level left-aligned to 5 characters, %logger{50} the logger name up to 50 characters (otherwise abbreviated at package dots), %msg the log message, %n a newline -->
    <property name="LOG_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}-%msg%n"/>

    <!-- Output to the console -->
    <!-- Console -->
    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <!-- This appender is intended for development; only the lowest level is configured, and the console outputs events at or above that level -->
        <!-- Log format -->
        <encoder>
            <Pattern>${CONSOLE_LOG_PATTERN}</Pattern>
            <!-- Character set -->
            <pattern>${LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- Output to the console -->
    <appender name="DOCKER_LOGS" class="ch.qos.logback.core.ConsoleAppender">
        <!-- This appender is intended for development; only the lowest level is configured, and the console outputs events at or above that level -->
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
            <level>DEBUG</level>
        </filter>
        <encoder>
            <Pattern>${CONSOLE_LOG_PATTERN}</Pattern>
            <!-- Character set -->
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- Output to file -->
    <!-- Rolling setup -->
    <!-- 2. Time-based rolling output of all logs -->
    <appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
        <!-- Roll when both the size and the time conditions are met -->
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
        <!-- rollover daily -->
        <!-- Rolling policy: create log files based on time -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- File naming: once a single file exceeds 50MB, a new log file is created from the date plus an incrementing index %i -->
            <fileNamePattern>${LOG_HOME}/logback.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
            <!-- each file should be at most 100MB, keep 20 days worth of history, but at most 3GB -->
            <maxFileSize>100MB</maxFileSize>
            <maxHistory>60</maxHistory>
            <totalSizeCap>20GB</totalSizeCap>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>100MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <maxHistory>180</maxHistory>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}-%msg%n</pattern>
        <append>true</append>
        <encoder>
            <pattern>${LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <appender name="ASYNC_LOG" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Do not drop logs; the default is 80, meaning that once the queue is 80% full, TRACE, DEBUG and INFO events are discarded -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Change the default queue depth; this value affects performance; default 256 -->
        <queueSize>256</queueSize>
        <!-- Attach an additional appender; at most one can be added -->
        <appender-ref ref="INFO_FILE"/>
    </appender>

    <!--
        <logger> sets the log level of a specific package or class and assigns <appender>s to it.
        <logger> has a single name attribute, an optional level and an optional additivity attribute.
        name: the package or class constrained by this logger.
        level: the log level, case-insensitive: TRACE, DEBUG, INFO, WARN, ERROR, ALL and OFF,
        plus the special value INHERITED (synonym NULL), which forces the parent's level.
        If this attribute is not set, the logger inherits its parent's level.
        additivity: whether events are also passed on to the parent logger; default true.
    -->
    <!--<logger name="org.springframework.web" level="info"/>-->
    <!--<logger name="org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor" level="INFO"/>-->
    <!--
        With MyBatis, SQL statements are only printed at DEBUG, and only INFO is configured here, so to see the SQL there are two options:
        1. change <root level="info"> to <root level="DEBUG">, which prints the SQL but also floods the log with many other messages;
        2. configure DEBUG only for the dao package, as below; the SQL is then printed while everything else stays at the normal INFO level.
    -->
    <!--
        The root element is mandatory and sets the base output level; it has a single level attribute.
        level: the log level, case-insensitive: TRACE, DEBUG, INFO, WARN, ERROR, ALL and OFF;
        it cannot be set to INHERITED or its synonym NULL. The default is DEBUG.
        It may contain zero or more appender-ref elements; each referenced appender is attached to this logger.
    -->
    <logger name="com.fzm.common.mapper" level="DEBUG"/>

    <root level="INFO">
        <!-- Development and local environments: -->
        <springProfile name="local">
            <appender-ref ref="CONSOLE"/>
        </springProfile>
        <springProfile name="dev">
            <!-- <appender-ref ref="CONSOLE" />-->
            <appender-ref ref="DOCKER_LOGS"/>
            <appender-ref ref="CONSOLE"/>
            <appender-ref ref="INFO_FILE"/>
        </springProfile>
        <springProfile name="test">
            <!-- <appender-ref ref="CONSOLE" />-->
            <appender-ref ref="DOCKER_LOGS"/>
            <appender-ref ref="CONSOLE"/>
            <appender-ref ref="INFO_FILE"/>
        </springProfile>
    </root>
pom.xml

@@ -75,19 +75,6 @@
             <version>2.8.6</version>
         </dependency>
-        <!-- Sa-Token authentication; online docs: http://sa-token.dev33.cn/ -->
-        <dependency>
-            <groupId>cn.dev33</groupId>
-            <artifactId>sa-token-spring-boot-starter</artifactId>
-            <version>1.20.0</version>
-        </dependency>
-        <!-- Sa-Token redis integration (jackson serialization) -->
-        <dependency>
-            <groupId>cn.dev33</groupId>
-            <artifactId>sa-token-dao-redis-jackson</artifactId>
-            <version>1.20.0</version>
-        </dependency>
         <dependency>
             <groupId>com.github.pagehelper</groupId>
             <artifactId>pagehelper-spring-boot-starter</artifactId>