Thread pool rejection policies:

  1. AbortPolicy: discards the task and throws an exception (the default)
  2. DiscardPolicy: silently discards the task
  3. DiscardOldestPolicy: discards the oldest task at the head of the queue, then tries to enqueue this task
  4. CallerRunsPolicy: the calling thread runs the task itself
  5. Custom: implement RejectedExecutionHandler
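
A minimal sketch of how these policies are wired into a ThreadPoolExecutor (class and task names are illustrative):

java
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class RejectPolicyDemo {
    public static void main(String[] args) {
        // Small pool + bounded queue so that rejection is easy to trigger.
        ThreadPoolExecutor pool = new ThreadPoolExecutor(
                1, 1, 0L, TimeUnit.MILLISECONDS,
                new ArrayBlockingQueue<>(1),
                new ThreadPoolExecutor.CallerRunsPolicy()); // built-in policy: the caller runs the task

        // A custom policy only needs to implement RejectedExecutionHandler.
        RejectedExecutionHandler logAndDrop =
                (task, executor) -> System.out.println("rejected: " + task);
        pool.setRejectedExecutionHandler(logAndDrop);

        for (int i = 0; i < 5; i++) {
            final int n = i;
            pool.execute(() -> System.out.println("task " + n + " on " + Thread.currentThread().getName()));
        }
        pool.shutdown();
    }
}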

For I/O-bound workloads, most threads are blocked, so more threads should be configured:

Threads = CPU cores / (1 - blocking coefficient). The blocking coefficient is typically 0.8~0.9; with 0.9, the ideal thread count on a dual-core CPU is 20.

I/O-bound thread pool sizing: optimal threads = ((thread wait time + thread CPU time) / thread CPU time) * number of CPUs

CPU-bound (compute-intensive)

The usual recommendation is: threads = CPU cores + 1. Threads = CPU cores * 2 is also used, depending on the JDK version and the CPU (server CPUs often have hyper-threading); on JDK 1.8, to increase parallelism, an ideal thread count is the number of hardware threads * 2.
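
A small sketch of the sizing formulas above; the blocking coefficient and the wait/CPU times are assumed sample values:

java
public class ThreadPoolSizing {
    public static void main(String[] args) {
        int cpus = Runtime.getRuntime().availableProcessors();

        // I/O-bound, variant 1: threads = cores / (1 - blocking coefficient)
        double blockingCoefficient = 0.9;  // assumed, typically 0.8~0.9
        int ioThreads = (int) (cpus / (1 - blockingCoefficient));

        // I/O-bound, variant 2: ((wait time + CPU time) / CPU time) * cores
        double waitMs = 90, cpuMs = 10;    // assumed per-task measurements
        int ioThreads2 = (int) (((waitMs + cpuMs) / cpuMs) * cpus);

        // CPU-bound: cores + 1
        int cpuThreads = cpus + 1;

        System.out.printf("I/O-bound: %d or %d threads, CPU-bound: %d threads%n",
                ioThreads, ioThreads2, cpuThreads);
    }
}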

Lock classification

  1. Pessimistic locks vs. optimistic locks
  2. Fair locks vs. non-fair locks: ReentrantLock(true/false), non-fair by default (see the sketch after this list)
  3. Spin locks vs. reentrant locks (synchronized, ReentrantLock)
  4. Heavyweight locks vs. lightweight locks
  5. Exclusive locks vs. shared locks
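
A minimal sketch for item 2: the boolean passed to the ReentrantLock constructor selects fair (true) or non-fair (the default) locking:

java
import java.util.concurrent.locks.ReentrantLock;

public class FairLockDemo {
    // true -> fair lock: waiting threads acquire it roughly in FIFO order.
    private static final ReentrantLock fairLock = new ReentrantLock(true);
    // No-arg constructor -> non-fair lock (the default), usually higher throughput.
    private static final ReentrantLock nonFairLock = new ReentrantLock();

    public static void main(String[] args) {
        Runnable job = () -> {
            fairLock.lock();       // also reentrant: the owning thread may call lock() again
            try {
                System.out.println(Thread.currentThread().getName()
                        + " holds the fair lock, isFair=" + fairLock.isFair());
            } finally {
                fairLock.unlock(); // must be released explicitly
            }
        };
        for (int i = 0; i < 3; i++) {
            new Thread(job, "worker-" + i).start();
        }
        System.out.println("nonFairLock.isFair() = " + nonFairLock.isFair());
    }
}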

Java SM2 encryption and decryption

xml
 <dependency>
      <groupId>com.antherd</groupId>
      <artifactId>sm-crypto</artifactId>
      <version>0.3.2.1</version>
    </dependency>
java
/**
 * Generate an SM2 key pair: the public key encrypts, the private key decrypts.
 */
 Keypair keypair = Sm2.generateKeyPairHex();
 String privateKey = keypair.getPrivateKey(); // private key
 String publicKey = keypair.getPublicKey();   // public key

String msg = "hello world";
String encryptData = Sm2.doEncrypt(msg, publicKey);   // ciphertext
System.out.println(encryptData);
String decryptData = Sm2.doDecrypt(encryptData, privateKey); // plaintext
System.out.println(decryptData);

Spring Boot Maven project setup

xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.me</groupId>
    <artifactId>demo2</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>demo2</name>
    <description>demo2</description>
    <properties>
        <java.version>1.8</java.version>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <spring-boot.version>2.6.13</spring-boot.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-devtools</artifactId>
            <scope>runtime</scope>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.46</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-configuration-processor</artifactId>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>com.baomidou</groupId>
            <artifactId>mybatis-plus-boot-starter</artifactId>
            <version>3.5.7</version>
        </dependency>
        <dependency>
            <groupId>org.bouncycastle</groupId>
            <artifactId>bcpkix-jdk18on</artifactId>
            <version>1.78.1</version>
        </dependency>
        <dependency>
            <groupId>cn.hutool</groupId>
            <artifactId>hutool-all</artifactId>
            <version>5.8.28</version>
        </dependency>
    </dependencies>
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-dependencies</artifactId>
                <version>${spring-boot.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>UTF-8</encoding>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <version>${spring-boot.version}</version>
                <configuration>
                    <mainClass>com.me.demo2.Demo2Application</mainClass>
                    <skip>false</skip>
                </configuration>
                <executions>
                    <execution>
                        <id>repackage</id>
                        <goals>
                            <goal>repackage</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

</project>

springboot sharding-jdbc

xml
        <dependency>
            <groupId>org.apache.shardingsphere</groupId>
            <artifactId>shardingsphere-jdbc-core-spring-boot-starter</artifactId>
            <version>5.2.1</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.yaml</groupId>
                    <artifactId>snakeyaml</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.yaml</groupId>
            <artifactId>snakeyaml</artifactId>
            <version>1.33</version>
        </dependency>
yml
server:
  port: 8080
spring:
  shardingsphere:
    mode:
      type: Standalone
      repository:
        type: JDBC
    props:
      # Print the actual SQL that is executed (useful for debugging)
      sql-show: true
      # Uncomment to skip the metadata check that runs SQL against every table
    #      check-table-metadata-enabled: false
    datasource:
      # Real data source names, e.g. ds0, ds1
      names: ds0
      ds0:
        type: com.zaxxer.hikari.HikariDataSource
        driver-class-name: com.mysql.jdbc.Driver
        jdbc-url: jdbc:mysql://localhost:3306/test
        username: root
        password: root

    rules:
      sharding:
        tables:
          # Example: the user_info table
          user_info:
            # Actual data nodes: data source name + table name (Inline expression syntax)
            actual-data-nodes: ds0.user_info_$->{1..2}
            # Distributed key generation strategy
            key-generate-strategy:
              # Generated key column; omit to disable the key generator
              column: id
              # Key generator algorithm name
              key-generator-name: snowflake
            # Database sharding strategy (omit to use the default); pick one of: standard/complex/hint/none
#            database-strategy:
#              # Standard sharding for a single sharding column
#              standard:
#                # Sharding column; here age would be the database sharding key
#                sharding-column: age
#                # Sharding algorithm name
#                sharding-algorithm-name: student_age_inline
            # Table sharding strategy: sharding column id, algorithm user_inline
            table-strategy:
              standard:
                sharding-column: id
                sharding-algorithm-name: user_inline
        # Sharding algorithms
        sharding-algorithms:
          # Database sharding: age % 2
#          student_age_inline:
#            type: INLINE
#            props:
#              algorithm-expression: ds$->{age % 2}
          # Table sharding: route by id to user_info_1 / user_info_2
          user_inline:
            type: INLINE
            props:
              algorithm-expression: user_info_$->{id % 2 +1}
        key-generators:
          # Primary key generation: snowflake algorithm
          snowflake:
            type: SNOWFLAKE





mybatis-plus:
   mapper-locations: classpath:mapper/*.xml
   configuration:
     map-underscore-to-camel-case: true
     log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
java
/**
 * Spring Boot 2.6.3 integrated with sharding-jdbc 5.2.1.
 * Exclude snakeyaml from spring-boot-starter-web and add the newer version,
 * otherwise YAML parsing fails. Usage is the same as with a plain data source.
 */
@RequestMapping(value = "/test", method = {RequestMethod.GET})
@ResponseBody
public ResponseEntity<String> index2() {
    Snowflake snowflake = IdUtil.getSnowflake();
    User user = new User();
    user.setId(snowflake.nextId());
    user.setName("test" + user.getId());
    user.setSex("male");
    user.setTsDate(new Date());
    userMapper.insert(user);

    return ResponseEntity.ok("success");
}

Scheduled tasks (Timer based)

java
/**
 * Base class for scheduled tasks: every task must extend this class
 * and configure itself in its constructor.
 */
public abstract class Task extends TimerTask{
	
	/* Run every day */
	public static final String TASK_TYPE_ERVERDAY = "everyday";
	/* Run at a fixed interval, in minutes */
	public static final String TASK_TYPE_INTERVAL = "interval";
	/* Run once at a fixed point in time */
	public static final String TASK_TYPE_ONCE = "runonce";

	// Task name
	protected String m_strName = null;
	// Scheduling type (one of the TASK_TYPE_* constants)
	private String m_strType = null;

	private long m_lInterval = -1;
	private Calendar m_calCurrent = null;
	public Task()
	{
		super();
		m_strName = null;
		m_strType = null;
		m_lInterval = -1;
		m_calCurrent = Calendar.getInstance();
	}
	/**
	 * Set the date and time at which a run-once task executes.
	 * @param time
	 */
	public final void setRunDateTime(Date time)
	{
		if (time != null)
		{
			this.m_strType = TASK_TYPE_ONCE;

			this.m_calCurrent.setTime(time);
		}

	}

	/**
	 * Set the daily run time. A Date is passed in, but only its time-of-day part is used.
	 * @param time
	 */
	public final void setEveryDayRunTime(Date time)
	{
		if (time != null)
		{
			this.m_strType = TASK_TYPE_ERVERDAY;

			this.m_calCurrent.setTime(time);
		}
	}

	/**
	 * Set the run interval, in minutes; values below one minute are allowed.
	 * @param interval
	 */
	public final void setRunIntervalTime(float interval)
	{
		if (interval > 0)
		{
			this.m_strType = TASK_TYPE_INTERVAL;
			this.m_lInterval = Math.round(interval * Scheduler.MS_OF_MINUTE);
		}
	}

	public final Date getDate()
	{
		return this.m_calCurrent.getTime();
	}

	/**
	 * Returns the interval.
	 * @return int
	 */
	public final long getInterval()
	{
		return m_lInterval;
	}

	/**
	 * Returns the hour.
	 * @return int
	 */
	public final int getHour()
	{
		return m_calCurrent.get(Calendar.HOUR_OF_DAY);
	}

	/**
	 * Returns the minute.
	 * @return int
	 */
	public final int getMinute()
	{
		return this.m_calCurrent.get(Calendar.MINUTE);
	}

	/**
	 * Returns the second.
	 * @return int
	 */
	public final int getSecond()
	{
		return this.m_calCurrent.get(Calendar.SECOND);
	}

	/**
	 * Returns the name.
	 * @return String
	 */
	public final String getName()
	{
		return m_strName;
	}

	/**
	 * Sets the name.
	 * @param name The name to set
	 */
	public final void setName(String name)
	{
		m_strName = name;
	}

	/**
	 * Returns the type.
	 * @return String
	 */
	public final String getType()
	{
		return m_strType;
	}
	
	
}
java

/**
 * Scheduler for timed tasks; call addTask repeatedly to register multiple tasks.
 */
public class Scheduler
{
	public static final long MILLISECONDS_OF_DAY = 24 * 60 * 60 * 1000;
	public static final long MS_OF_MINUTE = 60 * 1000; // milliseconds per minute



	//private static Scheduler m_instance = null;
	private Scheduler m_instance = null;

	private Timer m_timer = null;

	/**
	 * Constructor for Scheduler.
	 */
	private Scheduler()
	{
		m_timer = new Timer();
	}

	/**
	 *
	 */
	/*synchronized public static Scheduler getInstance()
	{
		if (m_instance == null)
		{
			m_instance = new Scheduler();
		}
		return m_instance;
	}*/

	/**
	 * Modify by leiyang date 2007/07/04
	 */
	public static Scheduler getInstance()
	{
		return new Scheduler();
	}

	/**
	 *
	 */
	public void addTask(Task task) throws Exception
	{
		try
		{
			if (task != null)
			{
				// current time
				Calendar calCurrent = this.getCurrentDate();
				calCurrent.setTime(new Date());
				long lDelay = 0;
				long lPeriod = 5000;

				if (task.getType().equalsIgnoreCase(Task.TASK_TYPE_ERVERDAY))
				{
					long lTemp = 0;

					lTemp = calCurrent.get(Calendar.HOUR_OF_DAY) * 60 * 60 * 1000;

					lTemp += calCurrent.get(Calendar.MINUTE) * 60 * 1000;

					lTemp += calCurrent.get(Calendar.SECOND) * 1000;

					lDelay = task.getHour() * 60 * 60 * 1000;

					lDelay += task.getMinute() * 60 * 1000;

					lDelay += task.getSecond() * 1000;

					if (lDelay < lTemp)
					{
						lDelay = Scheduler.MILLISECONDS_OF_DAY-(lTemp - lDelay) ;
					}
					else
					{
						lDelay = lDelay - lTemp;
					}

					lPeriod = Scheduler.MILLISECONDS_OF_DAY;

					this.m_timer.schedule(task, lDelay, lPeriod);

					System.out.println(
							"AutoTask: "
									+ task.getName()
									+ " will run every day at "
									+ task.getHour()
									+ ":"
									+ task.getMinute()
									+ ":"
									+ task.getSecond()
									+ ", lDelay=" + lDelay);
				}
				else if (task.getType().equalsIgnoreCase(Task.TASK_TYPE_INTERVAL))
				{
					lPeriod = task.getInterval();

					this.m_timer.schedule(task, lDelay, lPeriod);

					System.out.println(
							"AutoTask: " + task.getName() + " will start in " + (lDelay / 1000) + " s and then run every " + (lPeriod / 1000) + " s.");
				}
				else if (task.getType().equalsIgnoreCase(Task.TASK_TYPE_ONCE))
				{
					Date dtTemp = task.getDate();

					if (dtTemp.before(calCurrent.getTime()))
						throw new Exception("The scheduled run time has already passed");

					this.m_timer.schedule(task, dtTemp);
					SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
					System.out.println("AutoTask: " + task.getName() + " will run at " + sdf.format(dtTemp));
				}
			}
		}
		catch (Exception e)
		{
			//e.printStackTrace();
			throw new Exception("Could not add this scheduled task: " + e.getMessage());
		}
	}

	/**
	 *
	 */
	public void Pause()
	{
		if (m_timer != null)
		{
			m_timer.cancel();
		}
	}

	/**
	 * Returns the current system date and time.
	 * @return Calendar
	 */
	public Calendar getCurrentDate()
	{
		//Timestamp dtTemp = Env.getSystemDate();

		Calendar calCurrent = Calendar.getInstance();

		//calCurrent.setTime(dtTemp);

		return calCurrent;
	}

}
java

/**
 * A run-once task executed at a fixed point in time.
 * runDate would normally come from a configuration file.
 */
public class DemoTask  extends Task{
    public static final String runDate="2024-09-14 20:38:00";
    public DemoTask() {
        this.m_strName = "demo";
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date time;
        try {
            time = sdf.parse(runDate);
            this.setRunDateTime(time);
        } catch (ParseException e) {
            e.printStackTrace();
        }
    }

    /**
     * The task logic.
     */
    @Override
    public void run() {
        System.out.println(Thread.currentThread().getName()+":"+new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()));
    }
}
java
/**
 * A task executed at a fixed interval, in minutes.
 */
public class DemoTask2 extends Task{

    public DemoTask2() {
        this.m_strName = "demo";

        try {
            this.setRunIntervalTime(1);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * The action to be performed by this timer task.
     */
    @Override
    public void run() {
        System.out.println(Thread.currentThread().getName()+":"+new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()));
    }
}
java

/**
 * A task executed every day at a fixed time (only the time-of-day part of runDate is used).
 */
public class DemoTask3 extends Task{
    public static final String runDate="2024-09-14 20:38:00";
    public DemoTask3() {
        this.m_strName = "demo";

        try {
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            this.setEveryDayRunTime(sdf.parse(runDate));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * The action to be performed by this timer task.
     */
    @Override
    public void run() {
        System.out.println(Thread.currentThread().getName()+":"+new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()));
    }
}
java
/**
 * Test class.
 */
public class Test1 {
    public static void main(String[] args) {
        ExecutorService executorService = Executors.newFixedThreadPool(1);
        // Register the three demo tasks from a separate thread.
        executorService.execute(() -> {
            DemoTask task = new DemoTask();
            DemoTask2 task2 = new DemoTask2();
            DemoTask3 task3 = new DemoTask3();
            Scheduler scheduler = Scheduler.getInstance();
            try {
                scheduler.addTask(task);
                scheduler.addTask(task2);
                scheduler.addTask(task3);
            } catch (Exception e) {
                e.printStackTrace();
            }
        });

    }
}

Spring Boot Tomcat thread configuration

yml
server:
  tomcat:
    threads:
      max: 1
      min-spare: 1
---------------------------
##   Older Spring Boot versions
server:
  tomcat:
##    maximum number of connections
    max-connections: 1
##    maximum number of worker threads
    max-threads: 1
##    minimum number of worker threads
    min-spare-threads: 1
##    length of the accept (wait) queue
    accept-count: 1
#

Tomcat: changing session cookie settings

xml
<Context docBase="D:/tmp/opt/tomcat1/web" path="/web" reloadable="true"
    sessionCookieName="demo"
    sessionCookiePath="/" />

<Context docBase="demo" path="/demo" reloadable="true" sessionCookieName="demo"
    sessionCookiePath="/" />
Scenario: nginx in front of two Tomcats (tomcatA and tomcatB). If the domain, cookie path,
and cookie name are all the same, the session cookies can overwrite each other and sessions get lost:

set a session value of 1 on tomcatA;
set a session value of 3 on tomcatB;
refresh tomcatA and its session value is now null, i.e. the session was overwritten.

In practice the same problem appeared with TongWeb and WebSphere: WebSphere's default cookie path
is "/", which triggered exactly this behaviour.

Fix: change the default JSESSIONID cookie name to something application-specific, e.g. bizsession.

https://www.cnblogs.com/wenlong/p/3684233.html

Spring Boot SSE: streaming a response piece by piece, like the way a GPT chat reply appears word by word

java
package com.me.meterdemo.demos.web;

import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

@RestController
public class Test2Controller {

    @GetMapping("msg")
    public SseEmitter test2(){
        // New emitter per request; the default timeout applies.
        SseEmitter emitter = new SseEmitter();
        // Demo only: a real application should reuse a shared executor rather than creating one per request.
        ExecutorService executorService = Executors.newFixedThreadPool(1);
        executorService.execute(() -> {
            for (int i = 0; i < 10; i++) {
                try {
                    emitter.send("test"+i);
                    Thread.sleep(1000);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            try {
                emitter.send(SseEmitter.event().name("stop").data(""));
            } catch (IOException e) {
                e.printStackTrace();
            }
            emitter.complete();
        });
        return  emitter;
    }
}

Spring Boot + Quartz (clustered)

xml
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-quartz</artifactId>
            <version>2.5.2</version>
        </dependency>

Cluster database configuration

yml configuration

yaml
server:
  port: 8080
spring:
  datasource:
    type: com.zaxxer.hikari.HikariDataSource  # data source type
    hikari:
      pool-name: KevinHikariPool  # pool name, default HikariPool-1
      maximum-pool-size: 20   # max connections; <=0 resets to the default of 10; between 0 and 1 resets to minimum-idle
      connection-timeout: 60000 # connection timeout in ms; values under 250 ms reset to the 30 s default
      minimum-idle: 10  # minimum idle connections, default 10; <0 or >maximum-pool-size resets to maximum-pool-size
      idle-timeout: 500000   # idle connections above the minimum are released after this idle time
      max-lifetime: 600000   # max connection lifetime; non-zero values under 30 s reset to the 30 min default; keep it below MySQL's timeout
      connection-test-query: SELECT 1   # connection test query
    quartz:
      driver-class-name: com.mysql.jdbc.Driver
      jdbc-url: jdbc:mysql://localhost:3306/quartz?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&useAffectedRows=true&useSSL=false&zeroDateTimeBehavior=convertToNull&serverTimezone=GMT%2B8
      username: root
      password: root
  quartz:
    # store jobs in the database via JDBC
    job-store-type: jdbc
    # start the scheduler together with the container (default true)
    auto-startup: true
    # whether existing jobs may be overwritten (default false)
    overwrite-existing-jobs: false
    # on shutdown, wait for running jobs to finish (default false)
    wait-for-jobs-to-complete-on-shutdown: true
    # delay scheduler startup (default 0s)
    startup-delay: 10s
    properties:
      # size of the thread pool that runs jobs (default 10)
      org.quartz.threadPool.threadCount: 10
      # cluster name; all instances of the same cluster must use the same name
      org.quartz.scheduler.instanceName: OrderService
      # instance id; must be unique within the cluster
      org.quartz.scheduler.instanceId: Order_0
      # start in clustered mode
      org.quartz.jobStore.isClustered: true
      # job store class; note that the required value differs between versions
      org.quartz.jobStore.class: org.springframework.scheduling.quartz.LocalDataSourceJobStore
      # JDBC delegate matching the database in use
      org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.StdJDBCDelegate
      # table prefix of the Quartz cluster tables
      org.quartz.jobStore.tablePrefix: QRTZ_
      # misfire threshold in milliseconds (default 60 s)
      org.quartz.jobStore.misfireThreshold: 12000

    jdbc:
      # always: generate the Quartz tables automatically; never: do not
      initialize-schema: always

java
package com.me.quartzdemo.config;

import com.zaxxer.hikari.HikariDataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.quartz.QuartzDataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.sql.DataSource;

/**
 * Quartz data source configuration for the clustered setup.
 */
@Configuration
public class QuartzDataSourceConfig {
    @Value("${spring.datasource.quartz.jdbc-url}")
    private String url;

    @Value("${spring.datasource.quartz.driver-class-name}")
    private String driverClassName;

    @Value("${spring.datasource.quartz.username}")
    private String username;

    @Value("${spring.datasource.quartz.password}")
    private String password;


    // @QuartzDataSource marks this bean as the data source Quartz should use
    @Bean
    @QuartzDataSource
    @Qualifier("quartzDataSource")
    public DataSource quartzDataSource() {
        HikariDataSource dataSource = new HikariDataSource();
        dataSource.setJdbcUrl(url);
        dataSource.setUsername(username);
        dataSource.setPassword(password);
        dataSource.setDriverClassName(driverClassName);

        return  dataSource;
    }
}
java
package com.me.quartzdemo.config;

import com.me.quartzdemo.task.QuartzDemoJob;
import org.quartz.*;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Basic job configuration.
 */
@Configuration
public class QuartzConfig {

    /**
     * Build the JobDetail for the scheduled job.
     * @return
     */
    @Bean
    public JobDetail printTimeJobDetail(){
        return JobBuilder.newJob(QuartzDemoJob.class)
                .withIdentity("quartzDemoJob")
                .usingJobData("name","111")
                .storeDurably()
                .build();
    }

    /**
     * Trigger with a cron expression.
     * @return
     */

    @Bean
    public Trigger printTimeJobTrigger(){
        CronScheduleBuilder cronScheduleBuilder = CronScheduleBuilder.cronSchedule("0/3 * * * * ? ");
        return TriggerBuilder.newTrigger()
                .forJob(printTimeJobDetail()) // link to the JobDetail above
                .withIdentity("quartzTaskService") // trigger name
                .withSchedule(cronScheduleBuilder)
                .build();
    }
}
java
package com.me.quartzdemo.task;

import lombok.extern.slf4j.Slf4j;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.scheduling.quartz.QuartzJobBean;
import org.springframework.stereotype.Component;

import java.text.SimpleDateFormat;
import java.util.Date;
/**
 * The job implementation.
 */
@Slf4j
@Component
public class QuartzDemoJob extends QuartzJobBean {
    @Override
    protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
        String name = context.getJobDetail().getJobDataMap().getString("name");
        log.info("name="+name);
        System.out.println("current time :"+new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "---" );
    }
}

logstash

xml
        <dependency>
            <groupId>net.logstash.logback</groupId>
            <artifactId>logstash-logback-encoder</artifactId>
            <version>7.0</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <version>1.2.3</version>
        </dependency>
        <dependency>
            <groupId>cn.licoy</groupId>
            <artifactId>encrypt-body-spring-boot-starter</artifactId>
            <version>1.2.3</version>
        </dependency>
xml
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <!-- Spring Boot's default logback settings; must be included or nothing is logged -->
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>

    <!--&lt;!&ndash; centrally managed properties &ndash;&gt;
    <property resource="application.properties"/>-->
    <!-- log file location -->
    <property name="LOG_HOME" value="logs"/>
    <property name="FILE_NAME" value="${LOG_HOME}/%d{yyyy-MM-dd}"/>
    <!-- custom log pattern -->
    <property name="PATTERN" value="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %c{50} %M %L -- %m%n"/>
    <!--
        %d{yyyy-MM-dd HH:mm:ss.SSS} date format
        %c fully qualified class name
        %logger fully qualified class name
        %M method name
        %L line number
        %thread thread
        %m message
        %n newline
        %-5level log level
    -->

    <!-- console output -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern> %red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger) - %cyan(%msg%n)</pattern>
        </encoder>
    </appender>
    <!-- daily rolling log file -->
    <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- output file name pattern -->
            <FileNamePattern>${FILE_NAME}_all.txt</FileNamePattern>
            <!-- days of history to keep -->
            <MaxHistory>7</MaxHistory>
            <totalSizeCap>512MB</totalSizeCap>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${PATTERN}</pattern>
            <!--<pattern>%JsonOutPut %n</pattern>-->
        </encoder>
    </appender>
    <!-- daily rolling log file, filtered by level -->
    <appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- output file name pattern -->
            <FileNamePattern>${FILE_NAME}_err.txt</FileNamePattern>
            <!-- days of history to keep -->
            <MaxHistory>7</MaxHistory>
            <totalSizeCap>512MB</totalSizeCap>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${PATTERN}</pattern>
            <!--<pattern>%JsonOutPut %n</pattern>-->
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- level to filter on -->
            <level>ERROR</level>
            <!-- what to do with matching events -->
            <onMatch>ACCEPT</onMatch>
            <!-- what to do with non-matching events -->
            <onMismatch>DENY</onMismatch>
        </filter>
        </filter>
    </appender>
    <!-- daily rolling HTML log file -->
    <appender name="FILE_HTML" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- output file name pattern -->
            <FileNamePattern>${FILE_NAME}_all.html</FileNamePattern>
            <!-- days of history to keep -->
            <MaxHistory>7</MaxHistory>
            <totalSizeCap>512MB</totalSizeCap>
        </rollingPolicy>
        <encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
            <layout class="ch.qos.logback.classic.html.HTMLLayout">
                <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS}%-5level%thread%c{50}%M%L%m</pattern>
            </layout>
        </encoder>
    </appender>
    <!-- asynchronous logging -->
    <appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
        <!-- appender whose events are written asynchronously -->
        <appender-ref ref="FILE" />
    </appender>
    <!-- logstash: push logs to Elasticsearch -->
    <!--<appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        &lt;!&ndash; <destination>${logstash.host}:${logstash.port}</destination>&ndash;&gt;
        <destination>127.0.0.1:8081</destination>
        <includeCallerData>true</includeCallerData>

        <encoder class="net.logstash.logback.encoder.LogstashEncoder">
            <includeCallerData>true</includeCallerData>
        </encoder>
    </appender>-->

    <!-- root log level -->
    <root level="info">
        <appender-ref ref="FILE_HTML"/>
        <appender-ref ref="FILE_ERROR"/>
        <appender-ref ref="FILE"/>
        <appender-ref ref="STDOUT"/>
<!--        <appender-ref ref="STDOUT"/>-->
<!--        <appender-ref ref="SOCKET"/>-->
        <!--<appender-ref ref="LOGSTASH"/>-->

    </root>
    <!-- per-package log level override -->
    <logger name="com.me.logelk" level="DEBUG"/>
</configuration>

Preventing duplicate execution of scheduled tasks with ShedLock

xml
  <dependency>
            <groupId>net.javacrumbs.shedlock</groupId>
            <artifactId>shedlock-spring</artifactId>
            <version>4.2.0</version>
        </dependency>
        <dependency>
            <groupId>net.javacrumbs.shedlock</groupId>
            <artifactId>shedlock-provider-jdbc-template</artifactId>
            <version>4.2.0</version>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-jdbc</artifactId>
        </dependency>
java
import javax.annotation.Resource;
import javax.sql.DataSource;

import net.javacrumbs.shedlock.core.LockProvider;
import net.javacrumbs.shedlock.provider.jdbctemplate.JdbcTemplateLockProvider;
import org.springframework.context.annotation.Bean;
import org.springframework.stereotype.Component;

@Component
public class ShedLockConfig {
    @Resource
    private DataSource dataSource;

    // @Bean methods must not be private, otherwise Spring cannot process them.
    @Bean
    public LockProvider lockProvider() {
        return new JdbcTemplateLockProvider(dataSource);
    }
}
java
@SpringBootApplication
@MapperScan("com.me.mapper")
@EnableScheduling
@EnableSchedulerLock(defaultLockAtMostFor = "PT1S")
public class App {
    public static void main(String[] args) {
        SpringApplication.run(App.class,args);
    }
}
java
package com.me.task;

import cn.hutool.core.date.DateUtil;
import com.me.entity.Balance;
import com.me.entity.Lock;
import com.me.entity.Log;
import com.me.mapper.BalanceMapper;
import com.me.mapper.LockMapper;
import com.me.mapper.LogMapper;
import com.me.util.LockUtil;
import net.javacrumbs.shedlock.spring.annotation.SchedulerLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.Date;


@Component
public class AutoTask {

    @Autowired
    BalanceMapper mapper;




    @Value("${task.ip}")
    private String ip ;

    Logger logger = LoggerFactory.getLogger(AutoTask.class);

    // The cron expression comes from configuration; "balance" is the distributed lock name.
    @Scheduled(cron = "${task.cron}")
    @SchedulerLock(name = "balance", lockAtMostFor = "PT1S", lockAtLeastFor = "PT1S")
    public void task() {
        // The actual scheduled-task logic goes inside the method body.
        try {
            Date date = new Date();
            String format = DateUtil.format(date, "yyyy-MM-dd HH:mm:ss");
            logger.info("Scheduled task started at {}", format);
            Balance balance = mapper.selectById(1);
            if (balance != null) {
                balance.setBalance(balance.getBalance().add(new java.math.BigDecimal(100)));
                balance.setUpdateTime(new Date());
                balance.setVersion(balance.getVersion() + 1);
                mapper.updateById(balance);
            }
        } catch (Exception e) {
            logger.info("The scheduled task has already been executed; skipping duplicate run");
            e.printStackTrace();
        }
    }
}
sql
CREATE TABLE shedlock(
    name VARCHAR(64) ,
    lock_until TIMESTAMP(3) NULL,
    locked_at TIMESTAMP(3) NULL,
    locked_by  VARCHAR(255),
    PRIMARY KEY (name)
)

JaCoCo test coverage report (pom.xml)

xml
 <plugin>
                <groupId>org.jacoco</groupId>
                <artifactId>jacoco-maven-plugin</artifactId>
                <version>0.8.11</version>

                <configuration>
                    <includes>
                        <include>com/me/workflow/controller/*</include>
                    </includes>
                </configuration>
                <executions>
                    <execution>
                        <id>pre-test</id>
                        <goals>
                            <goal>prepare-agent</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>post-test</id>
                        <phase>test</phase>
                        <goals>
                            <goal>report</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
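
After running mvn clean test, the HTML report should end up at target/site/jacoco/index.html (the plugin's default report location; the report goal above is bound to the test phase).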

MyBatis + Ehcache integration

xml
pom.xml
 <!-- Ehcache dependency -->
            <dependency>
                <groupId>org.mybatis.caches</groupId>
                <artifactId>mybatis-ehcache</artifactId>
                <version>1.1.0</version>
            </dependency>
xml
<?xml version="1.0" encoding="UTF-8"?>
<ehcache xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:noNamespaceSchemaLocation="ehcache.xsd" updateCheck="true"
         monitoring="autodetect" dynamicConfig="true">

    <!-- where overflow data is stored on disk -->
    <diskStore path="/opt/tmp" />

    <!-- default cache policy -->
    <defaultCache
            maxElementsInMemory="1000"
            maxElementsOnDisk="10000000"
            eternal="false"
            overflowToDisk="false"
            timeToIdleSeconds="120"
            timeToLiveSeconds="120"
            diskExpiryThreadIntervalSeconds="120"
            memoryStoreEvictionPolicy="LRU">
    </defaultCache>
</ehcache>
xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE configuration PUBLIC "-//mybatis.org//DTD Config 3.0//EN" "http://mybatis.org/dtd/mybatis-3-config.dtd">
<configuration>
    <settings>
        <setting name="logImpl" value="STDOUT_LOGGING"/>
        <!-- enable the second-level cache -->
        <setting name="cacheEnabled" value="true"/>
        <setting name="defaultExecutorType" value="SIMPLE"/>
    </settings>
    
    <!-- environments -->
    <environments default="mysql">
        <!-- MySQL environment -->
        <environment id="mysql">
            <!-- transaction manager type -->
            <transactionManager type="JDBC"></transactionManager>
            <!-- data source (connection pool) -->
            <dataSource type="POOLED">
                <!-- the four basic connection settings -->
                <property name="driver" value="com.mysql.jdbc.Driver" />
                <property name="url" value="jdbc:mysql://localhost:3306/test?useSSL=false" />
                <property name="username" value="root" />
                <property name="password" value="root" />
            </dataSource>
        </environment>
        <environment id="pg">
            <!-- transaction manager type -->
            <transactionManager type="JDBC"></transactionManager>
            <!-- data source (connection pool) -->
            <dataSource type="POOLED">
                <!-- the four basic connection settings -->
                <property name="driver" value="org.postgresql.Driver" />
                <property name="url" value="jdbc:postgresql://127.0.0.1:5432/postgres" />
                <property name="username" value="postgres" />
                <property name="password" value="123456" />
            </dataSource>
        </environment>
    </environments>

    <mappers>
        <mapper resource="mybatis/mapper/UsersMapper.xml" />
    </mappers>

</configuration>

Spring Boot global exception handling

java
package com.me.lockydemo.handler;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;

import java.util.HashMap;
import java.util.Map;

/**
 * Global exception handling.
 */
@RestControllerAdvice
public class LogAspect {

    Logger logger = LoggerFactory.getLogger(LogAspect.class);


    @ExceptionHandler(Exception.class)
    public ResponseEntity<Map<String, Object>> error(Exception e) {
        logger.error("error:", e);
        // The map must be initialized; calling put on null would throw a NullPointerException.
        Map<String, Object> map = new HashMap<>();
        map.put("code", 500);
        map.put("msg", e.getMessage());
        return ResponseEntity.ok(map);
    }
}

Spring Boot AOP around advice for request logging

Add the dependency

xml
 <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-aop</artifactId>
</dependency>
java
package com.me.lockydemo.handler;

import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;

import java.util.Arrays;

@Aspect
@Component
public class LockAspect {

    Logger logger = LoggerFactory.getLogger(LockAspect.class);

    /**
     * Pointcut covering all controller methods.
     */
    @Pointcut("execution(* com.me.lockydemo.controller.*.*(..))")
    public void lockPoint() {
    }

    /**
     * Around advice: log the request URL, query string, HTTP method,
     * target method, and arguments before proceeding.
     * @param joinPoint
     * @return
     * @throws Throwable
     */
    @Around("lockPoint()")
    public Object logBeforeMethod(ProceedingJoinPoint joinPoint) throws Throwable {

        try {
            ServletRequestAttributes requestAttributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
            if (requestAttributes != null) {
                logger.info("Request URL: " + requestAttributes.getRequest().getRequestURL().toString());
                logger.info("Query string: " + requestAttributes.getRequest().getQueryString());
                logger.info("HTTP method: " + requestAttributes.getRequest().getMethod());
            }
            logger.info("Target method: " + joinPoint.getTarget().getClass().getName() + "." + joinPoint.getSignature().getName());
            logger.info("Arguments: " + Arrays.toString(joinPoint.getArgs()));
            return joinPoint.proceed();
        } catch (Throwable e) {
            throw e;
        }
    }
}

Spring Cloud OpenFeign: propagating header parameters

Configure the following on the consumer side

java
package com.me.consumer.config;

import feign.RequestInterceptor;
import feign.RequestTemplate;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;

import javax.servlet.http.HttpServletRequest;
import java.util.Enumeration;

/**
 * Feign interceptor that propagates request headers.
 */
@Configuration
public class FeignRequestConfiguration {


    @Bean
    public RequestInterceptor requestInterceptor() {
        return new RequestInterceptor() {
            @Override
            public void apply(RequestTemplate template) {
                // Copy the incoming request headers on the consumer and pass them through to the provider.
                RequestAttributes requestAttributes = RequestContextHolder.getRequestAttributes();
                if (requestAttributes != null) {
                    ServletRequestAttributes servletRequestAttributes = (ServletRequestAttributes) requestAttributes;
                    HttpServletRequest request = servletRequestAttributes.getRequest();

                    Enumeration<String> headerNames = request.getHeaderNames();
                    if (headerNames != null) {
                        while (headerNames.hasMoreElements()) {
                            String name = headerNames.nextElement();
                            String value = request.getHeader(name);
                            template.header(name, value);
                        }
                    }
                }
            }
        };
    }
}

Spring Boot + Prometheus integration

xml
 <dependency>
            <groupId>io.micrometer</groupId>
            <artifactId>micrometer-registry-prometheus</artifactId>
            <version>1.9.0</version>
        </dependency>

application.yml

yml
management:
  endpoints:
    web:
      exposure:
        include: "*"
  metrics:
    tags:
      application: ${spring.application.name}
      instance: ${spring.application.instance_id:${random.value}}
      service: ${spring.application.name}
      service-instance-id: ${spring.application.instance_id:${random.value}}
#      http://localhost:8081/actuator/prometheus
java
package com.me.stress1.controller;

import cn.hutool.core.date.TimeInterval;
import com.alibaba.excel.EasyExcel;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.me.stress1.domain.IdmsUser;
import com.me.stress1.mapper.IdmsUserMapper;
import com.me.stress1.service.IdmsUserService;
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Counter;
import org.apache.ibatis.cursor.Cursor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;

@RestController
public class IndexController {

    private Logger logger = LoggerFactory.getLogger(IndexController.class);

    @Timed("basic.createID")
    @RequestMapping("/generateId")
    public String createID() {
        logger.info("createID");
        return UUID.randomUUID().toString();
    }


  
    @Autowired
    private MeterRegistry meterRegistry;
    private Counter counter;


    /**
     * Endpoint used for JMeter stress testing: usually one or two records per
     * request, to measure response time and QPS-related metrics.
     *
     * @param idmsUser
     * @return
     */
    @PostMapping("/add")
    public JSONObject add(@RequestBody List<IdmsUser> idmsUser) {
        // Count the number of calls to this endpoint.
        counter = meterRegistry.counter("app_requests_method_count", "method", "com.me.stress1.controller.IndexController.add");

        // Business logic
        JSONObject object = new JSONObject();
        object.put("code", 200);
        object.put("msg", "ok");
        object.put("data", idmsUser);
        counter.increment();
        return object;
    }
   
}