在用 Spring Boot 2.0 集成 disconf 後(參考本博客其他文章),發現一個問題:只有啟動兩次,被 disconf 托管的配置才會生效。
之前springboot的配置基本都在application.properties中,需要將配置改成代碼方式。代碼方式的配置需要加一個@Configuration注解
@Configuration 注解是用於定義配置類,可替換xml配置文件,被注解的類內部包含有一個或多個被@Bean注解的方法,這些方法將會被AnnotationConfigApplicationContext或AnnotationConfigWebApplicationContext類進行掃描,並用於構建bean定義,初始化Spring容器。
1.Disconf配置( 替代之前的spring-disconf.xml)
/**
 * Disconf bootstrap configuration, replacing the former spring-disconf.xml.
 *
 * <p>Registers the two disconf manager beans so that disconf-annotated
 * classes under the scan package are picked up at startup.
 */
@Configuration
public class DisconfConfig {

    /**
     * First-phase disconf manager: scans the given package for
     * {@code @DisconfFile}/{@code @DisconfFileItem} annotated beans.
     */
    @Bean(destroyMethod = "destroy")
    public DisconfMgrBean getDisconfMgrBean() {
        final DisconfMgrBean mgrBean = new DisconfMgrBean();
        mgrBean.setScanPackage("com.sijibao.scfs");
        return mgrBean;
    }

    /** Second-phase disconf manager; its init hook finishes the scan. */
    @Bean(destroyMethod = "destroy", initMethod = "init")
    public DisconfMgrBeanSecond getDisconfMgrBean2() {
        return new DisconfMgrBeanSecond();
    }
}
disconf.properties文件和之前一樣,保持不變
disconf配置中心需要建被托管的文件,如下面代碼就需要建dataSource.properties、redis.properties、kafka.properties
2.數據庫配置
@Component @DisconfFile(filename = "dataSource.properties") public class DataSourceConfig { private String url; private String username; private String password; private String driverClassName; private int initialSize; private int minIdle; private int maxActive; private int maxWait; private int timeBetweenEvictionRunsMillis; private int minEvictableIdleTimeMillis; private String validationQuery; private boolean testWhileIdle; private boolean testOnBorrow; private boolean testOnReturn; //private boolean poolPreparedStatements; //private int maxPoolPreparedStatementPerConnectionSize; //private String filters; // private String connectionProperties; private boolean useGlobalDataSourceStat; @DisconfFileItem(associateField = "url",name = "spring.datasource.url") public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } @DisconfFileItem(associateField = "username",name = "spring.datasource.username") public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } @DisconfFileItem(associateField = "password",name = "spring.datasource.password") public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } @DisconfFileItem(associateField = "driverClassName",name = "spring.datasource.driver-class-name") public String getDriverClassName() { return driverClassName; } public void setDriverClassName(String driverClassName) { this.driverClassName = driverClassName; } @DisconfFileItem(associateField = "initialSize",name = "spring.datasource.druid.initialSize") public int getInitialSize() { return initialSize; } public void setInitialSize(int initialSize) { this.initialSize = initialSize; } @DisconfFileItem(associateField = "minIdle",name = "spring.datasource.druid.minIdle") public int getMinIdle() { return minIdle; } public void setMinIdle(int minIdle) { this.minIdle = minIdle; } @DisconfFileItem(associateField = "maxActive",name = 
"spring.datasource.druid.maxActive") public int getMaxActive() { return maxActive; } public void setMaxActive(int maxActive) { this.maxActive = maxActive; } @DisconfFileItem(associateField = "maxWait",name = "spring.datasource.druid.maxWait") public int getMaxWait() { return maxWait; } public void setMaxWait(int maxWait) { this.maxWait = maxWait; } @DisconfFileItem(associateField = "timeBetweenEvictionRunsMillis",name = "spring.datasource.druid.timeBetweenEvictionRunsMillis") public int getTimeBetweenEvictionRunsMillis() { return timeBetweenEvictionRunsMillis; } public void setTimeBetweenEvictionRunsMillis(int timeBetweenEvictionRunsMillis) { this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis; } @DisconfFileItem(associateField = "minEvictableIdleTimeMillis",name = "spring.datasource.druid.minEvictableIdleTimeMillis") public int getMinEvictableIdleTimeMillis() { return minEvictableIdleTimeMillis; } public void setMinEvictableIdleTimeMillis(int minEvictableIdleTimeMillis) { this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis; } @DisconfFileItem(associateField = "validationQuery",name = "spring.datasource.druid.validationQuery") public String getValidationQuery() { return validationQuery; } public void setValidationQuery(String validationQuery) { this.validationQuery = validationQuery; } @DisconfFileItem(associateField = "testWhileIdle",name = "spring.datasource.druid.testWhileIdle") public boolean isTestWhileIdle() { return testWhileIdle; } public void setTestWhileIdle(boolean testWhileIdle) { this.testWhileIdle = testWhileIdle; } @DisconfFileItem(associateField = "testOnBorrow",name = "spring.datasource.druid.testOnBorrow") public boolean isTestOnBorrow() { return testOnBorrow; } public void setTestOnBorrow(boolean testOnBorrow) { this.testOnBorrow = testOnBorrow; } @DisconfFileItem(associateField = "testOnReturn",name = "spring.datasource.druid.testOnReturn") public boolean isTestOnReturn() { return testOnReturn; } public void 
setTestOnReturn(boolean testOnReturn) { this.testOnReturn = testOnReturn; } // @DisconfFileItem(associateField = "poolPreparedStatements",name = "redis.host") // public boolean isPoolPreparedStatements() { // return poolPreparedStatements; // } // // public void setPoolPreparedStatements(boolean poolPreparedStatements) { // this.poolPreparedStatements = poolPreparedStatements; // } // @DisconfFileItem(associateField = "maxPoolPreparedStatementPerConnectionSize",name = "") // public int getMaxPoolPreparedStatementPerConnectionSize() { // return maxPoolPreparedStatementPerConnectionSize; // } // // public void setMaxPoolPreparedStatementPerConnectionSize(int maxPoolPreparedStatementPerConnectionSize) { // this.maxPoolPreparedStatementPerConnectionSize = maxPoolPreparedStatementPerConnectionSize; // } // @DisconfFileItem(associateField = "filters",name = "") // public String getFilters() { // return filters; // } // // public void setFilters(String filters) { // this.filters = filters; // } // @DisconfFileItem(associateField = "connectionProperties",name = "redis.host") // public String getConnectionProperties() { // return connectionProperties; // } // // public void setConnectionProperties(String connectionProperties) { // this.connectionProperties = connectionProperties; // } @DisconfFileItem(associateField = "useGlobalDataSourceStat",name = "redis.host") public boolean isUseGlobalDataSourceStat() { return useGlobalDataSourceStat; } public void setUseGlobalDataSourceStat(boolean useGlobalDataSourceStat) { this.useGlobalDataSourceStat = useGlobalDataSourceStat; } }
@Configuration @AutoConfigureAfter(DisconfConfig.class) public class DataSourceConfiguration { public final static org.slf4j.Logger logger = LoggerFactory.getLogger(DataSourceConfiguration.class); @Autowired private DataSourceConfig dataSourceConfig; @Bean //聲明其為Bean實例 @Primary //在同樣的DataSource中,首先使用被標注的DataSource public DataSource dataSource(){ logger.info("初始化數據庫連接池"); DruidDataSource datasource = new DruidDataSource(); datasource.setUrl(dataSourceConfig.getUrl()); datasource.setUsername(dataSourceConfig.getUsername()); datasource.setPassword(dataSourceConfig.getPassword()); datasource.setDriverClassName(dataSourceConfig.getDriverClassName()); //configuration datasource.setInitialSize(dataSourceConfig.getInitialSize()); datasource.setMinIdle(dataSourceConfig.getMinIdle()); datasource.setMaxActive(dataSourceConfig.getMaxActive()); datasource.setMaxWait(dataSourceConfig.getMaxWait()); datasource.setTimeBetweenEvictionRunsMillis(dataSourceConfig.getTimeBetweenEvictionRunsMillis()); datasource.setMinEvictableIdleTimeMillis(dataSourceConfig.getMinEvictableIdleTimeMillis()); datasource.setValidationQuery(dataSourceConfig.getValidationQuery()); datasource.setTestWhileIdle(dataSourceConfig.isTestWhileIdle()); datasource.setTestOnBorrow(dataSourceConfig.isTestOnReturn()); datasource.setTestOnReturn(dataSourceConfig.isTestOnReturn()); // datasource.setPoolPreparedStatements(poolPreparedStatements); // datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize); // datasource.setUseGlobalDataSourceStat(useGlobalDataSourceStat); // try { // datasource.setFilters(filters); // } catch (SQLException e) { // System.err.println("druid configuration initialization filter: "+ e); // } // datasource.setConnectionProperties(connectionProperties); logger.info("初始化數據庫連接池完成"); return datasource; } }
3.redis配置
/**
 * Redis connection properties managed by disconf (file: redis.properties).
 *
 * <p>Each getter is bound to a distributed key through
 * {@code @DisconfFileItem}; disconf fills the fields at startup and pushes
 * remote updates.
 */
@Component
@DisconfFile(filename = "redis.properties")
public class RedisConfig {

    private String host;
    private int port;
    private int timeout;
    private int maxIdle;
    private long maxWaitMillis;
    private String password;

    /** Redis server host. */
    @DisconfFileItem(name = "spring.redis.host", associateField = "host")
    public String getHost() {
        return host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    /** Redis server port. */
    @DisconfFileItem(name = "spring.redis.port", associateField = "port")
    public int getPort() {
        return port;
    }

    public void setPort(int port) {
        this.port = port;
    }

    /** Connection timeout. */
    @DisconfFileItem(name = "spring.redis.timeout", associateField = "timeout")
    public int getTimeout() {
        return timeout;
    }

    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }

    /** Jedis pool: maximum idle connections. */
    @DisconfFileItem(name = "spring.redis.jedis.pool.max-idle", associateField = "maxIdle")
    public int getMaxIdle() {
        return maxIdle;
    }

    public void setMaxIdle(int maxIdle) {
        this.maxIdle = maxIdle;
    }

    /** Jedis pool: maximum borrow wait in milliseconds. */
    @DisconfFileItem(name = "spring.redis.jedis.pool.max-wait", associateField = "maxWaitMillis")
    public long getMaxWaitMillis() {
        return maxWaitMillis;
    }

    public void setMaxWaitMillis(long maxWaitMillis) {
        this.maxWaitMillis = maxWaitMillis;
    }

    /** Redis auth password. */
    @DisconfFileItem(name = "spring.redis.password", associateField = "password")
    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }
}
4.kafka配置
@Component @DisconfFile(filename = "kfaka.properties") public class KafkaConfig { private String producerServers; private int producerRetries; private int producerBatchSize; private int producerBufferMemory; private String consumerServers; private String consumerGroupId; private String consumerAutoOffsetReset; private String consumerEnableAutoCommit; private String consumerAutoCommitInterval; // 指定消息key和消息體的編解碼方式 private String consumerKeyDeserializer; private String consumerValueDeserializer; @DisconfFileItem(associateField = "producerServers",name = "spring.kafka.bootstrap-servers") public String getProducerServers() { return producerServers; } public void setProducerServers(String producerServers) { this.producerServers = producerServers; } @DisconfFileItem(associateField = "producerRetries",name = "spring.kafka.producer.retries") public int getProducerRetries() { return producerRetries; } public void setProducerRetries(int producerRetries) { this.producerRetries = producerRetries; } @DisconfFileItem(associateField = "producerBatchSize",name = "spring.kafka.producer.batch-size") public int getProducerBatchSize() { return producerBatchSize; } public void setProducerBatchSize(int producerBatchSize) { this.producerBatchSize = producerBatchSize; } @DisconfFileItem(associateField = "producerBufferMemory",name = "spring.kafka.producer.buffer-memory") public int getProducerBufferMemory() { return producerBufferMemory; } public void setProducerBufferMemory(int producerBufferMemory) { this.producerBufferMemory = producerBufferMemory; } @DisconfFileItem(associateField = "consumerServers",name = "spring.kafka.consumer.bootstrap-servers") public String getConsumerServers() { return consumerServers; } public void setConsumerServers(String consumerServers) { this.consumerServers = consumerServers; } @DisconfFileItem(associateField = "consumerGroupId",name = "spring.kafka.consumer.group-id") public String getConsumerGroupId() { return consumerGroupId; } public void 
setConsumerGroupId(String consumerGroupId) { this.consumerGroupId = consumerGroupId; } @DisconfFileItem(associateField = "consumerAutoOffsetReset",name = "spring.kafka.consumer.auto-offset-reset") public String getConsumerAutoOffsetReset() { return consumerAutoOffsetReset; } public void setConsumerAutoOffsetReset(String consumerAutoOffsetReset) { this.consumerAutoOffsetReset = consumerAutoOffsetReset; } @DisconfFileItem(associateField = "consumerEnableAutoCommit",name = "spring.kafka.consumer.enable-auto-commit") public String getConsumerEnableAutoCommit() { return consumerEnableAutoCommit; } public void setConsumerEnableAutoCommit(String consumerEnableAutoCommit) { this.consumerEnableAutoCommit = consumerEnableAutoCommit; } @DisconfFileItem(associateField = "consumerAutoCommitInterval",name = "spring.kafka.consumer.auto-commit-interval") public String getConsumerAutoCommitInterval() { return consumerAutoCommitInterval; } public void setConsumerAutoCommitInterval(String consumerAutoCommitInterval) { this.consumerAutoCommitInterval = consumerAutoCommitInterval; } @DisconfFileItem(associateField = "consumerKeyDeserializer",name = "spring.kafka.consumer.key-deserializer") public String getConsumerKeyDeserializer() { return consumerKeyDeserializer; } public void setConsumerKeyDeserializer(String consumerKeyDeserializer) { this.consumerKeyDeserializer = consumerKeyDeserializer; } @DisconfFileItem(associateField = "consumerValueDeserializer",name = "spring.kafka.consumer.value-deserializer") public String getConsumerValueDeserializer() { return consumerValueDeserializer; } public void setConsumerValueDeserializer(String consumerValueDeserializer) { this.consumerValueDeserializer = consumerValueDeserializer; } }
@Configuration @AutoConfigureAfter(DisconfConfig.class) public class KafkaProducerConfig { @Autowired private KafkaConfig kafkaConfig; public Map<String, Object> producerConfigs() { Map<String, Object> props = new HashMap<>(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaConfig.getProducerServers()); props.put(ProducerConfig.RETRIES_CONFIG, kafkaConfig.getProducerRetries()); props.put(ProducerConfig.BATCH_SIZE_CONFIG, kafkaConfig.getProducerBatchSize()); // props.put(ProducerConfig.LINGER_MS_CONFIG, kafkaConfig.get); props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, kafkaConfig.getProducerBufferMemory()); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); return props; } public ProducerFactory<String, String> producerFactory() { return new DefaultKafkaProducerFactory<>(producerConfigs()); } @Bean public KafkaTemplate<String, String> kafkaTemplate() { return new KafkaTemplate<String, String>(producerFactory()); } }
@Configuration @AutoConfigureAfter(DisconfConfig.class) public class KafkaConsumerConfig { @Autowired private KafkaConfig kafkaConfig; @Bean public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() { ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>(); factory.setConsumerFactory(consumerFactory()); // factory.setConcurrency(); factory.getContainerProperties().setPollTimeout(1500); return factory; } public ConsumerFactory<String, String> consumerFactory() { return new DefaultKafkaConsumerFactory<>(consumerConfigs()); } public Map<String, Object> consumerConfigs() { Map<String, Object> propsMap = new HashMap<>(); // propsMap.put("zookeeper.connect", "master1.hdp.com:2181,master2.hdp.com:2181,slave1.hdp.com:2181"); propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaConfig.getConsumerServers()); // propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, ); propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, kafkaConfig.getConsumerAutoCommitInterval()); //propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, kafkaConfig); propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaConfig.getConsumerGroupId()); propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, kafkaConfig.getConsumerAutoOffsetReset()); return propsMap; } }
5.這樣再啟動就不需要啟動兩次了。如果有其他配置需要交給disconf托管,也需要寫類似上面的配置代碼。