第一種方法:使用insert into 插入
從Redis每次獲取100條數據,根據條件去插入到Mysql數據庫中:
條件:
如果當前隊列中的值大於1000條,則會自動調用該方法,該方法每次從隊列的頭部獲取100條數據插入到Mysql數據庫中,同時以當前隊列的長度為插入條件。
1000為原始數據,從隊列頭獲取100條,插入到Mysql數據,同時刪除已經插入的數據,再通過隊列的長度判斷是否繼續插入,直到循環不滿足條件為止。
[1]獲取頭100條數據:$redis->lRange($liveKey,0,99)
[2]刪除頭100條數據:$redis->lTrim($liveKey, 100, -1);
[3]獲取當前隊列長度:$redis->lLen($liveKey);
/**
 * Drain the Redis list named by the `liveKey` query parameter into the
 * MySQL `livecomment` table, 100 rows at a time.
 *
 * While the list holds more than 200 entries: read the first 100, insert
 * them inside a DB transaction, commit, then trim the persisted entries
 * off the list and re-read the next batch plus the remaining length.
 *
 * Returns a JSON payload: errcode 500 when `liveKey` is missing,
 * otherwise errcode 200 with the final list length echoed back.
 */
public function redisSaveMysqlAction()
{
    $liveKey = $this->request->getQuery('liveKey');
    if (empty($liveKey)) {
        $result = array("errcode" => 500, "errmsg" => "this parameter is empty!");
        return $this->toJson($result);
    }

    $redis = new \Redis();
    $redis->connect('1.1.2.16', '6379');

    $redisInfo = $redis->lRange($liveKey, 0, 99);
    $dataLength = $redis->lLen($liveKey);

    // Hoisted out of the loop: the statement text never changes.
    $sql = "INSERT INTO livecomment (liveId,username,createTime,userId,content) VALUES (?, ? ,?,? ,?)";

    while ($dataLength > 200) {
        try {
            $this->db->begin();
            foreach ($redisInfo as $action) {
                // Decode each message once instead of five times per row.
                $row = json_decode($action, true);
                $this->db->execute($sql, array(
                    $row['roomId'],
                    $row['userName'],
                    $row['createTime'],
                    $row['userId'],
                    $row['content'],
                ));
            }
            $this->db->commit();

            // Trim only AFTER a successful commit: trimming first would
            // permanently discard messages if the commit failed.
            $redis->lTrim($liveKey, 100, -1);
            $redis->set('message_insert_success', '1');

            // Re-read the next batch and the remaining length; without
            // this the same 100 rows would be inserted again, and the
            // loop condition would never change.
            $redisInfo = $redis->lRange($liveKey, 0, 99);
            $dataLength = $redis->lLen($liveKey);
            $redis->set('dataLength_backenk', $dataLength);
        } catch (\Exception $e) {
            $redis->set('message_insert_fail', '0');
            $this->db->rollback();
            // Without this break the loop retries the same failing batch
            // forever, since $dataLength never changes after a rollback.
            break;
        }
    }

    $redis->set('log' . $liveKey, $redis->incr('request_counts'));
    $result = array(
        "errcode" => 200,
        "errmsg" => "Data Insert into Success!",
        'data' => 'dataLength:' . $dataLength . 'liveKey:' . $liveKey,
    );
    return $this->toJson($result);
}
第二種方法:使用優化SQL語句:將SQL語句進行拼接,使用 insert into table () values (),(),(),()然后再一次性插入,如果字符串太長,則需要配置下MYSQL,在mysql 命令行中運行 :set global max_allowed_packet = 2*1024*1024*10;
拼接后的字符串:
'insert into twenty_million (value) values('50'),('50'),('50'),('50'),('50'),('50'),('50'),('50'),('50'),('50')'
實際案例:
/**
 * Bulk-copy JSON chat messages from the Redis list `message01` (db 1)
 * into the MySQL `stream_name` table, 10,000 rows per multi-row INSERT,
 * trimming each persisted batch off the list afterwards.
 *
 * Loops while more than 20,000 entries remain, then echoes the elapsed
 * time and "success" and terminates the request.
 *
 * NOTE: a single multi-row INSERT of 10,000 rows can exceed MySQL's
 * max_allowed_packet; raise it if needed
 * (set global max_allowed_packet = 2*1024*1024*10).
 */
public function RedisSaveToMysqlJsonAction()
{
    $redis = RedisInstance::getInstance();
    $redis->select(1);
    $redisInfo = $redis->lRange('message01', 0, 9999);
    $dataLength = $redis->lLen('message01');
    $redis->set('dataLength_front', $dataLength);
    $t1 = microtime(true);

    while ($dataLength > 20000) {
        try {
            $this->db->begin();

            // Build ONE parameterized multi-row INSERT. Binding values
            // instead of concatenating them into the SQL string prevents
            // SQL injection via user-supplied message content and avoids
            // quoting/escaping bugs.
            $placeholders = array();
            $params = array();
            foreach ($redisInfo as $action) {
                // Decode each message once instead of four times per row.
                $row = json_decode($action, true);
                $placeholders[] = '(?,?,?,?)';
                $params[] = $row['userName'];
                $params[] = $row['createTime'];
                $params[] = $row['userId'];
                $params[] = $row['content'];
            }
            $sql = 'INSERT INTO stream_name (name,createTime,userId,content) VALUES '
                . implode(',', $placeholders);
            $this->db->execute($sql, $params);
            $this->db->commit();

            // Trim only AFTER a successful commit so a failed commit
            // cannot silently drop messages that were never persisted.
            $redis->lTrim('message01', 10000, -1);
            $redisInfo = $redis->lRange('message01', 0, 9999);
            $dataLength = $redis->lLen('message01');
        } catch (\Exception $e) {
            // json_encode() on an Exception yields "{}" (no useful info);
            // record the actual message instead.
            $redis->set('message_catch', $e->getMessage());
            $this->db->rollback();
            // Stop rather than retry the same failing batch forever.
            break;
        }
    }

    echo 'ENDTIME:' . (microtime(true) - $t1) . "<BR/>";
    echo 'success';
    die;
}
輸出結果為:
ENDTIME:3.0146479606628(s)
success