作为一个php开发猿来说,文件上传是常见的问题。在处理小文件上传的时候还能得心应手,可是在面对几百M或者上G的大文件时,如果这时候还使用对待小文件的处理方式,会不会出现问题呢?如果文件很大,上传过程中超时或者上传过程中断电!此时就需要我们提供断点续传功能了。php猿们考虑一下,该如何实现该功能呢?别着急,本博文宅鸟根据实际项目中的经历提供一个切实可行的解决方案,以供大家参考!废话不多说,直接上干货。
Resumable.js是一个JavaScript库,通过HTML5 File API来为应用加入多文件同步上传、稳定传输和断点续传功能。该库在HTTP上传大型文件的过程中加入了容错系统,并把每个文件分成小块,在文件上传失败时,只重新上传失败的部分,同时还允许在网络连接中断恢复后,自动恢复文件的上传。此外,该库还允许用户暂停、恢复、重新上传文件。
Resumable.js除了HTML5 FILE API(用于将文件分割成小块)外,不依赖任何其它的库。
关于Resumable.js的详细介绍请看官方文档.
* http://www.23developer.com/opensource
* http://github.com/23/resumable.js
* Steffen Tiedemann Christensen, steffen@23company.com
var Resumable = function(opts){
if ( !(this instanceof Resumable ) ) {
return new Resumable( opts );
// Check if these features are support by the browser:
// - FileList object type
(typeof(File)!=='undefined')
(typeof(Blob)!=='undefined')
(typeof(FileList)!=='undefined')
(!!Blob.prototype.webkitSlice||!!Blob.prototype.mozSlice||Blob.prototype.slice||false)
if(!this.support) return(false);
fileParameterName:'file',
throttleProgressCallbacks:0.5,
prioritizeFirstAndLastChunk:false,
generateUniqueIdentifier:null,
maxChunkRetries:undefined,
chunkRetryInterval:undefined,
permanentErrors:[415, 500, 501],
maxFilesErrorCallback:function (files, errorCount) {
var maxFiles = $.getOpt('maxFiles');
alert('Please upload ' + maxFiles + ' file' + (maxFiles === 1 ? '' : 's') + ' at a time.');
minFileSizeErrorCallback:function(file, errorCount) {
alert(file.fileName +' is too small, please upload files larger than ' + $h.formatSize($.getOpt('minFileSize')) + '.');
maxFileSizeErrorCallback:function(file, errorCount) {
alert(file.fileName +' is too large, please upload files less than ' + $h.formatSize($.getOpt('maxFileSize')) + '.');
fileTypeErrorCallback: function(file, errorCount) {
alert(file.fileName +' has type not allowed, please upload files of type ' + $.getOpt('fileType') + '.');
// Get multiple option if passed an array
$h.each(o, function(option){
options[option] = $this.getOpt(option);
// Otherwise, just return a simple option
if ($this instanceof ResumableChunk) {
if (typeof $this.opts[o] !== 'undefined') { return $this.opts[o]; }
else { $this = $this.fileObj; }
if ($this instanceof ResumableFile) {
if (typeof $this.opts[o] !== 'undefined') { return $this.opts[o]; }
else { $this = $this.resumableObj; }
if ($this instanceof Resumable) {
if (typeof $this.opts[o] !== 'undefined') { return $this.opts[o]; }
else { return $this.defaults[o]; }
// fileSuccess(file), fileProgress(file), fileAdded(file, event), fileRetry(file), fileError(file, message),
// complete(), progress(), error(message, file), pause()
$.on = function(event,callback){
$.events.push(event.toLowerCase(), callback);
// `arguments` is an object, not array, in FF, so:
for (var i=0; i<arguments.length; i++) args.push(arguments[i]);
// Find event listeners, and support pseudo-event `catchAll`
var event = args[0].toLowerCase();
for (var i=0; i<=$.events.length; i+=2) {
if($.events[i]==event) $.events[i+1].apply($,args.slice(1));
if($.events[i]=='catchall') $.events[i+1].apply(null,args);
if(event=='fileerror') $.fire('error', args[2], args[1]);
if(event=='fileprogress') $.fire('progress');
// INTERNAL HELPER METHODS (handy, but ultimately not part of uploading)
each: function(o,callback){
if(typeof(o.length)!=='undefined') {
for (var i=0; i<o.length; i++) {
if(callback(o[i])===false) return;
if(callback(i,o[i])===false) return;
generateUniqueIdentifier:function(file){
var custom = $.getOpt('generateUniqueIdentifier');
if(typeof custom === 'function') {
var relativePath = file.webkitRelativePath||file.fileName||file.name; // Some confusion in different versions of Firefox
return(size + '-' + relativePath.replace(/[^0-9a-zA-Z_-]/img, ''));
contains:function(array,test) {
$h.each(array, function(value) {
formatSize:function(size){
} else if(size<1024*1024) {
return (size/1024.0).toFixed(0) + ' KB';
} else if(size<1024*1024*1024) {
return (size/1024.0/1024.0).toFixed(1) + ' MB';
return (size/1024.0/1024.0/1024.0).toFixed(1) + ' GB';
var onDrop = function(event){
appendFilesFromFileList(event.dataTransfer.files, event);
var onDragOver = function(e) {
// INTERNAL METHODS (both handy and responsible for the heavy load)
var appendFilesFromFileList = function(fileList, event){
// check for uploading too many files
var o = $.getOpt(['maxFiles', 'minFileSize', 'maxFileSize', 'maxFilesErrorCallback', 'minFileSizeErrorCallback', 'maxFileSizeErrorCallback', 'fileType', 'fileTypeErrorCallback']);
if (typeof(o.maxFiles)!=='undefined' && o.maxFiles<(fileList.length+$.files.length)) {
o.maxFilesErrorCallback(fileList, errorCount++);
$h.each(fileList, function(file){
file.name = file.fileName = file.fileName||file.name; // consistency across browsers for the error message
if (o.fileType.length > 0 && !$h.contains(o.fileType, file.type.split('/')[1])) {
o.fileTypeErrorCallback(file, errorCount++);
if (typeof(o.minFileSize)!=='undefined' && file.size<o.minFileSize) {
o.minFileSizeErrorCallback(file, errorCount++);
if (typeof(o.maxFileSize)!=='undefined' && file.size>o.maxFileSize) {
o.maxFileSizeErrorCallback(file, errorCount++);
// directories have size == 0
if (file.size > 0 && !$.getFromUniqueIdentifier($h.generateUniqueIdentifier(file))) {
var f = new ResumableFile($, file);
$.fire('fileAdded', f, event);
$.fire('filesAdded', files);
function ResumableFile(resumableObj, file){
$.getOpt = resumableObj.getOpt;
$.resumableObj = resumableObj;
$.fileName = file.fileName||file.name; // Some confusion in different versions of Firefox
$.relativePath = file.webkitRelativePath || $.fileName;
$.uniqueIdentifier = $h.generateUniqueIdentifier(file);
// Callback when something happens within the chunk
var chunkEvent = function(event, message){
// event can be 'progress', 'success', 'error' or 'retry'
$.resumableObj.fire('fileProgress', $);
$.resumableObj.fire('fileError', $, message);
$.resumableObj.fire('fileProgress', $); // it's at least progress
$.resumableObj.fire('fileSuccess', $, message);
$.resumableObj.fire('fileRetry', $);
// Main code to set up a file object with chunks,
// packaged to be able to handle retries if needed.
$h.each($.chunks, function(c){
if(c.status()=='uploading') c.abort();
$.resumableObj.fire('fileProgress', $);
// Reset this file to be void
$h.each(_chunks, function(c){
if(c.status()=='uploading') {
$.resumableObj.uploadNextChunk();
$.resumableObj.removeFile($);
$.resumableObj.fire('fileProgress', $);
$.bootstrap = function(){
// Rebuild stack of chunks from file
var round = $.getOpt('forceChunkSize') ? Math.ceil : Math.floor;
for (var offset=0; offset<Math.max(round($.file.size/$.getOpt('chunkSize')),1); offset++) {
$.chunks.push(new ResumableChunk($.resumableObj, $, offset, chunkEvent));
// Sum up progress across everything
$h.each($.chunks, function(c){
if(c.status()=='error') error = true;
ret += c.progress(true); // get chunk progress relative to entire file
ret = (error ? 1 : (ret>0.999 ? 1 : ret))
ret = Math.max($._prevProgress, ret); // We don't want to lose percentages when an upload is paused
function ResumableChunk(resumableObj, fileObj, offset, callback){
$.getOpt = resumableObj.getOpt;
$.resumableObj = resumableObj;
$.fileObjSize = fileObj.size;
$.lastProgressCallback = (new Date);
$.preprocessState = 0; // 0 = unprocessed, 1 = processing, 2 = finished
var chunkSize = $.getOpt('chunkSize');
$.startByte = $.offset*chunkSize;
$.endByte = Math.min($.fileObjSize, ($.offset+1)*chunkSize);
if ($.fileObjSize-$.endByte < chunkSize && !$.getOpt('forceChunkSize')) {
// The last chunk will be bigger than the chunk size, but less than 2*chunkSize
$.endByte = $.fileObjSize;
// test() makes a GET request without any data to see if the chunk has already been uploaded in a previous session
// Set up request and listen for event
$.xhr = new XMLHttpRequest();
var testHandler = function(e){
$.callback(status, $.message());
$.resumableObj.uploadNextChunk();
$.xhr.addEventListener("load", testHandler, false);
$.xhr.addEventListener("error", testHandler, false);
// Add data from the query options
var customQuery = $.getOpt('query');
if(typeof customQuery == "function") customQuery = customQuery($.fileObj, $);
$h.each(customQuery, function(k,v){
params.push([encodeURIComponent(k), encodeURIComponent(v)].join('='));
// Add extra data to identify chunk
params.push(['resumableChunkNumber', encodeURIComponent($.offset+1)].join('='));
params.push(['resumableChunkSize', encodeURIComponent($.getOpt('chunkSize'))].join('='));
params.push(['resumableCurrentChunkSize', encodeURIComponent($.endByte - $.startByte)].join('='));
params.push(['resumableTotalSize', encodeURIComponent($.fileObjSize)].join('='));
params.push(['resumableIdentifier', encodeURIComponent($.fileObj.uniqueIdentifier)].join('='));
params.push(['resumableFilename', encodeURIComponent($.fileObj.fileName)].join('='));
params.push(['resumableRelativePath', encodeURIComponent($.fileObj.relativePath)].join('='));
// Append the relevant chunk and send it
$.xhr.open("GET", $.getOpt('target') + '?' + params.join('&'));
// Add data from header options
$h.each($.getOpt('headers'), function(k,v) {
$.xhr.setRequestHeader(k, v);
$.preprocessFinished = function(){
// send() uploads the actual data in a POST call
var preprocess = $.getOpt('preprocess');
if(typeof preprocess === 'function') {
switch($.preprocessState) {
case 0: preprocess($); $.preprocessState = 1; return;
if($.getOpt('testChunks') && !$.tested) {
// Set up request and listen for event
$.xhr = new XMLHttpRequest();
$.xhr.upload.addEventListener("progress", function(e){
if( (new Date) - $.lastProgressCallback > $.getOpt('throttleProgressCallbacks') * 1000 ) {
$.lastProgressCallback = (new Date);
// Done (either done, failed or retry)
var doneHandler = function(e){
if(status=='success'||status=='error') {
$.callback(status, $.message());
$.resumableObj.uploadNextChunk();
$.callback('retry', $.message());
var retryInterval = $.getOpt('chunkRetryInterval');
if(retryInterval !== undefined) {
setTimeout($.send, retryInterval);
$.xhr.addEventListener("load", doneHandler, false);
$.xhr.addEventListener("error", doneHandler, false);
// Set up the basic query data from Resumable
resumableChunkNumber: $.offset+1,
resumableChunkSize: $.getOpt('chunkSize'),
resumableCurrentChunkSize: $.endByte - $.startByte,
resumableTotalSize: $.fileObjSize,
resumableIdentifier: $.fileObj.uniqueIdentifier,
resumableFilename: $.fileObj.fileName,
resumableRelativePath: $.fileObj.relativePath
var customQuery = $.getOpt('query');
if(typeof customQuery == "function") customQuery = customQuery($.fileObj, $);
$h.each(customQuery, function(k,v){
// Add data from header options
$h.each($.getOpt('headers'), function(k,v) {
$.xhr.setRequestHeader(k, v);
var func = ($.fileObj.file.slice ? 'slice' : ($.fileObj.file.mozSlice ? 'mozSlice' : ($.fileObj.file.webkitSlice ? 'webkitSlice' : 'slice'))),
bytes = $.fileObj.file[func]($.startByte,$.endByte),
target = $.getOpt('target');
if ($.getOpt('method') === 'octet') {
// Add data from the query options
$h.each(query, function(k,v){
params.push([encodeURIComponent(k), encodeURIComponent(v)].join('='));
target += '?' + params.join('&');
// Add data from the query options
$h.each(query, function(k,v){
data.append($.getOpt('fileParameterName'), bytes);
$.xhr.open('POST', target);
// Returns: 'pending', 'uploading', 'success', 'error'
} else if($.xhr.readyState<4) {
// Status is really 'OPENED', 'HEADERS_RECEIVED' or 'LOADING' - meaning that stuff is happening
} else if($h.contains($.getOpt('permanentErrors'), $.xhr.status) || $.retries >= $.getOpt('maxChunkRetries')) {
// HTTP 415/500/501, permanent error
// this should never happen, but we'll reset and queue a retry
// a likely case for this would be 503 service unavailable
return($.xhr ? $.xhr.responseText : '');
$.progress = function(relative){
if(typeof(relative)==='undefined') relative = false;
var factor = (relative ? ($.endByte-$.startByte)/$.fileObjSize : 1);
return($.loaded/($.endByte-$.startByte)*factor);
$.uploadNextChunk = function(){
// In some cases (such as videos) it's really handy to upload the first
// and last chunk of a file quickly; this let's the server check the file's
// metadata and determine if there's even a point in continuing.
if ($.getOpt('prioritizeFirstAndLastChunk')) {
$h.each($.files, function(file){
if(file.chunks.length && file.chunks[0].status()=='pending' && file.chunks[0].preprocessState === 0) {
if(file.chunks.length>1 && file.chunks[file.chunks.length-1].status()=='pending' && file.chunks[0].preprocessState === 0) {
file.chunks[file.chunks.length-1].send();
// Now, simply look for the next, best thing to upload
$h.each($.files, function(file){
$h.each(file.chunks, function(chunk){
if(chunk.status()=='pending' && chunk.preprocessState === 0) {
// The are no more outstanding chunks to upload, check is everything is done
$h.each($.files, function(file){
$h.each(file.chunks, function(chunk){
var status = chunk.status();
if(status=='pending' || status=='uploading' || chunk.preprocessState === 1) {
if(outstanding) return(false);
// All chunks have been uploaded, complete
// PUBLIC METHODS FOR RESUMABLE.JS
$.assignBrowse = function(domNodes, isDirectory){
if(typeof(domNodes.length)=='undefined') domNodes = [domNodes];
// We will create an <input> and overlay it on the domNode
// (crappy, but since HTML5 doesn't have a cross-browser.browse() method we haven't a choice.
// FF4+ allows click() for this though: https://developer.mozilla.org/en/using_files_from_web_applications)
$h.each(domNodes, function(domNode) {
if(domNode.tagName==='INPUT' && domNode.type==='file'){
input = document.createElement('input');
input.setAttribute('type', 'file');
// Place <input /> with the dom node an position the input to fill the entire space
domNode.style.display = 'inline-block';
domNode.style.position = 'relative';
input.style.position = 'absolute';
input.style.top = input.style.left = input.style.bottom = input.style.right = 0;
input.style.cursor = 'pointer';
domNode.appendChild(input);
var maxFiles = $.getOpt('maxFiles');
if (typeof(maxFiles)==='undefined'||maxFiles!=1){
input.setAttribute('multiple', 'multiple');
input.removeAttribute('multiple');
input.setAttribute('webkitdirectory', 'webkitdirectory');
input.removeAttribute('webkitdirectory');
// When new files are added, simply append them to the overall list
input.addEventListener('change', function(e){
appendFilesFromFileList(e.target.files);
$.assignDrop = function(domNodes){
if(typeof(domNodes.length)=='undefined') domNodes = [domNodes];
$h.each(domNodes, function(domNode) {
domNode.addEventListener('dragover', onDragOver, false);
domNode.addEventListener('drop', onDrop, false);
$.unAssignDrop = function(domNodes) {
if (typeof(domNodes.length) == 'undefined') domNodes = [domNodes];
$h.each(domNodes, function(domNode) {
domNode.removeEventListener('dragover', onDragOver);
domNode.removeEventListener('drop', onDrop);
$.isUploading = function(){
$h.each($.files, function(file){
$h.each(file.chunks, function(chunk){
if(chunk.status()=='uploading') {
if(uploading) return(false);
// Make sure we don't start too many uploads at once
if($.isUploading()) return;
for (var num=1; num<=$.getOpt('simultaneousUploads'); num++) {
// Resume all chunks currently being uploaded
$h.each($.files, function(file){
$h.each($.files, function(file){
// Resume all chunks currently being uploaded
$h.each($.files, function(file){
totalDone += file.progress()*file.size;
return(totalSize>0 ? totalDone/totalSize : 0);
$.addFile = function(file){
appendFilesFromFileList([file]);
$.removeFile = function(file){
$h.each($.files, function(f,i){
if(f!==file) files.push(f);
$.getFromUniqueIdentifier = function(uniqueIdentifier){
$h.each($.files, function(f){
if(f.uniqueIdentifier==uniqueIdentifier) ret = f;
$h.each($.files, function(file){
<title>Resumable.js - Multiple simultaneous, stable and resumable uploads via the HTML5 File API</title>
<link rel="stylesheet" type="text/css" href="style.css" />
<p>It's a JavaScript library providing multiple simultaneous, stable and resumable uploads via the HTML5 File API.</p>
<p>The library is designed to introduce fault-tolerance into the upload of large files through HTTP. This is done by splitting each file into small chunks; whenever the upload of a chunk fails, uploading is retried until the procedure completes. This allows uploads to automatically resume uploading after a network connection is lost either locally or to the server. Additionally, it allows for users to pause and resume uploads without losing state.</p>
<p>Resumable.js relies on the HTML5 File API and the ability to chunk files into smaller pieces. Currently, this means that support is limited to Firefox 4+ and Chrome 11+.</p>
<script src="jquery.min.js"></script>
<script src="resumable.js"></script>
<div class="resumable-error">
Your browser, unfortunately, is not supported by Resumable.js. The library requires support for <a href="http://www.w3.org/TR/FileAPI/">the HTML5 File API</a> along with <a href="http://www.w3.org/TR/FileAPI/#normalization-of-params">file slicing</a>.
<div class="resumable-drop" ondragenter="jQuery(this).addClass('resumable-dragover');" ondragend="jQuery(this).removeClass('resumable-dragover');" ondrop="jQuery(this).removeClass('resumable-dragover');">
Drop video files here to upload or <a class="resumable-browse"><u>select from your computer</u></a>
<div class="resumable-progress">
<td width="100%"><div class="progress-container"><div class="progress-bar"></div></div></td>
<td class="progress-text" nowrap="nowrap"></td>
<td class="progress-pause" nowrap="nowrap">
<a href="#" onclick="r.upload(); return(false);" class="progress-resume-link"><img src="resume.png" title="Resume upload" /></a>
<a href="#" onclick="r.pause(); return(false);" class="progress-pause-link"><img src="pause.png" title="Pause upload" /></a>
<ul class="resumable-list"></ul>
throttleProgressCallbacks:1
// Resumable.js isn't supported, fall back on a different method
$('.resumable-error').show();
// Show a place for dropping/selecting files
$('.resumable-drop').show();
r.assignDrop($('.resumable-drop')[0]);
r.assignBrowse($('.resumable-browse')[0]);
r.on('fileAdded', function(file){
$('.resumable-progress, .resumable-list').show();
// Show pause, hide resume
$('.resumable-progress .progress-resume-link').hide();
$('.resumable-progress .progress-pause-link').show();
// Add the file to the list
$('.resumable-list').append('<li class="resumable-file-'+file.uniqueIdentifier+'">Uploading <span class="resumable-file-name"></span> <span class="resumable-file-progress"></span>');
$('.resumable-file-'+file.uniqueIdentifier+' .resumable-file-name').html(file.fileName);
// Actually start the upload
r.on('pause', function(){
// Show resume, hide pause
$('.resumable-progress .progress-resume-link').show();
$('.resumable-progress .progress-pause-link').hide();
r.on('complete', function(){
// Hide pause/resume when the upload has completed
$('.resumable-progress .progress-resume-link, .resumable-progress .progress-pause-link').hide();
r.on('fileSuccess', function(file,message){
// Reflect that the file upload has completed
$('.resumable-file-'+file.uniqueIdentifier+' .resumable-file-progress').html('(completed)');
r.on('fileError', function(file, message){
// Reflect that the file upload has resulted in error
$('.resumable-file-'+file.uniqueIdentifier+' .resumable-file-progress').html('(file could not be uploaded: '+message+')');
r.on('fileProgress', function(file){
// Handle progress for both the file and the overall upload
$('.resumable-file-'+file.uniqueIdentifier+' .resumable-file-progress').html(Math.floor(file.progress()*100) + '%');
$('.progress-bar').css({width:Math.floor(r.progress()*100) + '%'});
需要注意index.html文件中下面的js代码:
throttleProgressCallbacks:1
target:后端的处理文件上传的upload.php代码
chunkSize:文件分割成小片段的时候的大小,这里为1M
testChunks:在上传每一段小文件的时候是否先去服务器检查是否存在
php主要处理的是:接收每一段文件,并且保存在临时目录下,然后等所有文件上传结束后,合并成一个完整文件的过程。
desc server side upload.php provide for resumable.js
$REQUEST_METHOD=$_SERVER['REQUEST_METHOD'];
if($REQUEST_METHOD == "GET")
$chunkNumber = $_GET['resumableChunkNumber'];
$chunkSize = $_GET['resumableChunkSize'];
$totalSize = $_GET['resumableTotalSize'];
$identifier = $_GET['resumableIdentifier'];
$filename = iconv ( 'UTF-8', 'GB2312', $_GET ['resumableFilename'] );
if(validateRequest($chunkNumber, $chunkSize, $totalSize, $identifier, $filename)=='valid')
$chunkFilename = getChunkFilename($chunkNumber, $identifier,$filename);
if(file_exists($chunkFilename)){
header("HTTP/1.0 404 Not Found");
header("HTTP/1.0 404 Not Found");
function getChunkFilename ($chunkNumber, $identifier,$filename){
$temp_dir = $uploads_dir.'/'.$identifier;
return $temp_dir.'/'.$filename.'.part'.$chunkNumber;
function cleanIdentifier ($identifier){
//return preg_replace('/^0-9A-Za-z_-/', '', $identifier);
//$maxFileSize = 2*1024*1024*1024;
function validateRequest ($chunkNumber, $chunkSize, $totalSize, $identifier, $filename, $fileSize=''){
// Clean up the identifier
//$identifier = cleanIdentifier($identifier);
// Check if the request is sane
if ($chunkNumber==0 || $chunkSize==0 || $totalSize==0 || $identifier==0 || $filename=="") {
return 'non_resumable_request';
$numberOfChunks = max(floor($totalSize/($chunkSize*1.0)), 1);
if ($chunkNumber>$numberOfChunks) {
return 'invalid_resumable_request1';
// if($maxFileSize && $totalSize>$maxFileSize) {
// return 'invalid_resumable_request2';
if($chunkNumber<$numberOfChunks && $fileSize!=$chunkSize) {
// The chunk in the POST request isn't the correct size
return 'invalid_resumable_request3';
if($numberOfChunks>1 && $chunkNumber==$numberOfChunks && $fileSize!=(($totalSize%$chunkSize)+$chunkSize)) {
// The chunk in the POST is the last one, and the file is not the correct size
return 'invalid_resumable_request4';
if($numberOfChunks==1 && $fileSize!=$totalSize) {
// The file is only a single chunk, and the data size does not fit
return 'invalid_resumable_request5';
// loop through files and move the chunks to a temporarily created directory
if($REQUEST_METHOD == "POST"){
$resumableFilename = iconv ( 'UTF-8', 'GB2312', $_POST ['resumableFilename'] );
$resumableIdentifier=$_POST['resumableIdentifier'];
$resumableChunkNumber=$_POST['resumableChunkNumber'];
$resumableTotalSize=$_POST['resumableTotalSize'];
$resumableChunkSize=$_POST['resumableChunkSize'];
if (!empty($_FILES)) foreach ($_FILES as $file) {
// check the error status
if ($file['error'] != 0) {
_log('error '.$file['error'].' in file '.$resumableFilename);
// init the destination file (format <filename.ext>.part<#chunk>
// the file is stored in a temporary directory
$temp_dir = $uploads_dir.'/'.$resumableIdentifier;
$dest_file = $temp_dir.'/'.$resumableFilename.'.part'.$resumableChunkNumber;
// create the temporary directory
if (!is_dir($temp_dir)) {
mkdir($temp_dir, 0777, true);
// move the temporary file
if (!move_uploaded_file($file['tmp_name'], $dest_file)) {
_log('Error saving (move_uploaded_file) chunk '.$resumableChunkNumber.' for file '.$resumableFilename);
// check if all the parts present, and create the final destination file
createFileFromChunks($temp_dir, $resumableFilename,$resumableChunkSize, $resumableTotalSize);
* Logging operation - to a file (upload_log.txt) and to the stdout
* @param string $str - the logging string
$log_str = date('d.m.Y').": {$str}\r\n";
if (($fp = fopen('upload_log.txt', 'a+')) !== false) {
* Delete a directory RECURSIVELY
* @param string $dir - directory path
* @link http://php.net/manual/en/function.rmdir.php
$objects = scandir($dir);
foreach ($objects as $object) {
if ($object != "." && $object != "..") {
if (filetype($dir . "/" . $object) == "dir") {
rrmdir($dir . "/" . $object);
unlink($dir . "/" . $object);
* Check if all the parts exist, and
* gather all the parts of the file together
* @param string $dir - the temporary directory holding all the parts of the file
* @param string $fileName - the original file name
* @param string $chunkSize - each chunk size (in bytes)
* @param string $totalSize - original file size (in bytes)
function createFileFromChunks($temp_dir, $fileName, $chunkSize, $totalSize) {
// count all the parts of this file
foreach(scandir($temp_dir) as $file) {
if (stripos($file, $fileName) !== false) {
// check that all the parts are present
// the size of the last part is between chunkSize and 2*$chunkSize
if ($total_files * $chunkSize >= ($totalSize - $chunkSize + 1)) {
// create the final destination file
if (($fp = fopen($uploads_dir.'/'.$fileName, 'w')) !== false) {
for ($i=1; $i<=$total_files; $i++) {
fwrite($fp, file_get_contents($temp_dir.'/'.$fileName.'.part'.$i));
//_log('writing chunk '.$i);
_log('cannot create the destination file');
// rename the temporary directory (to avoid access from other
// concurrent chunks uploads) and than delete it
if (rename($temp_dir, $temp_dir.'_UNUSED')) {
rrmdir($temp_dir.'_UNUSED');
通过以上脚本文件可以实现多文件上传,大文件上传,断点续传等功能,php猿们可以通过附件下载到本地,根据自己的实际需求运用到生产环境下。
下面演示一下在Chrome下上传过程中关闭它,然后用Firefox接着上传的过程。
本文转自birdinroom 51CTO博客,原文链接:http://blog.51cto.com/birdinroom/1343892,如需转载请自行联系原作者