Merge branch 'feature-3194'
@@ -338,6 +338,7 @@ SUNSTONE_DIRS="$SUNSTONE_LOCATION/routes \
$SUNSTONE_LOCATION/public/vendor/4.0/fontawesome/css \
$SUNSTONE_LOCATION/public/vendor/4.0/fontawesome/fonts \
$SUNSTONE_LOCATION/public/vendor/4.0/jgrowl \
$SUNSTONE_LOCATION/public/vendor/4.0/resumablejs \
$SUNSTONE_LOCATION/public/vendor/4.0/foundation \
$SUNSTONE_LOCATION/public/vendor/4.0/modernizr \
$SUNSTONE_LOCATION/public/vendor/4.0/nouislider \
@@ -514,6 +515,7 @@ INSTALL_SUNSTONE_FILES=(
SUNSTONE_PUBLIC_NEW_VENDOR_DATATABLES:$SUNSTONE_LOCATION/public/vendor/4.0/datatables
SUNSTONE_PUBLIC_NEW_VENDOR_FOUNDATION_DATATABLES:$SUNSTONE_LOCATION/public/vendor/4.0/foundation_datatables
SUNSTONE_PUBLIC_NEW_VENDOR_JGROWL:$SUNSTONE_LOCATION/public/vendor/4.0/jgrowl
SUNSTONE_PUBLIC_NEW_VENDOR_RESUMABLEJS:$SUNSTONE_LOCATION/public/vendor/4.0/resumablejs
SUNSTONE_PUBLIC_NEW_VENDOR_JQUERY:$SUNSTONE_LOCATION/public/vendor/4.0/
SUNSTONE_PUBLIC_NEW_VENDOR_FOUNDATION:$SUNSTONE_LOCATION/public/vendor/4.0/foundation
SUNSTONE_PUBLIC_NEW_VENDOR_MODERNIZR:$SUNSTONE_LOCATION/public/vendor/4.0/modernizr
@@ -1573,6 +1575,9 @@ SUNSTONE_PUBLIC_NEW_VENDOR_JGROWL="\
src/sunstone/public/bower_components/jgrowl/jquery.jgrowl.min.css"

SUNSTONE_PUBLIC_NEW_VENDOR_RESUMABLEJS="\
src/sunstone/public/bower_components/resumablejs/resumable.js"

SUNSTONE_PUBLIC_NEW_VENDOR_FOUNDATION_DATATABLES="\
src/sunstone/public/bower_components/foundation-datatables/integration/foundation/dataTables.foundation.js"

@@ -5,11 +5,11 @@
"flot": "~0.8.3",
"jquery-migrate": "~1.2.1",
"datatables": "~1.10.2",
"fine-uploader": "~5.0.3",
"jgrowl": "~1.3.0",
"fontawesome": "~4.1.0",
"foundation-datatables": "https://github.com/DataTables/Plugins.git",
"flot.tooltip": "~0.8.4",
"no-vnc": "*"
"no-vnc": "*",
"resumablejs": "*"
}
}

@@ -1,36 +0,0 @@
.*
*.ipr
*~
.*.sw[a-z]
*.iml
!.gitignore
!.jshintrc
!.jshintignore
Thumbs.db

_build/
_dist/
*.zip
release/*
master

!.travis.yml
hardcopy*
selenium.log*

pid.txt
sauce_connect.log*
lib/sauce/sauce-connect*

fine-uploader/
test/upload/*
test/uploadsTemp/
test/coverage/*
test/vendor/*
test/uploads/*
test/temp*
test/_temp*
test/_vendor*
node_modules/

bin/
@@ -1 +0,0 @@
client/js/third-party/*.js

@@ -1,117 +0,0 @@
{
// --------------------------------------------------------------------
// JSHint Configuration, Strict Edition
// --------------------------------------------------------------------
//
// This is an options template for [JSHint][1], using [JSHint example][2]
// and [Ory Band's example][3] as basis and setting config values to
// be most strict:
//
// * set all enforcing options to true
// * set all relaxing options to false
// * set all environment options to false, except the browser value
// * set all JSLint legacy options to false
//
// [1]: http://www.jshint.com/
// [2]: https://github.com/jshint/node-jshint/blob/master/example/config.json
// [3]: https://github.com/oryband/dotfiles/blob/master/jshintrc
//
// @author http://michael.haschke.biz/
// @license http://unlicense.org/

// == Enforcing Options ===============================================
//
// These options tell JSHint to be more strict towards your code. Use
// them if you want to allow only a safe subset of JavaScript, very
// useful when your codebase is shared with a big number of developers
// with different skill levels.

"bitwise" : true, // Prohibit bitwise operators (&, |, ^, etc.).
"curly" : true, // Require {} for every new block or scope.
"eqeqeq" : true, // Require triple equals i.e. `===`.
"forin" : true, // Tolerate `for in` loops without `hasOwnPrototype`.
|
||||
"immed" : true, // Require immediate invocations to be wrapped in parens e.g. `( function(){}() );`
|
||||
"latedef" : true, // Prohibit variable use before definition.
|
||||
"newcap" : true, // Require capitalization of all constructor functions e.g. `new F()`.
|
||||
"noarg" : true, // Prohibit use of `arguments.caller` and `arguments.callee`.
|
||||
"noempty" : true, // Prohibit use of empty blocks.
|
||||
"nonew" : true, // Prohibit use of constructors for side-effects.
|
||||
"plusplus" : false, // Prohibit use of `++` & `--`.
|
||||
"regexp" : true, // Prohibit `.` and `[^...]` in regular expressions.
|
||||
"undef" : true, // Require all non-global variables be declared before they are used.
|
||||
"unused" : false, // Prohibit the use of defined, yet unused variables.
|
||||
"strict" : true, // Require `use strict` pragma in every file.
|
||||
"trailing" : true, // Prohibit trailing whitespaces.
|
||||
|
||||
// == Relaxing Options ================================================
|
||||
//
|
||||
// These options allow you to suppress certain types of warnings. Use
|
||||
// them only if you are absolutely positive that you know what you are
|
||||
// doing.
|
||||
|
||||
"asi" : false, // Tolerate Automatic Semicolon Insertion (no semicolons).
|
||||
"boss" : false, // Tolerate assignments inside if, for & while. Usually conditions & loops are for comparison, not assignments.
|
||||
"debug" : false, // Allow debugger statements e.g. browser breakpoints.
|
||||
"eqnull" : true, // Tolerate use of `== null`.
|
||||
"esnext" : false, // Allow ES.next specific features such as `const` and `let`.
|
||||
"evil" : false, // Tolerate use of `eval`.
|
||||
"expr" : true, // Tolerate `ExpressionStatement` as Programs.
|
||||
"funcscope" : false, // Tolerate declarations of variables inside of control structures while accessing them later from the outside.
|
||||
"globalstrict" : false, // Allow global "use strict" (also enables 'strict').
|
||||
"iterator" : false, // Allow usage of __iterator__ property.
|
||||
"lastsemic" : false, // Tolerat missing semicolons when the it is omitted for the last statement in a one-line block.
|
||||
"laxbreak" : false, // Tolerate unsafe line breaks e.g. `return [\n] x` without semicolons.
|
||||
"laxcomma" : false, // Suppress warnings about comma-first coding style.
|
||||
"loopfunc" : false, // Allow functions to be defined within loops.
|
||||
"multistr" : false, // Tolerate multi-line strings.
|
||||
"onecase" : false, // Tolerate switches with just one case.
|
||||
"proto" : false, // Tolerate __proto__ property. This property is deprecated.
|
||||
"regexdash" : false, // Tolerate unescaped last dash i.e. `[-...]`.
|
||||
"scripturl" : false, // Tolerate script-targeted URLs.
|
||||
"smarttabs" : false, // Tolerate mixed tabs and spaces when the latter are used for alignmnent only.
|
||||
"shadow" : false, // Allows re-define variables later in code e.g. `var x=1; x=2;`.
|
||||
"sub" : false, // Tolerate all forms of subscript notation besides dot notation e.g. `dict['key']` instead of `dict.key`.
|
||||
"supernew" : false, // Tolerate `new function () { ... };` and `new Object;`.
|
||||
"validthis" : false, // Tolerate strict violations when the code is running in strict mode and you use this in a non-constructor function.
|
||||
|
||||
// == Environments ====================================================
|
||||
//
|
||||
// These options pre-define global variables that are exposed by
|
||||
// popular JavaScript libraries and runtime environments—such as
|
||||
// browser or node.js.
|
||||
|
||||
"browser" : true, // Standard browser globals e.g. `window`, `document`.
|
||||
"couch" : false, // Enable globals exposed by CouchDB.
|
||||
"devel" : false, // Allow development statements e.g. `console.log();`.
|
||||
"dojo" : false, // Enable globals exposed by Dojo Toolkit.
|
||||
"jquery" : true, // Enable globals exposed by jQuery JavaScript library.
|
||||
"mootools" : false, // Enable globals exposed by MooTools JavaScript framework.
|
||||
"node" : false, // Enable globals available when code is running inside of the NodeJS runtime environment.
|
||||
"nonstandard" : true, // Define non-standard but widely adopted globals such as escape and unescape.
|
||||
"prototypejs" : false, // Enable globals exposed by Prototype JavaScript framework.
|
||||
"rhino" : false, // Enable globals available when your code is running inside of the Rhino runtime environment.
|
||||
"wsh" : false, // Enable globals available when your code is running as a script for the Windows Script Host.
|
||||
|
||||
// == JSLint Legacy ===================================================
|
||||
//
|
||||
// These options are legacy from JSLint. Aside from bug fixes they will
|
||||
// not be improved in any way and might be removed at any point.
|
||||
|
||||
"nomen" : false, // Prohibit use of initial or trailing underbars in names.
|
||||
"onevar" : false, // Allow only one `var` statement per function.
|
||||
"passfail" : false, // Stop on first error.
|
||||
"white" : false, // Check against strict whitespace and indentation rules.
|
||||
|
||||
// == Undocumented Options ============================================
|
||||
//
|
||||
// While I've found these options in [example1][2] and [example2][3]
|
||||
// they are not described in the [JSHint Options documentation][4].
|
||||
//
|
||||
// [4]: http://www.jshint.com/options/
|
||||
|
||||
"maxerr" : 100, // Maximum errors before stopping.
|
||||
"predef" : ["qq"],
|
||||
"quotmark" : "double", // Enforces consistencey of quotation marks.
|
||||
//"maxlen" : "80", // Enfore a maximum line length
|
||||
"indent" : 4 // Specify indentation spacing
|
||||
}
|
@@ -1,41 +0,0 @@
---
addons:
firefox: "25.0"

language: node_js

node_js:
- '0.10'

env:
global:
- SAUCE_CONNECT_READY_FILE=/tmp/sauce-connect-ready
- LOGS_DIR=/tmp/fineuploader-build/logs
- SLIMERJSLAUNCHER=$(which firefox) DISPLAY=:99.0 PATH=$TRAVIS_BUILD_DIR/slimerjs:$PATH SLIMERJS_BIN=$TRAVIS_BUILD_DIR/slimerjs-0.9.0/slimerjs
- secure: |-
gHDpZQb3YZtwxHnYCwP/mMeqNDU1OCptFtZ/2wfY/R81Of5JXnwO4nvk3ZT+
9TmUexNxC7pgM4sU5MPYBPVcUJ1jKXp/DifgFmLzdygMkMHkQNji0Ey53W/7
9Rs6+kIGSez+S5RbR9itYuZ3NTBM54o+YdUHhz7fERyOjHaCPvY=
- secure: |-
ItUbCE5lEEhfjbRw2xcG8F8L4T7JriFXCphlNV26ZzrRTGNGXKi2D7TDv2S5
rwbV6veaxef5UrKDyR5vbRuyXcKg7B05snD3jvKhp/Jwn3cU9NbddwFqffej
EFukvU9VidYA2iyLTB4UTVBh7AhZU05hLo5P4npKKcpObvSAL/8=

before_install:
- npm install -g grunt-cli
- git submodule update --init --recursive

before_script:
- "mkdir -p $LOGS_DIR"
- "sh -e /etc/init.d/xvfb start"

script:
- grunt travis

branches:
only:
- master
- develop
- /^feature.*$/
- /^hotfix.*$/

@@ -1,24 +0,0 @@
# Please Read This BEFORE Opening Up a New Issue or Pull Request #

The issue tracker in this project is for bug reports or feature requests ONLY. If you have a support question,
please see http://fineuploader.com/support.html which contains instructions on where you can browse and open support
requests.


## Bug Reports ##
If you believe you have discovered a bug, please include the following information in your report:
* The version of Fine Uploader you are using
* Your client-side javascript and HTML that relates to your use of Fine Uploader
* Related browser(s)
* Related operating system(s) or device(s)
* The contents of your javascript console (when reproducing the error) with the `debug` option set to "true"



## Pull Requests ##
When opening up a new pull request, please be sure (and note) that you have at least tested your changes in a browser that
supports the File API (Chrome, Firefox, Safari, IE10) as well as a browser that does not support the File API (IE9 or older).
Also, please note that all pull requests should be against the "develop" branch, and NOT master.


Thank you!

@@ -1,11 +0,0 @@
Third-party credits (client/js/third-party/)

MegaPixImage module
Licensed under MIT (https://github.com/stomita/ios-imagefile-megapixel/blob/master/LICENSE)
https://github.com/stomita/ios-imagefile-megapixel
Copyright (c) 2012 Shinichi Tomita <shinichi.tomita@gmail.com>

CryptoJS
Licensed under the New BSD License (http://opensource.org/licenses/BSD-3-Clause)
https://code.google.com/p/crypto-js/
Copyright (c) 2009-2013 Jeff Mott <Jeff.Mott.OR@gmail.com>

@@ -1,902 +0,0 @@
###
Fine Uploader
-------------

Gruntfile

###

module.exports = (grunt) ->

fs = require 'fs'
uuid = require 'uuid'
async = require 'async'

# Utilities
# ==========
path = require 'path'

# Package
# ==========
pkg = require './package.json'

# Paths
# ==========
paths =
'dist': './_dist'
'build': './_build'
'src': './client'
'html': './client/html/templates'
'docs': './docs'
'test': './test'
'custom': './_custom'

# Destination for custom builds. Appended with a uuid to make builds unique
# and not overwrite each other (if, say, two builds were being generated in parallel)

customBuildDest = path.join paths.custom, uuid.v1(1), "custom.#{pkg.name}-#{pkg.version}"
#customBuildDest = path.join paths.custom, "custom.#{pkg.name}-#{pkg.version}"

# Browsers
# ==========
allBrowsers = require("./lib/browsers")
browsers = allBrowsers.browsers

# Modules
# ==========
fineUploaderModules = require './lib/modules'

# Configuration
# ==========
grunt.initConfig


pkg: pkg

bower:
install:
options:
targetDir: "#{paths.test}/_vendor"
install: true
cleanTargetDir: true
cleanBowerDir: true
layout: 'byComponent'

clean:
build:
files:
src: paths.build
dist:
files:
src: paths.dist
test:
files:
src: ["#{paths.test}/_temp*"]
vendor:
files:
src: "#{paths.test}/_vendor"
custom:
files:
src: "#{paths.custom}/*"

coffeelint:
options:
indentation:
level: 'warn'
no_trailing_whitespace:
level: 'warn'
no_backticks:
level: 'ignore'
max_line_length:
level: 'ignore'
grunt: ['./Gruntfile.coffee', 'lib/grunt/**/*.coffee']

compress:
jquery:
options:
archive: "#{paths.dist}/jquery.<%= pkg.name %>-<%= pkg.version %>.zip"
files: [
{
expand: true
cwd: paths.dist
src: './jquery.<%= pkg.name %>-<%= pkg.version %>/*'
}
]
jqueryS3:
options:
archive: "#{paths.dist}/s3.jquery.<%= pkg.name %>-<%= pkg.version %>.zip"
files: [
{
expand: true
cwd: paths.dist
src: './s3.jquery.<%= pkg.name %>-<%= pkg.version %>/*'
}
]
jqueryAzure:
options:
archive: "#{paths.dist}/azure.jquery.<%= pkg.name %>-<%= pkg.version %>.zip"
files: [
{
expand: true
cwd: paths.dist
src: './azure.jquery.<%= pkg.name %>-<%= pkg.version %>/*'
}
]
core:
options:
archive: "#{paths.dist}/<%= pkg.name %>-<%= pkg.version %>.zip"
files: [
{
expand: true
cwd: paths.dist
src: './<%= pkg.name %>-<%= pkg.version %>/*'
}
]
coreS3:
options:
archive: "#{paths.dist}/s3.<%= pkg.name %>-<%= pkg.version %>.zip"
files: [
{
expand: true
cwd: paths.dist
src: './s3.<%= pkg.name %>-<%= pkg.version %>/*'
}
]
coreAzure:
options:
archive: "#{paths.dist}/azure.<%= pkg.name %>-<%= pkg.version %>.zip"
files: [
{
expand: true
cwd: paths.dist
src: './azure.<%= pkg.name %>-<%= pkg.version %>/*'
}
]
custom:
options:
archive: "#{customBuildDest}/custom.<%= pkg.name %>-<%= pkg.version %>.zip"
files: [
{
expand: true
cwd: customBuildDest + '/src/'
src: "**/*"
}
]

concat:
core:
src: fineUploaderModules.mergeModules true, 'fuTraditional'
dest: "#{paths.build}/<%= pkg.name %>.js"
coreS3:
src: fineUploaderModules.mergeModules true, 'fuS3'
dest: "#{paths.build}/s3.<%= pkg.name %>.js"
coreAzure:
src: fineUploaderModules.mergeModules true, 'fuAzure'
dest: "#{paths.build}/azure.<%= pkg.name %>.js"
jquery:
src: fineUploaderModules.mergeModules true, 'fuTraditionalJquery'
dest: "#{paths.build}/jquery.<%= pkg.name %>.js"
jqueryS3:
src: fineUploaderModules.mergeModules true, 'fuS3Jquery'
dest: "#{paths.build}/s3.jquery.<%= pkg.name %>.js"
jqueryAzure:
src: fineUploaderModules.mergeModules true, 'fuAzureJquery'
dest: "#{paths.build}/azure.jquery.<%= pkg.name %>.js"
all:
src: fineUploaderModules.mergeModules true, 'fuAll'
dest: paths.build + "/all.<%= pkg.name %>.js"
css:
src: ["#{paths.src}/*.css"]
dest: "#{paths.build}/<%= pkg.name %>.css"

concurrent:
minify: ['cssmin', 'uglify']
lint: ['jshint', 'coffeelint']
concat: ['concat']
clean: ['clean']
compress: ['compress']

connect:
root_server:
options:
base: "."
hostname: "0.0.0.0"
port: 9000
keepalive: true
test_server:
options:
base: "test"
hostname: "0.0.0.0"
port: 9000

copy:
dist:
files: [
{
expand: true
cwd: paths.build
src: ['*.js', '!all.*', '!s3.*', '!azure.*', '!*.min.js', '!jquery*', '!*iframe*']
dest: "#{paths.dist}/<%= pkg.name %>-<%= pkg.version %>/"
ext: '-<%= pkg.version %>.js'
},
{
expand: true
cwd: paths.build
src: [ '!all.*', 's3.*.js', '!*.min.js', '!s3.jquery*', '!azure.jquery*', '!*iframe*']
dest: "#{paths.dist}/s3.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.<%= pkg.name %>-<%= pkg.version %>.js'
},
{
expand: true
cwd: paths.build
src: [ '!all.*', 'azure.*.js', '!*.min.js', '!azure.jquery*', '!s3.jquery*', '!*iframe*']
dest: "#{paths.dist}/azure.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.<%= pkg.name %>-<%= pkg.version %>.js'
},
{
expand: true
cwd: paths.build
src: ['*.min.js', '!all.*', '!s3.*', '!azure.*', '!jquery*']
dest: "#{paths.dist}/<%= pkg.name %>-<%= pkg.version %>/"
ext: '-<%= pkg.version %>.min.js'
},
{
expand: true
cwd: paths.build
src: ['s3.*.min.js', '!s3.jquery*']
dest: "#{paths.dist}/s3.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.<%= pkg.name %>-<%= pkg.version %>.min.js'
},
{
expand: true
cwd: paths.build
src: ['azure.*.min.js', '!azure.jquery*']
dest: "#{paths.dist}/azure.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.<%= pkg.name %>-<%= pkg.version %>.min.js'
},
{
expand: true
cwd: paths.build
src: ['jquery*js', '!s3.*', '!azure.*', '!*.min.js']
dest: "#{paths.dist}/jquery.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.<%= pkg.name %>-<%= pkg.version %>.js'
},
{
expand: true
cwd: paths.build
src: ['s3.jquery*js', '!*.min.js']
dest: "#{paths.dist}/s3.jquery.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.jquery.<%= pkg.name %>-<%= pkg.version %>.js'
},
{
expand: true
cwd: paths.build
src: ['azure.jquery*js', '!*.min.js']
dest: "#{paths.dist}/azure.jquery.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.jquery.<%= pkg.name %>-<%= pkg.version %>.js'
},
{
expand: true
cwd: paths.build
src: ['jquery*min.js']
dest: "#{paths.dist}/jquery.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.<%= pkg.name %>-<%= pkg.version %>.min.js'
},
{
expand: true
cwd: paths.build
src: ['s3.jquery*min.js']
dest: "#{paths.dist}/s3.jquery.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.jquery.<%= pkg.name %>-<%= pkg.version %>.min.js'
},
{
expand: true
cwd: paths.build
src: ['azure.jquery*min.js']
dest: "#{paths.dist}/azure.jquery.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.jquery.<%= pkg.name %>-<%= pkg.version %>.min.js'
},
{
expand: true
cwd: "./#{paths.src}/js/"
src: ['iframe.xss.response.js']
dest: "#{paths.dist}/<%= pkg.name %>-<%= pkg.version %>/"
ext: '.xss.response-<%= pkg.version %>.js'
},
{
expand: true
cwd: "./#{paths.src}/js/"
src: ['iframe.xss.response.js']
dest: "#{paths.dist}/s3.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.xss.response-<%= pkg.version %>.js'
},
{
expand: true
cwd: "./#{paths.src}/js/"
src: ['iframe.xss.response.js']
dest: "#{paths.dist}/jquery.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.xss.response-<%= pkg.version %>.js'
},
{
expand: true
cwd: "./#{paths.src}/js/"
src: ['iframe.xss.response.js']
dest: "#{paths.dist}/s3.jquery.<%= pkg.name %>-<%= pkg.version %>/"
ext: '.xss.response-<%= pkg.version %>.js'
},
{
expand: true
cwd: paths.src
src: ['*.gif', 'placeholders/*.png']
dest: "#{paths.dist}/<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: paths.src
src: ['*.gif', 'placeholders/*.png']
dest: "#{paths.dist}/s3.<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: paths.src
src: ['*.gif', 'placeholders/*.png']
dest: "#{paths.dist}/azure.<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: paths.src
src: ['*.gif', 'placeholders/*.png']
dest: "#{paths.dist}/jquery.<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: paths.src
src: ['*.gif', 'placeholders/*.png']
dest: "#{paths.dist}/s3.jquery.<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: paths.src
src: ['*.gif', 'placeholders/*.png']
dest: "#{paths.dist}/azure.jquery.<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: './'
src: ['LICENSE']
dest: "#{paths.dist}/<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: './'
src: ['LICENSE']
dest: "#{paths.dist}/s3.<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: './'
src: ['LICENSE']
dest: "#{paths.dist}/azure.<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: './'
src: ['LICENSE']
dest: "#{paths.dist}/jquery.<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: './'
src: ['LICENSE']
dest: "#{paths.dist}/s3.jquery.<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: './'
src: ['LICENSE']
dest: "#{paths.dist}/azure.jquery.<%= pkg.name %>-<%= pkg.version %>/"
},
{
expand: true
cwd: paths.build
src: ['*.min.css']
dest: "#{paths.dist}/<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.min.css'
},
{
expand: true
cwd: paths.build
src: ['*.min.css']
dest: "#{paths.dist}/s3.<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.min.css'
},
{
expand: true
cwd: paths.build
src: ['*.min.css']
dest: "#{paths.dist}/azure.<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.min.css'
},
{
expand: true
cwd: paths.build
src: ['*.css', '!*.min.css']
dest: "#{paths.dist}/<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.css'
},
{
expand: true
cwd: paths.build
src: ['*.css', '!*.min.css']
dest: "#{paths.dist}/s3.<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.css'
},
{
expand: true
cwd: paths.build
src: ['*.css', '!*.min.css']
dest: "#{paths.dist}/azure.<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.css'
},
{
expand: true
cwd: paths.build
src: ['*.min.css']
dest: "#{paths.dist}/jquery.<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.min.css'
},
{
expand: true
cwd: paths.build
src: ['*.min.css']
dest: "#{paths.dist}/s3.jquery.<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.min.css'
},
{
expand: true
cwd: paths.build
src: ['*.min.css']
dest: "#{paths.dist}/azure.jquery.<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.min.css'
},
{
expand: true
cwd: paths.build
src: ['*.css', '!*.min.css']
dest: "#{paths.dist}/jquery.<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.css'
},
{
expand: true
cwd: paths.build
src: ['*.css', '!*.min.css']
dest: "#{paths.dist}/s3.jquery.<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.css'
},
{
expand: true
cwd: paths.build
src: ['*.css', '!*.min.css']
dest: "#{paths.dist}/azure.jquery.<%= pkg.name %>-<%= pkg.version %>"
ext: '-<%= pkg.version %>.css'
},
{
expand: true
cwd: paths.html
src: ['*.html']
dest: "#{paths.dist}/<%= pkg.name %>-<%= pkg.version %>/templates/"
},
{
expand: true
cwd: paths.html
src: ['*.html']
dest: "#{paths.dist}/s3.<%= pkg.name %>-<%= pkg.version %>/templates/"
},
{
expand: true
cwd: paths.html
src: ['*.html']
dest: "#{paths.dist}/azure.<%= pkg.name %>-<%= pkg.version %>/templates/"
},
{
expand: true
cwd: paths.html
src: ['*.html']
dest: "#{paths.dist}/jquery.<%= pkg.name %>-<%= pkg.version %>/templates/"
},
{
expand: true
cwd: paths.html
src: ['*.html']
dest: "#{paths.dist}/s3.jquery.<%= pkg.name %>-<%= pkg.version %>/templates/"
},
{
expand: true
cwd: paths.html
src: ['*.html']
dest: "#{paths.dist}/azure.jquery.<%= pkg.name %>-<%= pkg.version %>/templates/"
}
]
build:
files: [
{
expand: true
cwd: "#{paths.src}/js/"
src: ['iframe.xss.response.js']
dest: paths.build
},
{
expand: true
cwd: paths.src
src: ['*.gif', 'placeholders/*.png']
dest: paths.build
}
{
expand: true
cwd: paths.html
src: ['*.html']
dest: paths.build
}
]
test:
expand: true
flatten: true
src: ["#{paths.build}/*"]
dest: "#{paths.test}/_temp"
images:
files: [
expand: true
cwd: paths.src
src: ['*.gif', 'placeholders/*.png']
dest: paths.build
]
templates:
files: [
expand: true
cwd: paths.src + '/html'
src: ['*.html']
dest: paths.build
]

cssmin:
options:
banner: '/*! <%= pkg.name %> <%= grunt.template.today("yyyy-mm-dd") %> */\n'
report: 'min'
all:
expand: true
cwd: paths.build
src: ['*.css', '!*.min.css']
dest: paths.build
ext: ".min.css"
custom:
expand: true
cwd: customBuildDest + '/src/'
src: ['*.css', '!*.min.css']
dest: customBuildDest + '/src/'
ext: '.<%= pkg.name %>-<%= pkg.version %>.min.css'
#src: ["#{customBuildDest}/src/<%= pkg.name %>-<%= pkg.version %>.css"]
#dest: "#{customBuildDest}/src/<%= pkg.name %>-<%= pkg.version %>.min.css"

jshint:
source: ["#{paths.src}/js/**/*.js"]
tests: ["#{paths.test}/unit/**/*.js","#{paths.test}/static/local/*.js"]
options:
jshintrc: true
ignores: ["#{paths.src}/js/third-party/**/*.js"]

custom:
options:
dest: customBuildDest

uglify:
options:
mangle: true
compress:
warnings: false
report: 'min'
preserveComments: 'some'
core:
src: ['<%= concat.core.dest %>']
dest: "#{paths.build}/<%= pkg.name %>.min.js"
jquery:
src: ['<%= concat.jquery.dest %>']
dest: "#{paths.build}/jquery.<%= pkg.name %>.min.js"
coreAzure:
src: ['<%= concat.coreAzure.dest %>']
dest: "#{paths.build}/azure.<%= pkg.name %>.min.js"
jqueryAzure:
src: ['<%= concat.jqueryAzure.dest %>']
dest: "#{paths.build}/azure.jquery.<%= pkg.name %>.min.js"
coreS3:
src: ['<%= concat.coreS3.dest %>']
dest: "#{paths.build}/s3.<%= pkg.name %>.min.js"
jqueryS3:
src: ['<%= concat.jqueryS3.dest %>']
dest: "#{paths.build}/s3.jquery.<%= pkg.name %>.min.js"
all:
src: ['<%= concat.all.dest %>']
dest: "#{paths.build}/all.<%= pkg.name %>.min.js"
custom:
src: ["#{customBuildDest}/src/custom.<%= pkg.name %>-<%= pkg.version %>.js"]
dest: "#{customBuildDest}/src/custom.<%= pkg.name %>-<%= pkg.version %>.min.js"

usebanner:
allhead:
src: ["#{paths.build}/*.{js,css}"]
options:
position: 'top'
banner: '''
/*!
* <%= pkg.title %>
*
* Copyright 2013, <%= pkg.author %> info@fineuploader.com
*
* Version: <%= pkg.version %>
*
* Homepage: http://fineuploader.com
*
* Repository: <%= pkg.repository.url %>
*
* Licensed under GNU GPL v3, see LICENSE
*/ \n\n'''
allfoot:
src: ["#{paths.build}/*.{js,css}"]
options:
position: 'bottom'
banner: '/*! <%= grunt.template.today("yyyy-mm-dd") %> */\n'
customhead:
files:
src: ["#{customBuildDest}/src/*.{js,css}"]
options:
position: 'top'
banner: '''
/*!
* <%= pkg.title %>
*
* Copyright 2013-2014, <%= pkg.author %> info@fineuploader.com
*
* Version: <%= pkg.version %>
*
* Homepage: http://fineuploader.com
*
* Repository: <%= pkg.repository.url %>
*
* Licensed under GNU GPL v3, see LICENSE
*
* Third-party credits:
* MegaPixImageModule (MIT)
* https://github.com/stomita/ios-imagefile-megapixel
* Copyright (c) 2012 Shinichi Tomita <shinichi.tomita@gmail.com>
*
* CryptoJS
* code.google.com/p/crypto-js/wiki/License
* (c) 2009-2013 by Jeff Mott. All rights reserved.
*/ \n\n'''
customfoot:
files:
src: ["#{customBuildDest}/*.{js,css}"]
options:
position: 'bottom'
banner: '/*! <%= grunt.template.today("yyyy-mm-dd") %> */\n'

version:
options:
pkg: pkg,
prefix: '[^\\-][Vv]ersion[\'"]?\\s*[:=]\\s*[\'"]?'
major:
options:
release: 'major'
src: fineUploaderModules.modules.versioned
minor:
options:
release: 'minor'
src: fineUploaderModules.modules.versioned
hotfix:
options:
release: 'patch'
src: fineUploaderModules.modules.versioned
build:
options:
release: 'build'
src: fineUploaderModules.modules.versioned
release:
options:
release: pkg.version.replace /-\d+$/, ""
src: fineUploaderModules.modules.versioned

watch:
options:
interrupt: true
debounceDelay: 250
js:
files: ["#{paths.src}/js/*.js", "#{paths.src}/js/s3/*.js"]
tasks: [
'dev'
'tests:local'
]
test:
files: ["#{paths.test}/unit/*.js", "#{paths.test}/unit/s3/*.js"]
tasks: [
'jshint:tests'
'tests:local'
]
grunt:
files: ['./Gruntfile.coffee']
tasks: [
'coffeelint:grunt'
'build'
]
images:
files: ["#{paths.src}/*.gif", "#{paths.src}/placeholders/*.png"]
tasks: [
'copy:images'
]

tests:
local: 'karma-local.conf.coffee'
travis: 'karma-travis.conf.coffee'

shell:
start_saucecon:
command: './lib/sauce/sauce_connect_setup.sh'
kill_saucecon:
command: 'cat /tmp/sauce-connect.pid | xargs kill'
npm_install:
command: 'npm install'
version_custom_templates:
command: "find #{customBuildDest}/ -type f -name '*.html' | xargs sed -i '' 's/{VERSION}/<%= pkg.version %>/'"
options:
cwd: __dirname
stderr: true
stdout: true
version_dist_templates:
command: "find #{paths.dist}/ -type f -name '*.html' | xargs sed -i '' 's/{VERSION}/<%= pkg.version %>/'"
options:
cwd: __dirname
stderr: true
stdout: true


strip_code:
options:
start_comment: "<testing>"
end_comment: "</testing>"
build:
files:
src: "#{paths.build}/**/*.js"
custom:
files:
src: "#{customBuildDest}/**/*.js"

nodestatic:
server:
options:
port: 3000
base: "test/unit/resources"
headers:
"Access-Control-Allow-Origin": "*"

# Dependencies
# ==========
for name of pkg.devDependencies when name.substring(0, 6) is 'grunt-'
grunt.loadNpmTasks name

grunt.loadTasks './lib/grunt'

grunt.registerTask 'build_details', ->
grunt.log.writeln "\n##########"
grunt.log.writeln "Custom Build Generated: "
grunt.log.write "### " + customBuildDest + " ###"
grunt.log.writeln "\n##########\n"

# Tasks
# ==========
grunt.registerTask 'test', "Run unit tests. Allows: 'travis', 'server', 'headless', 'ie', and 'all'. Can also take browser names: 'PhantomJS', 'Firefox', 'Chrome', 'Safari', etc.. Comma-delimited.", (test_type) ->
# To run this task:
# % grunt test:<args>
#
# Where <args> is either:
# * 'travis', 'server', 'headless', 'ie', 'ios', or 'all'
# * a comma-delimited list of browsers.
#
# Example:
# % grunt test:server
# % grunt test:headless
# % grunt test:PhantomJS --no-single-run
# % grunt test:Firefox,Chrome,Opera,Safari
# % grunt test:ie
# % grunt test:Firefox,Chrome,Opera,Safari --autoWatch=true --singleRun=true
# etc...
taskList = ["server"]

setDefaultOption = (name, def) ->
if not grunt.option(name)?
grunt.option(name, def)

switch test_type
when "travis" then do ->
setDefaultOption('singleRun', true)
setDefaultOption('autoWatch', true)
taskList.push('tests:travis')
when "server" then do ->
setDefaultOption('singleRun', false)
setDefaultOption('autoWatch', false)
grunt.option('browsers', [])
taskList.push('tests:local')
when "headless" then do ->
setDefaultOption('singleRun', true)
setDefaultOption('autoWatch', true)
#grunt.option('autoWatch') || true
#grunt.option('singleRun') || true
grunt.option('browsers', ['PhantomJS'])
taskList.push('tests:local')
when "ie" then do ->
setDefaultOption('singleRun', true)
setDefaultOption('autoWatch', true)
#grunt.option('autoWatch') || true
#grunt.option('singleRun') || true
taskList.push('tests:local')
grunt.option('browsers', [
'IE7 - WinXP',
'IE8 - WinXP',
'IE9 - Win7',
'IE10 - Win7',
'IE11 - Win7'
])
when "ios" then do ->
setDefaultOption('singleRun', true)
setDefaultOption('autoWatch', true)
grunt.option('browsers', ['iOS'])
taskList.push('tests:local')
when "all" then do ->
setDefaultOption('singleRun', true)
setDefaultOption('autoWatch', true)
grunt.option('browsers', [
'PhantomJS',
'Firefox',
'Chrome',
'Safari',
'Opera',
'IE7 - WinXP',
'IE8 - WinXP',
'IE9 - Win7',
'IE10 - Win7',
'IE11 - Win7'
])
taskList.push('tests:local')
else do ->
if (test_type?)
setDefaultOption('singleRun', true)
setDefaultOption('autoWatch', true)
if (',' in test_type)
tests = test_type.split(',')
grunt.option('browsers', tests)
else
grunt.option('browsers', [test_type])
else
grunt.option('browsers') || ['Chrome']
taskList.push('tests:local')

grunt.task.run(taskList)

grunt.registerTask 'travis', 'Test with Travis CI', ['check_pull_req', 'dev', 'test:travis']

grunt.registerTask 'dev', 'Prepare code for testing', ['clean', 'bower', 'build', 'copy:test']

grunt.registerTask 'build', 'Build from latest source', ['jshint:source', 'jshint:tests', 'concat', 'minify', 'usebanner:allhead', 'usebanner:allfoot', 'copy:images']
grunt.registerTask 'build_stripped', 'Build from latest source w/ test artifacts stripped out', ['concat', 'strip_code:build', 'minify', 'usebanner:allhead', 'usebanner:allfoot', 'copy:images']

grunt.registerTask 'package', 'Build a zipped distribution-worthy version', ['build_stripped', 'copy:dist', 'shell:version_dist_templates', 'compress:jquery', 'compress:jqueryS3', 'compress:jqueryAzure', 'compress:core', 'compress:coreS3', 'compress:coreAzure' ]

grunt.registerTask 'custom', 'Build a custom version', (modules) ->
util = require './lib/grunt/utils'
dest = customBuildDest
if (modules?)
util.build.call util, dest, modules.split(',')
else
util.build.call util, dest, []
grunt.task.run(['uglify:custom', 'cssmin:custom', 'strip_code:custom', 'shell:version_custom_templates', 'usebanner:customhead', 'usebanner:customfoot', 'compress:custom', 'build_details'])

grunt.registerTask 'default', 'Default task: clean, bower, lint, build, & test', ['package']

grunt.registerTask "server", ["nodestatic"]

@@ -1,676 +0,0 @@
Fine Uploader is licensed under GNU GPL version 3:

GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

Preamble

The GNU General Public License is a free, copyleft license for
software and other kinds of works.

The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.

When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.

To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.

For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.

Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.

For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.

Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.

Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.

The precise terms and conditions for copying, distribution and
modification follow.

TERMS AND CONDITIONS

0. Definitions.

"This License" refers to version 3 of the GNU General Public License.

"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.

"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.

To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.

A "covered work" means either the unmodified Program or a work based
on the Program.

To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.

To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.

An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.

1. Source Code.

The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.

A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.

The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.

The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.

The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.

The Corresponding Source for a work in source code form is that
same work.

2. Basic Permissions.

All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.

You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.

Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.

3. Protecting Users' Legal Rights From Anti-Circumvention Law.

No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.

When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.

4. Conveying Verbatim Copies.

You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.

You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.

5. Conveying Modified Source Versions.

You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:

a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.

b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".

c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.

d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.

A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.

6. Conveying Non-Source Forms.

You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:

a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.

b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.

c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.

d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.

e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.

A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.

A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.

"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.

If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).

The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.

Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.

7. Additional Terms.

"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.

When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
@ -1,28 +0,0 @@
[![Fine Uploader](http://fineuploader.com/img/FineUploader_logo.png)](http://fineuploader.com/)

Version: 5.0.3

[![Build Status](https://travis-ci.org/Widen/fine-uploader.png?branch=master)](https://travis-ci.org/Widen/fine-uploader) | [![Semver badge](http://calm-shore-6115.herokuapp.com/?label=SemVer&value=2.0.0&color=green)](http://semver.org/spec/v2.0.0.html)

[**Download**](http://fineuploader.com/downloads.html) |
[**Documentation**](http://docs.fineuploader.com) |
[**Examples**](http://fineuploader.com/demos) |
[**Support**](http://fineuploader.com/support.html) |
[**Blog**](http://blog.fineuploader.com/) |
[**Changelog**](http://blog.fineuploader.com/category/changelog/)

---

Fine Uploader aims to make file uploading on the web possible in every browser and on every mobile device. It is **cross-browser**, **dependency-free**, and **100% JavaScript**.

Fine Uploader is simple to use: you only need to include one JavaScript file. There are absolutely no other dependencies.
For more information, please see the [**documentation**](http://docs.fineuploader.com).

### License ###
This plugin is open sourced under GNU GPL v3, or you may purchase a Widen Commercial license to release you from the terms of
GPL v3. Please see the [downloads page](http://fineuploader.com/downloads.html) for details. For open-source users (those
comfortable with the GPL v3 license), please see the [documentation](http://docs.fineuploader.com) for information on building
your own version-stamped copy of the library.

*Fine Uploader is an open-source component of [Widen Enterprises, Inc.](http://www.widen.com/)*
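For context on what this removal takes away, here is a minimal sketch of how the component is typically wired up. It is not code from this repository; the container element ID and the endpoint path are assumptions (the path simply mirrors the `/upload/receiver/*` mapping in the example web.xml below).

```js
// Hedged sketch, not part of this diff: instantiate the Fine Uploader UI against a container
// element and a server-side endpoint. Both the element ID and the endpoint are assumptions.
var uploader = new qq.FineUploader({
    element: document.getElementById("fine-uploader"),
    request: {
        endpoint: "/upload/receiver"
    }
});
```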
@ -1,30 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<web-app xmlns="http://java.sun.com/xml/ns/javaee"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://java.sun.com/xml/ns/javaee
                             http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
         version="2.5">


    <servlet>
        <servlet-name>UploadReceiver</servlet-name>
        <servlet-class>fineuploader.UploadReceiver</servlet-class>
    </servlet>

    <servlet>
        <servlet-name>S3Uploads</servlet-name>
        <servlet-class>fineuploader.S3Uploads</servlet-class>
    </servlet>

    <servlet-mapping>
        <servlet-name>UploadReceiver</servlet-name>
        <url-pattern>/upload/receiver/*</url-pattern>
    </servlet-mapping>

    <servlet-mapping>
        <servlet-name>S3Uploads</servlet-name>
        <url-pattern>/upload/s3/signature</url-pattern>
        <url-pattern>/upload/s3/files/*</url-pattern>
        <url-pattern>/upload/s3/success</url-pattern>
    </servlet-mapping>
</web-app>
@ -1,18 +0,0 @@
{
    "name": "fine-uploader",
    "version": "5.0.3",
    "devDependencies": {
        "jquery": "1.10.0",
        "purl": "https://github.com/allmarkedup/purl.git#~2.3.1",
        "jquery.simulate": "https://github.com/jquery/jquery-simulate.git",
        "json2": "latest",
        "mocha": "~1.11.0",
        "assert": "https://github.com/Jxck/assert.git"
    },
    "exportsOverride": {
        "mocha": {
            "js": "mocha.js",
            "css": "mocha.css"
        }
    }
}
@ -1,10 +0,0 @@
Do not add files from this directory into your project. Please visit the
downloads page for zips that contain combined and version-stamped javascript
and css files, along with all required resources.

# Download
To download a pre-packaged version of Fine Uploader visit: http://fineuploader.com/downloads.html

# Build
For instructions on building your own packaged version of Fine Uploader visit: http://docs.fineuploader.com/contributing.htm
(binary image removed; previous size: 145 B)
@ -1,194 +0,0 @@
|
||||
.qq-uploader {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
}
|
||||
.qq-upload-button {
|
||||
display: block;
|
||||
width: 105px;
|
||||
padding: 7px 0;
|
||||
text-align: center;
|
||||
background: #880000;
|
||||
border-bottom: 1px solid #DDD;
|
||||
color: #FFF;
|
||||
}
|
||||
.qq-upload-button-hover {
|
||||
background: #CC0000;
|
||||
}
|
||||
.qq-upload-button-focus {
|
||||
outline: 1px dotted #000000;
|
||||
}
|
||||
.qq-upload-drop-area, .qq-upload-extra-drop-area {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
min-height: 30px;
|
||||
z-index: 2;
|
||||
background: #FF9797;
|
||||
text-align: center;
|
||||
}
|
||||
.qq-upload-drop-area span {
|
||||
display: block;
|
||||
position: absolute;
|
||||
top: 50%;
|
||||
width: 100%;
|
||||
margin-top: -8px;
|
||||
font-size: 16px;
|
||||
}
|
||||
.qq-upload-extra-drop-area {
|
||||
position: relative;
|
||||
margin-top: 50px;
|
||||
font-size: 16px;
|
||||
padding-top: 30px;
|
||||
height: 20px;
|
||||
min-height: 40px;
|
||||
}
|
||||
.qq-upload-drop-area-active {
|
||||
background: #FF7171;
|
||||
}
|
||||
.qq-upload-list {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
list-style: none;
|
||||
}
|
||||
.qq-upload-list li {
|
||||
margin: 0;
|
||||
padding: 9px;
|
||||
line-height: 15px;
|
||||
font-size: 16px;
|
||||
background-color: #FFF0BD;
|
||||
}
|
||||
.qq-upload-file, .qq-upload-spinner, .qq-upload-size,
|
||||
.qq-upload-cancel, .qq-upload-retry, .qq-upload-failed-text,
|
||||
.qq-upload-delete, .qq-upload-pause, .qq-upload-continue {
|
||||
margin-right: 12px;
|
||||
display: inline;
|
||||
}
|
||||
.qq-upload-file {
|
||||
}
|
||||
.qq-upload-spinner {
|
||||
display: inline-block;
|
||||
background: url("loading.gif");
|
||||
width: 15px;
|
||||
height: 15px;
|
||||
vertical-align: text-bottom;
|
||||
}
|
||||
.qq-drop-processing {
|
||||
display: block;
|
||||
}
|
||||
.qq-drop-processing-spinner {
|
||||
display: inline-block;
|
||||
background: url("processing.gif");
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
vertical-align: text-bottom;
|
||||
}
|
||||
|
||||
.qq-upload-delete, .qq-upload-pause, .qq-upload-continue {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
.qq-upload-retry, .qq-upload-delete, .qq-upload-cancel,
|
||||
.qq-upload-pause, .qq-upload-continue {
|
||||
color: #000000;
|
||||
}
|
||||
|
||||
.qq-upload-size, .qq-upload-cancel, .qq-upload-retry,
|
||||
.qq-upload-delete, .qq-upload-pause, .qq-upload-continue {
|
||||
font-size: 12px;
|
||||
font-weight: normal;
|
||||
}
|
||||
.qq-upload-failed-text {
|
||||
display: none;
|
||||
font-style: italic;
|
||||
font-weight: bold;
|
||||
}
|
||||
.qq-upload-failed-icon {
|
||||
display:none;
|
||||
width:15px;
|
||||
height:15px;
|
||||
vertical-align:text-bottom;
|
||||
}
|
||||
.qq-upload-fail .qq-upload-failed-text {
|
||||
display: inline;
|
||||
}
|
||||
.qq-upload-retrying .qq-upload-failed-text {
|
||||
display: inline;
|
||||
color: #D60000;
|
||||
}
|
||||
.qq-upload-list li.qq-upload-success {
|
||||
background-color: #5DA30C;
|
||||
color: #FFFFFF;
|
||||
}
|
||||
.qq-upload-list li.qq-upload-fail {
|
||||
background-color: #D60000;
|
||||
color: #FFFFFF;
|
||||
}
|
||||
.qq-progress-bar {
|
||||
display: block;
|
||||
background: -moz-linear-gradient(top, rgba(30,87,153,1) 0%, rgba(41,137,216,1) 50%, rgba(32,124,202,1) 51%, rgba(125,185,232,1) 100%); /* FF3.6+ */
|
||||
background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,rgba(30,87,153,1)), color-stop(50%,rgba(41,137,216,1)), color-stop(51%,rgba(32,124,202,1)), color-stop(100%,rgba(125,185,232,1))); /* Chrome,Safari4+ */
|
||||
background: -webkit-linear-gradient(top, rgba(30,87,153,1) 0%,rgba(41,137,216,1) 50%,rgba(32,124,202,1) 51%,rgba(125,185,232,1) 100%); /* Chrome10+,Safari5.1+ */
|
||||
background: -o-linear-gradient(top, rgba(30,87,153,1) 0%,rgba(41,137,216,1) 50%,rgba(32,124,202,1) 51%,rgba(125,185,232,1) 100%); /* Opera 11.10+ */
|
||||
background: -ms-linear-gradient(top, rgba(30,87,153,1) 0%,rgba(41,137,216,1) 50%,rgba(32,124,202,1) 51%,rgba(125,185,232,1) 100%); /* IE10+ */
|
||||
background: linear-gradient(to bottom, rgba(30,87,153,1) 0%,rgba(41,137,216,1) 50%,rgba(32,124,202,1) 51%,rgba(125,185,232,1) 100%); /* W3C */
|
||||
width: 0%;
|
||||
height: 15px;
|
||||
border-radius: 6px;
|
||||
margin-bottom: 3px;
|
||||
}
|
||||
|
||||
.qq-total-progress-bar {
|
||||
height: 25px;
|
||||
border-radius: 9px;
|
||||
}
|
||||
|
||||
.qq-total-progress-bar-container {
|
||||
margin: 9px;
|
||||
}
|
||||
|
||||
INPUT.qq-edit-filename {
|
||||
position: absolute;
|
||||
opacity: 0;
|
||||
filter: alpha(opacity=0);
|
||||
z-index: -1;
|
||||
-ms-filter: "progid:DXImageTransform.Microsoft.Alpha(Opacity=0)";
|
||||
}
|
||||
|
||||
.qq-upload-file.qq-editable {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.qq-edit-filename-icon.qq-editable {
|
||||
display: inline-block;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
INPUT.qq-edit-filename.qq-editing {
|
||||
position: static;
|
||||
margin-top: -5px;
|
||||
margin-right: 10px;
|
||||
margin-bottom: -5px;
|
||||
|
||||
opacity: 1;
|
||||
filter: alpha(opacity=100);
|
||||
-ms-filter: "progid:DXImageTransform.Microsoft.Alpha(Opacity=100)";
|
||||
}
|
||||
|
||||
.qq-edit-filename-icon {
|
||||
display: none;
|
||||
background: url("edit.gif");
|
||||
width: 15px;
|
||||
height: 15px;
|
||||
vertical-align: text-bottom;
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
INPUT.qq-edit-filename.qq-editing ~ .qq-upload-cancel {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.qq-hide {
|
||||
display: none;
|
||||
}
|
@ -1,57 +0,0 @@
|
||||
<!--
|
||||
This is a document you can use when you start building the page that will contain
|
||||
one or more Fine Uploader UI instances. A default template that can be customized
|
||||
is located in the text/template script tag below.
|
||||
|
||||
Please see http://docs.fineuploader.com/features/styling.html for information
|
||||
on how to customize this default template.
|
||||
-->
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<link href="../../custom.fineuploader-{VERSION}.css" rel="stylesheet">
|
||||
<script src="../../custom.fineuploader-{VERSION}.js"></script>
|
||||
<script type="text/template" id="qq-template">
|
||||
<div class="qq-uploader-selector qq-uploader">
|
||||
<div class="qq-total-progress-bar-container-selector qq-total-progress-bar-container">
|
||||
<div class="qq-total-progress-bar-selector qq-progress-bar qq-total-progress-bar"></div>
|
||||
</div>
|
||||
<div class="qq-upload-drop-area-selector qq-upload-drop-area" qq-hide-dropzone>
|
||||
<span>Drop files here to upload</span>
|
||||
</div>
|
||||
<div class="qq-upload-button-selector qq-upload-button">
|
||||
<div>Upload a file</div>
|
||||
</div>
|
||||
<span class="qq-drop-processing-selector qq-drop-processing">
|
||||
<span>Processing dropped files...</span>
|
||||
<span class="qq-drop-processing-spinner-selector qq-drop-processing-spinner"></span>
|
||||
</span>
|
||||
<ul class="qq-upload-list-selector qq-upload-list">
|
||||
<li>
|
||||
<div class="qq-progress-bar-container-selector">
|
||||
<div class="qq-progress-bar-selector qq-progress-bar"></div>
|
||||
</div>
|
||||
<span class="qq-upload-spinner-selector qq-upload-spinner"></span>
|
||||
<span class="qq-edit-filename-icon-selector qq-edit-filename-icon"></span>
|
||||
<span class="qq-upload-file-selector qq-upload-file"></span>
|
||||
<input class="qq-edit-filename-selector qq-edit-filename" tabindex="0" type="text">
|
||||
<span class="qq-upload-size-selector qq-upload-size"></span>
|
||||
<a class="qq-upload-cancel-selector qq-upload-cancel" href="#">Cancel</a>
|
||||
<a class="qq-upload-retry-selector qq-upload-retry" href="#">Retry</a>
|
||||
<a class="qq-upload-delete-selector qq-upload-delete" href="#">Delete</a>
|
||||
<span class="qq-upload-status-text-selector qq-upload-status-text"></span>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</script>
|
||||
|
||||
<title>Fine Uploader default UI</title>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
|
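As a hedged sketch of how a page like the one above ties the markup to the uploader: the `template` option takes the ID of the `text/template` script tag shown in the document, while the container element ID and endpoint below are assumptions, not code from this repository.

```js
// Point the uploader at the inline "qq-template" script tag from the page above.
// The container element ID and the endpoint path are assumptions.
var uploader = new qq.FineUploader({
    element: document.getElementById("uploader-container"),
    template: "qq-template",
    request: {
        endpoint: "/upload/receiver"
    }
});
```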
@ -1,59 +0,0 @@
|
||||
<!--
|
||||
This is a document you can use when you start building the page that will contain
|
||||
one or more Fine Uploader UI instances. A default template that can be customized
|
||||
is located in the text/template script tag below. This template enabled the thumbnail/preview
|
||||
feature.
|
||||
|
||||
Please see http://docs.fineuploader.com/features/styling.html for information
|
||||
on how to customize this default template.
|
||||
-->
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<link href="../../custom.fineuploader-{VERSION}.css" rel="stylesheet">
|
||||
<script src="../../custom.fineuploader-{VERSION}.js"></script>
|
||||
<script type="text/template" id="qq-simple-thumbnails-template">
|
||||
<div class="qq-uploader-selector qq-uploader">
|
||||
<div class="qq-total-progress-bar-container-selector qq-total-progress-bar-container">
|
||||
<div class="qq-total-progress-bar-selector qq-progress-bar qq-total-progress-bar"></div>
|
||||
</div>
|
||||
<div class="qq-upload-drop-area-selector qq-upload-drop-area" qq-hide-dropzone>
|
||||
<span>Drop files here to upload</span>
|
||||
</div>
|
||||
<div class="qq-upload-button-selector qq-upload-button">
|
||||
<div>Upload a file</div>
|
||||
</div>
|
||||
<span class="qq-drop-processing-selector qq-drop-processing">
|
||||
<span>Processing dropped files...</span>
|
||||
<span class="qq-drop-processing-spinner-selector qq-drop-processing-spinner"></span>
|
||||
</span>
|
||||
<ul class="qq-upload-list-selector qq-upload-list">
|
||||
<li>
|
||||
<div class="qq-progress-bar-container-selector">
|
||||
<div class="qq-progress-bar-selector qq-progress-bar"></div>
|
||||
</div>
|
||||
<span class="qq-upload-spinner-selector qq-upload-spinner"></span>
|
||||
<img class="qq-thumbnail-selector" qq-max-size="100" qq-server-scale>
|
||||
<span class="qq-edit-filename-icon-selector qq-edit-filename-icon"></span>
|
||||
<span class="qq-upload-file-selector qq-upload-file"></span>
|
||||
<input class="qq-edit-filename-selector qq-edit-filename" tabindex="0" type="text">
|
||||
<span class="qq-upload-size-selector qq-upload-size"></span>
|
||||
<a class="qq-upload-cancel-selector qq-upload-cancel" href="#">Cancel</a>
|
||||
<a class="qq-upload-retry-selector qq-upload-retry" href="#">Retry</a>
|
||||
<a class="qq-upload-delete-selector qq-upload-delete" href="#">Delete</a>
|
||||
<span class="qq-upload-status-text-selector qq-upload-status-text"></span>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</script>
|
||||
|
||||
<title>Fine Uploader default UI with thumbnails</title>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
|
@ -1,390 +0,0 @@
|
||||
/*globals qq, XDomainRequest*/
|
||||
/** Generic class for sending non-upload ajax requests and handling the associated responses **/
|
||||
qq.AjaxRequester = function (o) {
|
||||
"use strict";
|
||||
|
||||
var log, shouldParamsBeInQueryString,
|
||||
queue = [],
|
||||
requestData = {},
|
||||
options = {
|
||||
acceptHeader: null,
|
||||
validMethods: ["POST"],
|
||||
method: "POST",
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
maxConnections: 3,
|
||||
customHeaders: {},
|
||||
endpointStore: {},
|
||||
paramsStore: {},
|
||||
mandatedParams: {},
|
||||
allowXRequestedWithAndCacheControl: true,
|
||||
successfulResponseCodes: {
|
||||
"DELETE": [200, 202, 204],
|
||||
"POST": [200, 204],
|
||||
"GET": [200]
|
||||
},
|
||||
cors: {
|
||||
expected: false,
|
||||
sendCredentials: false
|
||||
},
|
||||
log: function (str, level) {},
|
||||
onSend: function (id) {},
|
||||
onComplete: function (id, xhrOrXdr, isError) {},
|
||||
onProgress: null
|
||||
};
|
||||
|
||||
qq.extend(options, o);
|
||||
log = options.log;
|
||||
|
||||
if (qq.indexOf(options.validMethods, options.method) < 0) {
|
||||
throw new Error("'" + options.method + "' is not a supported method for this type of request!");
|
||||
}
|
||||
|
||||
// [Simple methods](http://www.w3.org/TR/cors/#simple-method)
|
||||
// are defined by the W3C in the CORS spec as a list of methods that, in part,
|
||||
// make a CORS request eligible to be exempt from preflighting.
|
||||
function isSimpleMethod() {
|
||||
return qq.indexOf(["GET", "POST", "HEAD"], options.method) >= 0;
|
||||
}
|
||||
|
||||
// [Simple headers](http://www.w3.org/TR/cors/#simple-header)
|
||||
// are defined by the W3C in the CORS spec as a list of headers that, in part,
|
||||
// make a CORS request eligible to be exempt from preflighting.
|
||||
function containsNonSimpleHeaders(headers) {
|
||||
var containsNonSimple = false;
|
||||
|
||||
qq.each(headers, function(idx, header) {
|
||||
if (qq.indexOf(["Accept", "Accept-Language", "Content-Language", "Content-Type"], header) < 0) {
|
||||
containsNonSimple = true;
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
return containsNonSimple;
|
||||
}
|
||||
|
||||
function isXdr(xhr) {
|
||||
//The `withCredentials` test is a commonly accepted way to determine if XHR supports CORS.
|
||||
return options.cors.expected && xhr.withCredentials === undefined;
|
||||
}
|
||||
|
||||
// Returns either a new `XMLHttpRequest` or `XDomainRequest` instance.
|
||||
function getCorsAjaxTransport() {
|
||||
var xhrOrXdr;
|
||||
|
||||
if (window.XMLHttpRequest || window.ActiveXObject) {
|
||||
xhrOrXdr = qq.createXhrInstance();
|
||||
|
||||
if (xhrOrXdr.withCredentials === undefined) {
|
||||
xhrOrXdr = new XDomainRequest();
|
||||
}
|
||||
}
|
||||
|
||||
return xhrOrXdr;
|
||||
}
|
||||
|
||||
// Returns either a new XHR/XDR instance, or an existing one for the associated `File` or `Blob`.
|
||||
function getXhrOrXdr(id, suppliedXhr) {
|
||||
var xhrOrXdr = requestData[id].xhr;
|
||||
|
||||
if (!xhrOrXdr) {
|
||||
if (suppliedXhr) {
|
||||
xhrOrXdr = suppliedXhr;
|
||||
}
|
||||
else {
|
||||
if (options.cors.expected) {
|
||||
xhrOrXdr = getCorsAjaxTransport();
|
||||
}
|
||||
else {
|
||||
xhrOrXdr = qq.createXhrInstance();
|
||||
}
|
||||
}
|
||||
|
||||
requestData[id].xhr = xhrOrXdr;
|
||||
}
|
||||
|
||||
return xhrOrXdr;
|
||||
}
|
||||
|
||||
// Removes element from queue, sends next request
|
||||
function dequeue(id) {
|
||||
var i = qq.indexOf(queue, id),
|
||||
max = options.maxConnections,
|
||||
nextId;
|
||||
|
||||
delete requestData[id];
|
||||
queue.splice(i, 1);
|
||||
|
||||
if (queue.length >= max && i < max) {
|
||||
nextId = queue[max - 1];
|
||||
sendRequest(nextId);
|
||||
}
|
||||
}
|
||||
|
||||
function onComplete(id, xdrError) {
|
||||
var xhr = getXhrOrXdr(id),
|
||||
method = options.method,
|
||||
isError = xdrError === true;
|
||||
|
||||
dequeue(id);
|
||||
|
||||
if (isError) {
|
||||
log(method + " request for " + id + " has failed", "error");
|
||||
}
|
||||
else if (!isXdr(xhr) && !isResponseSuccessful(xhr.status)) {
|
||||
isError = true;
|
||||
log(method + " request for " + id + " has failed - response code " + xhr.status, "error");
|
||||
}
|
||||
|
||||
options.onComplete(id, xhr, isError);
|
||||
}
|
||||
|
||||
function getParams(id) {
|
||||
var onDemandParams = requestData[id].additionalParams,
|
||||
mandatedParams = options.mandatedParams,
|
||||
params;
|
||||
|
||||
if (options.paramsStore.get) {
|
||||
params = options.paramsStore.get(id);
|
||||
}
|
||||
|
||||
if (onDemandParams) {
|
||||
qq.each(onDemandParams, function (name, val) {
|
||||
params = params || {};
|
||||
params[name] = val;
|
||||
});
|
||||
}
|
||||
|
||||
if (mandatedParams) {
|
||||
qq.each(mandatedParams, function (name, val) {
|
||||
params = params || {};
|
||||
params[name] = val;
|
||||
});
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
function sendRequest(id, opt_xhr) {
|
||||
var xhr = getXhrOrXdr(id, opt_xhr),
|
||||
method = options.method,
|
||||
params = getParams(id),
|
||||
payload = requestData[id].payload,
|
||||
url;
|
||||
|
||||
options.onSend(id);
|
||||
|
||||
url = createUrl(id, params);
|
||||
|
||||
// XDR and XHR status detection APIs differ a bit.
|
||||
if (isXdr(xhr)) {
|
||||
xhr.onload = getXdrLoadHandler(id);
|
||||
xhr.onerror = getXdrErrorHandler(id);
|
||||
}
|
||||
else {
|
||||
xhr.onreadystatechange = getXhrReadyStateChangeHandler(id);
|
||||
}
|
||||
|
||||
|
||||
registerForUploadProgress(id);
|
||||
|
||||
// The last parameter is assumed to be ignored if we are actually using `XDomainRequest`.
|
||||
xhr.open(method, url, true);
|
||||
|
||||
// Instruct the transport to send cookies along with the CORS request,
|
||||
// unless we are using `XDomainRequest`, which is not capable of this.
|
||||
if (options.cors.expected && options.cors.sendCredentials && !isXdr(xhr)) {
|
||||
xhr.withCredentials = true;
|
||||
}
|
||||
|
||||
setHeaders(id);
|
||||
|
||||
log("Sending " + method + " request for " + id);
|
||||
|
||||
if (payload) {
|
||||
xhr.send(payload);
|
||||
}
|
||||
else if (shouldParamsBeInQueryString || !params) {
|
||||
xhr.send();
|
||||
}
|
||||
else if (params && options.contentType && options.contentType.toLowerCase().indexOf("application/x-www-form-urlencoded") >= 0) {
|
||||
xhr.send(qq.obj2url(params, ""));
|
||||
}
|
||||
else if (params && options.contentType && options.contentType.toLowerCase().indexOf("application/json") >= 0) {
|
||||
xhr.send(JSON.stringify(params));
|
||||
}
|
||||
else {
|
||||
xhr.send(params);
|
||||
}
|
||||
|
||||
return xhr;
|
||||
}
|
||||
|
||||
function createUrl(id, params) {
|
||||
var endpoint = options.endpointStore.get(id),
|
||||
addToPath = requestData[id].addToPath;
|
||||
|
||||
/*jshint -W116,-W041 */
|
||||
if (addToPath != undefined) {
|
||||
endpoint += "/" + addToPath;
|
||||
}
|
||||
|
||||
if (shouldParamsBeInQueryString && params) {
|
||||
return qq.obj2url(params, endpoint);
|
||||
}
|
||||
else {
|
||||
return endpoint;
|
||||
}
|
||||
}
|
||||
|
||||
// Invoked by the UA to indicate a number of possible states that describe
|
||||
// a live `XMLHttpRequest` transport.
|
||||
function getXhrReadyStateChangeHandler(id) {
|
||||
return function () {
|
||||
if (getXhrOrXdr(id).readyState === 4) {
|
||||
onComplete(id);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function registerForUploadProgress(id) {
|
||||
var onProgress = options.onProgress;
|
||||
|
||||
if (onProgress) {
|
||||
getXhrOrXdr(id).upload.onprogress = function(e) {
|
||||
if (e.lengthComputable) {
|
||||
onProgress(id, e.loaded, e.total);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// This will be called by IE to indicate **success** for an associated
|
||||
// `XDomainRequest` transported request.
|
||||
function getXdrLoadHandler(id) {
|
||||
return function () {
|
||||
onComplete(id);
|
||||
};
|
||||
}
|
||||
|
||||
// This will be called by IE to indicate **failure** for an associated
|
||||
// `XDomainRequest` transported request.
|
||||
function getXdrErrorHandler(id) {
|
||||
return function () {
|
||||
onComplete(id, true);
|
||||
};
|
||||
}
|
||||
|
||||
function setHeaders(id) {
|
||||
var xhr = getXhrOrXdr(id),
|
||||
customHeaders = options.customHeaders,
|
||||
onDemandHeaders = requestData[id].additionalHeaders || {},
|
||||
method = options.method,
|
||||
allHeaders = {};
|
||||
|
||||
// If XDomainRequest is being used, we can't set headers, so just ignore this block.
|
||||
if (!isXdr(xhr)) {
|
||||
options.acceptHeader && xhr.setRequestHeader("Accept", options.acceptHeader);
|
||||
|
||||
// Only attempt to add X-Requested-With & Cache-Control if permitted
|
||||
if (options.allowXRequestedWithAndCacheControl) {
|
||||
// Do not add X-Requested-With & Cache-Control if this is a cross-origin request
|
||||
// OR the cross-origin request contains a non-simple method or header.
|
||||
// This is done to ensure a preflight is not triggered exclusively based on the
|
||||
// addition of these 2 non-simple headers.
|
||||
if (!options.cors.expected || (!isSimpleMethod() || containsNonSimpleHeaders(customHeaders))) {
|
||||
xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest");
|
||||
xhr.setRequestHeader("Cache-Control", "no-cache");
|
||||
}
|
||||
}
|
||||
|
||||
if (options.contentType && (method === "POST" || method === "PUT")) {
|
||||
xhr.setRequestHeader("Content-Type", options.contentType);
|
||||
}
|
||||
|
||||
qq.extend(allHeaders, qq.isFunction(customHeaders) ? customHeaders(id) : customHeaders);
|
||||
qq.extend(allHeaders, onDemandHeaders);
|
||||
|
||||
qq.each(allHeaders, function (name, val) {
|
||||
xhr.setRequestHeader(name, val);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function isResponseSuccessful(responseCode) {
|
||||
return qq.indexOf(options.successfulResponseCodes[options.method], responseCode) >= 0;
|
||||
}
|
||||
|
||||
function prepareToSend(id, opt_xhr, addToPath, additionalParams, additionalHeaders, payload) {
|
||||
requestData[id] = {
|
||||
addToPath: addToPath,
|
||||
additionalParams: additionalParams,
|
||||
additionalHeaders: additionalHeaders,
|
||||
payload: payload
|
||||
};
|
||||
|
||||
var len = queue.push(id);
|
||||
|
||||
// if too many active connections, wait...
|
||||
if (len <= options.maxConnections) {
|
||||
return sendRequest(id, opt_xhr);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
shouldParamsBeInQueryString = options.method === "GET" || options.method === "DELETE";
|
||||
|
||||
qq.extend(this, {
|
||||
// Start the process of sending the request. The ID refers to the file associated with the request.
|
||||
initTransport: function(id) {
|
||||
var path, params, headers, payload, cacheBuster;
|
||||
|
||||
return {
|
||||
// Optionally specify the end of the endpoint path for the request.
|
||||
withPath: function(appendToPath) {
|
||||
path = appendToPath;
|
||||
return this;
|
||||
},
|
||||
|
||||
// Optionally specify additional parameters to send along with the request.
|
||||
// These will be added to the query string for GET/DELETE requests or the payload
|
||||
// for POST/PUT requests. The Content-Type of the request will be used to determine
|
||||
// how these parameters should be formatted as well.
|
||||
withParams: function(additionalParams) {
|
||||
params = additionalParams;
|
||||
return this;
|
||||
},
|
||||
|
||||
// Optionally specify additional headers to send along with the request.
|
||||
withHeaders: function(additionalHeaders) {
|
||||
headers = additionalHeaders;
|
||||
return this;
|
||||
},
|
||||
|
||||
// Optionally specify a payload/body for the request.
|
||||
withPayload: function(thePayload) {
|
||||
payload = thePayload;
|
||||
return this;
|
||||
},
|
||||
|
||||
// Appends a cache buster (timestamp) to the request URL as a query parameter (only if GET or DELETE)
|
||||
withCacheBuster: function() {
|
||||
cacheBuster = true;
|
||||
return this;
|
||||
},
|
||||
|
||||
// Send the constructed request.
|
||||
send: function(opt_xhr) {
|
||||
if (cacheBuster && qq.indexOf(["GET", "DELETE"], options.method) >= 0) {
|
||||
params.qqtimestamp = new Date().getTime();
|
||||
}
|
||||
|
||||
return prepareToSend(id, opt_xhr, path, params, headers, payload);
|
||||
}
|
||||
};
|
||||
},
|
||||
|
||||
canceled: function(id) {
|
||||
dequeue(id);
|
||||
}
|
||||
});
|
||||
};
|
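To make the fluent `initTransport` interface above concrete, here is a hedged usage sketch. Every option shown maps to a field in the defaults above, but the endpoint, parameter values, and file ID are assumptions rather than code from this repository.

```js
// A DELETE requester built on qq.AjaxRequester. The endpoint and parameter values are assumptions.
var deleter = new qq.AjaxRequester({
    validMethods: ["DELETE"],
    method: "DELETE",
    endpointStore: {
        get: function(id) { return "/uploads"; }
    },
    log: function(msg, level) { console.log((level || "info") + ": " + msg); },
    onComplete: function(id, xhr, isError) {
        console.log("DELETE for " + id + (isError ? " failed" : " succeeded"));
    }
});

// Appends "123" to the endpoint path and puts the params on the query string (GET/DELETE
// requests carry params in the query string), producing roughly: DELETE /uploads/123?reason=...
deleter.initTransport("123")
    .withPath("123")
    .withParams({ reason: "user-cancelled" })
    .send();
```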
@ -1,236 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* Upload handler used by the upload to Azure module that depends on File API support, and, therefore, makes use of
|
||||
* `XMLHttpRequest` level 2 to upload `File`s and `Blob`s directly to Azure Blob Storage containers via the
|
||||
* associated Azure API.
|
||||
*
|
||||
* @param spec Options passed from the base handler
|
||||
* @param proxy Callbacks & methods used to query for or push out data/changes
|
||||
*/
|
||||
// TODO l18n for error messages returned to UI
|
||||
qq.azure.XhrUploadHandler = function(spec, proxy) {
|
||||
"use strict";
|
||||
|
||||
var handler = this,
|
||||
log = proxy.log,
|
||||
cors = spec.cors,
|
||||
endpointStore = spec.endpointStore,
|
||||
paramsStore = spec.paramsStore,
|
||||
signature = spec.signature,
|
||||
filenameParam = spec.filenameParam,
|
||||
minFileSizeForChunking = spec.chunking.minFileSize,
|
||||
deleteBlob = spec.deleteBlob,
|
||||
onGetBlobName = spec.onGetBlobName,
|
||||
getName = proxy.getName,
|
||||
getSize = proxy.getSize,
|
||||
|
||||
getBlobMetadata = function(id) {
|
||||
var params = paramsStore.get(id);
|
||||
params[filenameParam] = getName(id);
|
||||
return params;
|
||||
},
|
||||
|
||||
api = {
|
||||
putBlob: new qq.azure.PutBlob({
|
||||
getBlobMetadata: getBlobMetadata,
|
||||
log: log
|
||||
}),
|
||||
|
||||
putBlock: new qq.azure.PutBlock({
|
||||
log: log
|
||||
}),
|
||||
|
||||
putBlockList: new qq.azure.PutBlockList({
|
||||
getBlobMetadata: getBlobMetadata,
|
||||
log: log
|
||||
}),
|
||||
|
||||
getSasForPutBlobOrBlock: new qq.azure.GetSas({
|
||||
cors: cors,
|
||||
customHeaders: signature.customHeaders,
|
||||
endpointStore: {
|
||||
get: function() {
|
||||
return signature.endpoint;
|
||||
}
|
||||
},
|
||||
log: log,
|
||||
restRequestVerb: "PUT"
|
||||
})
|
||||
};
|
||||
|
||||
|
||||
function combineChunks(id) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
getSignedUrl(id).then(function(sasUri) {
|
||||
var mimeType = handler._getMimeType(id),
|
||||
blockIdEntries = handler._getPersistableData(id).blockIdEntries;
|
||||
|
||||
api.putBlockList.send(id, sasUri, blockIdEntries, mimeType, function(xhr) {
|
||||
handler._registerXhr(id, null, xhr, api.putBlockList);
|
||||
})
|
||||
.then(function(xhr) {
|
||||
log("Success combining chunks for id " + id);
|
||||
promise.success({}, xhr);
|
||||
}, function(xhr) {
|
||||
log("Attempt to combine chunks failed for id " + id, "error");
|
||||
handleFailure(xhr, promise);
|
||||
});
|
||||
|
||||
},
|
||||
promise.failure);
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
function determineBlobUrl(id) {
|
||||
var containerUrl = endpointStore.get(id),
|
||||
promise = new qq.Promise(),
|
||||
getBlobNameSuccess = function(blobName) {
|
||||
handler._setThirdPartyFileId(id, blobName);
|
||||
promise.success(containerUrl + "/" + blobName);
|
||||
},
|
||||
getBlobNameFailure = function(reason) {
|
||||
promise.failure(reason);
|
||||
};
|
||||
|
||||
onGetBlobName(id).then(getBlobNameSuccess, getBlobNameFailure);
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
function getSignedUrl(id, opt_chunkIdx) {
|
||||
// We may have multiple SAS requests in progress for the same file, so we must include the chunk idx
|
||||
// as part of the ID when communicating with the SAS ajax requester to avoid collisions.
|
||||
var getSasId = opt_chunkIdx == null ? id : id + "." + opt_chunkIdx,
|
||||
|
||||
promise = new qq.Promise(),
|
||||
getSasSuccess = function(sasUri) {
|
||||
log("GET SAS request succeeded.");
|
||||
promise.success(sasUri);
|
||||
},
|
||||
getSasFailure = function(reason, getSasXhr) {
|
||||
log("GET SAS request failed: " + reason, "error");
|
||||
promise.failure({error: "Problem communicating with local server"}, getSasXhr);
|
||||
},
|
||||
determineBlobUrlSuccess = function(blobUrl) {
|
||||
api.getSasForPutBlobOrBlock.request(getSasId, blobUrl).then(
|
||||
getSasSuccess,
|
||||
getSasFailure
|
||||
);
|
||||
},
|
||||
determineBlobUrlFailure = function(reason) {
|
||||
log(qq.format("Failed to determine blob name for ID {} - {}", id, reason), "error");
|
||||
promise.failure({error: reason});
|
||||
};
|
||||
|
||||
determineBlobUrl(id).then(determineBlobUrlSuccess, determineBlobUrlFailure);
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
function handleFailure(xhr, promise) {
|
||||
var azureError = qq.azure.util.parseAzureError(xhr.responseText, log),
|
||||
errorMsg = "Problem sending file to Azure";
|
||||
|
||||
promise.failure({error: errorMsg,
|
||||
azureError: azureError && azureError.message,
|
||||
reset: xhr.status === 403
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
uploadChunk: function(id, chunkIdx) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
getSignedUrl(id, chunkIdx).then(
|
||||
function(sasUri) {
|
||||
var xhr = handler._createXhr(id, chunkIdx),
|
||||
chunkData = handler._getChunkData(id, chunkIdx);
|
||||
|
||||
handler._registerProgressHandler(id, chunkIdx, chunkData.size);
|
||||
handler._registerXhr(id, chunkIdx, xhr, api.putBlock);
|
||||
|
||||
// We may have multiple put block requests in progress for the same file, so we must include the chunk idx
|
||||
// as part of the ID when communicating with the put block ajax requester to avoid collisions.
|
||||
api.putBlock.upload(id + "." + chunkIdx, xhr, sasUri, chunkIdx, chunkData.blob).then(
|
||||
function(blockIdEntry) {
|
||||
if (!handler._getPersistableData(id).blockIdEntries) {
|
||||
handler._getPersistableData(id).blockIdEntries = [];
|
||||
}
|
||||
|
||||
handler._getPersistableData(id).blockIdEntries.push(blockIdEntry);
|
||||
log("Put Block call succeeded for " + id);
|
||||
promise.success({}, xhr);
|
||||
},
|
||||
function() {
|
||||
log(qq.format("Put Block call failed for ID {} on part {}", id, chunkIdx), "error");
|
||||
handleFailure(xhr, promise);
|
||||
}
|
||||
);
|
||||
},
|
||||
promise.failure
|
||||
);
|
||||
|
||||
return promise;
|
||||
},
|
||||
|
||||
uploadFile: function(id) {
|
||||
var promise = new qq.Promise(),
|
||||
fileOrBlob = handler.getFile(id);
|
||||
|
||||
getSignedUrl(id).then(function(sasUri) {
|
||||
var xhr = handler._createXhr(id);
|
||||
|
||||
handler._registerProgressHandler(id);
|
||||
|
||||
api.putBlob.upload(id, xhr, sasUri, fileOrBlob).then(
|
||||
function() {
|
||||
log("Put Blob call succeeded for " + id);
|
||||
promise.success({}, xhr);
|
||||
},
|
||||
function() {
|
||||
log("Put Blob call failed for " + id, "error");
|
||||
handleFailure(xhr, promise);
|
||||
}
|
||||
);
|
||||
},
|
||||
promise.failure);
|
||||
|
||||
return promise;
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
qq.extend(this, new qq.XhrUploadHandler({
|
||||
options: qq.extend({namespace: "azure"}, spec),
|
||||
proxy: qq.extend({getEndpoint: spec.endpointStore.get}, proxy)
|
||||
}
|
||||
));
|
||||
|
||||
qq.override(this, function(super_) {
|
||||
return {
|
||||
expunge: function(id) {
|
||||
var relatedToCancel = handler._wasCanceled(id),
|
||||
chunkingData = handler._getPersistableData(id),
|
||||
blockIdEntries = (chunkingData && chunkingData.blockIdEntries) || [];
|
||||
|
||||
if (relatedToCancel && blockIdEntries.length > 0) {
|
||||
deleteBlob(id);
|
||||
}
|
||||
|
||||
super_.expunge(id);
|
||||
},
|
||||
|
||||
finalizeChunks: function(id) {
|
||||
return combineChunks(id);
|
||||
},
|
||||
|
||||
_shouldChunkThisFile: function(id) {
|
||||
var maybePossible = super_._shouldChunkThisFile(id);
|
||||
return maybePossible && getSize(id) >= minFileSizeForChunking;
|
||||
}
|
||||
};
|
||||
});
|
||||
};
|
@ -1,78 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Sends a GET request to the integrator's server, which should return a Shared Access Signature URI used to
|
||||
* make a specific request on a Blob via the Azure REST API.
|
||||
*/
|
||||
qq.azure.GetSas = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
options = {
|
||||
cors: {
|
||||
expected: false,
|
||||
sendCredentials: false
|
||||
},
|
||||
customHeaders: {},
|
||||
restRequestVerb: "PUT",
|
||||
endpointStore: null,
|
||||
log: function(str, level) {}
|
||||
},
|
||||
requestPromises = {};
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
function sasResponseReceived(id, xhr, isError) {
|
||||
var promise = requestPromises[id];
|
||||
|
||||
if (isError) {
|
||||
promise.failure("Received response code " + xhr.status, xhr);
|
||||
}
|
||||
else {
|
||||
if (xhr.responseText.length) {
|
||||
promise.success(xhr.responseText);
|
||||
}
|
||||
else {
|
||||
promise.failure("Empty response.", xhr);
|
||||
}
|
||||
}
|
||||
|
||||
delete requestPromises[id];
|
||||
}
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
acceptHeader: "application/json",
|
||||
validMethods: ["GET"],
|
||||
method: "GET",
|
||||
successfulResponseCodes: {
|
||||
"GET": [200]
|
||||
},
|
||||
contentType: null,
|
||||
customHeaders: options.customHeaders,
|
||||
endpointStore: options.endpointStore,
|
||||
cors: options.cors,
|
||||
log: options.log,
|
||||
onComplete: sasResponseReceived
|
||||
}));
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
request: function(id, blobUri) {
|
||||
var requestPromise = new qq.Promise(),
|
||||
restVerb = options.restRequestVerb;
|
||||
|
||||
options.log(qq.format("Submitting GET SAS request for a {} REST request related to file ID {}.", restVerb, id));
|
||||
|
||||
requestPromises[id] = requestPromise;
|
||||
|
||||
requester.initTransport(id)
|
||||
.withParams({
|
||||
bloburi: blobUri,
|
||||
_method: restVerb
|
||||
})
|
||||
.withCacheBuster()
|
||||
.send();
|
||||
|
||||
return requestPromise;
|
||||
}
|
||||
});
|
||||
};
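// Hedged usage sketch (not part of the original source; the endpoint URL and blob
// URI are placeholders). request(id, blobUri) sends a GET with "bloburi" and
// "_method" query params and resolves with the SAS URI returned as plain text.
var getSas = new qq.azure.GetSas({
    restRequestVerb: "DELETE",
    endpointStore: { get: function() { return "/azure/sas"; } },
    log: function(msg, level) { console.log(msg); }
});
getSas.request(0, "https://account.blob.core.windows.net/container/blob").then(
    function(sasUri) { /* issue the Azure REST call using sasUri */ },
    function(reason, xhr) { /* SAS retrieval failed */ }
);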
|
@ -1,22 +0,0 @@
|
||||
/*globals jQuery*/
|
||||
/**
|
||||
* Simply an alias for the `fineUploader` plug-in wrapper, but hides the required `endpointType` option from the
|
||||
* integrator. I thought it may be confusing to convey to the integrator that, when using Fine Uploader in Azure mode,
|
||||
* you need to specify an `endpointType` with a value of "azure", and perhaps an `uploaderType` with a value of "basic" if
|
||||
* you want to use basic mode when uploading directly to Azure as well. So, you can use this plug-in alias and not worry
|
||||
* about the `endpointType` option at all.
|
||||
*/
|
||||
(function($) {
|
||||
"use strict";
|
||||
|
||||
$.fn.fineUploaderAzure = function(optionsOrCommand) {
|
||||
if (typeof optionsOrCommand === "object") {
|
||||
|
||||
// This option is used to tell the plug-in wrapper to instantiate the appropriate Azure-namespace modules.
|
||||
optionsOrCommand.endpointType = "azure";
|
||||
}
|
||||
|
||||
return $.fn.fineUploader.apply(this, arguments);
|
||||
};
|
||||
|
||||
}(jQuery));
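// Hedged usage sketch (endpoints are placeholders; the nested option names follow
// core Fine Uploader conventions and are assumptions here — only `endpointType`
// is set by the wrapper itself):
$("#fine-uploader").fineUploaderAzure({
    request: { endpoint: "https://account.blob.core.windows.net/container" },
    signature: { endpoint: "/azure/sas" }
});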
|
@ -1,47 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Implements the Delete Blob Azure REST API call. http://msdn.microsoft.com/en-us/library/windowsazure/dd179413.aspx.
|
||||
*/
|
||||
qq.azure.DeleteBlob = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
method = "DELETE",
|
||||
options = {
|
||||
endpointStore: {},
|
||||
onDelete: function(id) {},
|
||||
onDeleteComplete: function(id, xhr, isError) {},
|
||||
log: function(str, level) {}
|
||||
};
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
validMethods: [method],
|
||||
method: method,
|
||||
successfulResponseCodes: (function() {
|
||||
var codes = {};
|
||||
codes[method] = [202];
|
||||
return codes;
|
||||
}()),
|
||||
contentType: null,
|
||||
endpointStore: options.endpointStore,
|
||||
allowXRequestedWithAndCacheControl: false,
|
||||
cors: {
|
||||
expected: true
|
||||
},
|
||||
log: options.log,
|
||||
onSend: options.onDelete,
|
||||
onComplete: options.onDeleteComplete
|
||||
}));
|
||||
|
||||
qq.extend(this, {
|
||||
method: method,
|
||||
send: function(id) {
|
||||
options.log("Submitting Delete Blob request for " + id);
|
||||
|
||||
return requester.initTransport(id)
|
||||
.send();
|
||||
}
|
||||
});
|
||||
};
|
@ -1,81 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Implements the Put Blob Azure REST API call. http://msdn.microsoft.com/en-us/library/windowsazure/dd179451.aspx.
|
||||
*/
|
||||
qq.azure.PutBlob = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
method = "PUT",
|
||||
options = {
|
||||
getBlobMetadata: function(id) {},
|
||||
log: function(str, level) {}
|
||||
},
|
||||
endpoints = {},
|
||||
promises = {},
|
||||
endpointHandler = {
|
||||
get: function(id) {
|
||||
return endpoints[id];
|
||||
}
|
||||
};
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
validMethods: [method],
|
||||
method: method,
|
||||
successfulResponseCodes: (function() {
|
||||
var codes = {};
|
||||
codes[method] = [201];
|
||||
return codes;
|
||||
}()),
|
||||
contentType: null,
|
||||
customHeaders: function(id) {
|
||||
var params = options.getBlobMetadata(id),
|
||||
headers = qq.azure.util.getParamsAsHeaders(params);
|
||||
|
||||
headers["x-ms-blob-type"] = "BlockBlob";
|
||||
|
||||
return headers;
|
||||
},
|
||||
endpointStore: endpointHandler,
|
||||
allowXRequestedWithAndCacheControl: false,
|
||||
cors: {
|
||||
expected: true
|
||||
},
|
||||
log: options.log,
|
||||
onComplete: function(id, xhr, isError) {
|
||||
var promise = promises[id];
|
||||
|
||||
delete endpoints[id];
|
||||
delete promises[id];
|
||||
|
||||
if (isError) {
|
||||
promise.failure();
|
||||
}
|
||||
else {
|
||||
promise.success();
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
method: method,
|
||||
upload: function(id, xhr, url, file) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
options.log("Submitting Put Blob request for " + id);
|
||||
|
||||
promises[id] = promise;
|
||||
endpoints[id] = url;
|
||||
|
||||
requester.initTransport(id)
|
||||
.withPayload(file)
|
||||
.withHeaders({"Content-Type": file.type})
|
||||
.send(xhr);
|
||||
|
||||
return promise;
|
||||
}
|
||||
});
|
||||
};
|
@ -1,104 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Implements the Put Block List Azure REST API call. http://msdn.microsoft.com/en-us/library/windowsazure/dd179467.aspx.
|
||||
*/
|
||||
qq.azure.PutBlockList = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
method = "PUT",
|
||||
promises = {},
|
||||
options = {
|
||||
getBlobMetadata: function(id) {},
|
||||
log: function(str, level) {}
|
||||
},
|
||||
endpoints = {},
|
||||
endpointHandler = {
|
||||
get: function(id) {
|
||||
return endpoints[id];
|
||||
}
|
||||
};
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
validMethods: [method],
|
||||
method: method,
|
||||
successfulResponseCodes: (function() {
|
||||
var codes = {};
|
||||
codes[method] = [201];
|
||||
return codes;
|
||||
}()),
|
||||
customHeaders: function(id) {
|
||||
var params = options.getBlobMetadata(id);
|
||||
|
||||
return qq.azure.util.getParamsAsHeaders(params);
|
||||
},
|
||||
contentType: "text/plain",
|
||||
endpointStore: endpointHandler,
|
||||
allowXRequestedWithAndCacheControl: false,
|
||||
cors: {
|
||||
expected: true
|
||||
},
|
||||
log: options.log,
|
||||
onSend: function() {},
|
||||
onComplete: function(id, xhr, isError) {
|
||||
var promise = promises[id];
|
||||
|
||||
delete endpoints[id];
|
||||
delete promises[id];
|
||||
|
||||
if (isError) {
|
||||
promise.failure(xhr);
|
||||
}
|
||||
else {
|
||||
promise.success(xhr);
|
||||
}
|
||||
|
||||
}
|
||||
}));
|
||||
|
||||
function createRequestBody(blockIdEntries) {
|
||||
var doc = document.implementation.createDocument(null, "BlockList", null);
|
||||
|
||||
// If we don't sort the block ID entries by part number, the file will be combined incorrectly by Azure
|
||||
blockIdEntries.sort(function(a, b) {
|
||||
return a.part - b.part;
|
||||
});
|
||||
|
||||
// Construct an XML document for each pair of etag/part values that correspond to part uploads.
|
||||
qq.each(blockIdEntries, function(idx, blockIdEntry) {
|
||||
var latestEl = doc.createElement("Latest"),
|
||||
latestTextEl = doc.createTextNode(blockIdEntry.id);
|
||||
|
||||
latestEl.appendChild(latestTextEl);
|
||||
qq(doc).children()[0].appendChild(latestEl);
|
||||
});
|
||||
|
||||
// Turn the resulting XML document into a string fit for transport.
|
||||
return new XMLSerializer().serializeToString(doc);
|
||||
}
|
||||
|
||||
qq.extend(this, {
|
||||
method: method,
|
||||
send: function(id, sasUri, blockIdEntries, fileMimeType, registerXhrCallback) {
|
||||
var promise = new qq.Promise(),
|
||||
blockIdsXml = createRequestBody(blockIdEntries),
|
||||
xhr;
|
||||
|
||||
promises[id] = promise;
|
||||
|
||||
options.log(qq.format("Submitting Put Block List request for {}", id));
|
||||
|
||||
endpoints[id] = qq.format("{}&comp=blocklist", sasUri);
|
||||
|
||||
xhr = requester.initTransport(id)
|
||||
.withPayload(blockIdsXml)
|
||||
.withHeaders({"x-ms-blob-content-type": fileMimeType})
|
||||
.send();
|
||||
registerXhrCallback(xhr);
|
||||
|
||||
return promise;
|
||||
}
|
||||
});
|
||||
};
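// Illustrative sketch (not in the original source): the XML body that
// createRequestBody above would serialize for two uploaded parts. Block IDs come
// from qq.azure.PutBlock's createBlockId (zero-padded part number, base64-encoded),
// and entries are sorted by part number before serialization:
var examplePutBlockListBody =
    "<BlockList>" +
        "<Latest>" + btoa("00001") + "</Latest>" +  // part 1 -> "MDAwMDE="
        "<Latest>" + btoa("00002") + "</Latest>" +  // part 2 -> "MDAwMDI="
    "</BlockList>";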
|
@ -1,84 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Implements the Put Block Azure REST API call. http://msdn.microsoft.com/en-us/library/windowsazure/dd135726.aspx.
|
||||
*/
|
||||
qq.azure.PutBlock = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
method = "PUT",
|
||||
blockIdEntries = {},
|
||||
promises = {},
|
||||
options = {
|
||||
log: function(str, level) {}
|
||||
},
|
||||
endpoints = {},
|
||||
endpointHandler = {
|
||||
get: function(id) {
|
||||
return endpoints[id];
|
||||
}
|
||||
};
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
validMethods: [method],
|
||||
method: method,
|
||||
successfulResponseCodes: (function() {
|
||||
var codes = {};
|
||||
codes[method] = [201];
|
||||
return codes;
|
||||
}()),
|
||||
contentType: null,
|
||||
endpointStore: endpointHandler,
|
||||
allowXRequestedWithAndCacheControl: false,
|
||||
cors: {
|
||||
expected: true
|
||||
},
|
||||
log: options.log,
|
||||
onComplete: function(id, xhr, isError) {
|
||||
var promise = promises[id],
|
||||
blockIdEntry = blockIdEntries[id];
|
||||
|
||||
delete endpoints[id];
|
||||
delete promises[id];
|
||||
delete blockIdEntries[id];
|
||||
|
||||
if (isError) {
|
||||
promise.failure();
|
||||
}
|
||||
else {
|
||||
promise.success(blockIdEntry);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
function createBlockId(partNum) {
|
||||
var digits = 5,
|
||||
zeros = new Array(digits + 1).join("0"),
|
||||
paddedPartNum = (zeros + partNum).slice(-digits);
|
||||
|
||||
return btoa(paddedPartNum);
|
||||
}
|
||||
|
||||
qq.extend(this, {
|
||||
method: method,
|
||||
upload: function(id, xhr, sasUri, partNum, blob) {
|
||||
var promise = new qq.Promise(),
|
||||
blockId = createBlockId(partNum);
|
||||
|
||||
promises[id] = promise;
|
||||
|
||||
options.log(qq.format("Submitting Put Block request for {} = part {}", id, partNum));
|
||||
|
||||
endpoints[id] = qq.format("{}&comp=block&blockid={}", sasUri, encodeURIComponent(blockId));
|
||||
blockIdEntries[id] = {part: partNum, id: blockId};
|
||||
|
||||
requester.initTransport(id)
|
||||
.withPayload(blob)
|
||||
.send(xhr);
|
||||
|
||||
return promise;
|
||||
}
|
||||
});
|
||||
};
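// Quick sanity sketch (assumes a browser btoa) of the block ID scheme used by
// createBlockId above: part numbers are zero-padded to 5 digits, then base64-encoded.
console.log(btoa("00000")); // "MDAwMDA=" -- what createBlockId(0) returns
console.log(btoa("00012")); // "MDAwMTI=" -- what createBlockId(12) returns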
|
@ -1,215 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* This defines FineUploaderBasic mode w/ support for uploading to Azure, which provides all the basic
|
||||
* functionality of Fine Uploader Basic as well as code to handle uploads directly to Azure.
|
||||
* Some inherited options and API methods have a special meaning in the context of the Azure uploader.
|
||||
*/
|
||||
(function(){
|
||||
"use strict";
|
||||
|
||||
qq.azure.FineUploaderBasic = function(o) {
|
||||
if (!qq.supportedFeatures.ajaxUploading) {
|
||||
throw new qq.Error("Uploading directly to Azure is not possible in this browser.");
|
||||
}
|
||||
|
||||
var options = {
|
||||
signature: {
|
||||
endpoint: null,
|
||||
|
||||
customHeaders: {}
|
||||
},
|
||||
|
||||
// 'uuid', 'filename', or a function which may be promissory
|
||||
blobProperties: {
|
||||
name: "uuid"
|
||||
},
|
||||
|
||||
uploadSuccess: {
|
||||
endpoint: null,
|
||||
|
||||
// In addition to the default params sent by Fine Uploader
|
||||
params: {},
|
||||
|
||||
customHeaders: {}
|
||||
},
|
||||
|
||||
chunking: {
|
||||
// If this is increased, Azure may respond with a 413
|
||||
partSize: 4000000,
|
||||
// Don't chunk files less than this size
|
||||
minFileSize: 4000001
|
||||
}
|
||||
};
|
||||
|
||||
// Replace any default options with user defined ones
|
||||
qq.extend(options, o, true);
|
||||
|
||||
// Call base module
|
||||
qq.FineUploaderBasic.call(this, options);
|
||||
|
||||
this._uploadSuccessParamsStore = this._createStore(this._options.uploadSuccess.params);
|
||||
|
||||
// This will hold callbacks for failed uploadSuccess requests that will be invoked on retry.
|
||||
// Indexed by file ID.
|
||||
this._failedSuccessRequestCallbacks = {};
|
||||
|
||||
// Holds blob names for file representations constructed from a session request.
|
||||
this._cannedBlobNames = {};
|
||||
};
|
||||
|
||||
// Inherit basic public & private API methods.
|
||||
qq.extend(qq.azure.FineUploaderBasic.prototype, qq.basePublicApi);
|
||||
qq.extend(qq.azure.FineUploaderBasic.prototype, qq.basePrivateApi);
|
||||
qq.extend(qq.azure.FineUploaderBasic.prototype, qq.nonTraditionalBasePublicApi);
|
||||
qq.extend(qq.azure.FineUploaderBasic.prototype, qq.nonTraditionalBasePrivateApi);
|
||||
|
||||
// Define public & private API methods for this module.
|
||||
qq.extend(qq.azure.FineUploaderBasic.prototype, {
|
||||
getBlobName: function(id) {
|
||||
/* jshint eqnull:true */
|
||||
if (this._cannedBlobNames[id] == null) {
|
||||
return this._handler.getThirdPartyFileId(id);
|
||||
}
|
||||
return this._cannedBlobNames[id];
|
||||
},
|
||||
|
||||
_getEndpointSpecificParams: function(id) {
|
||||
return {
|
||||
blob: this.getBlobName(id),
|
||||
uuid: this.getUuid(id),
|
||||
name: this.getName(id),
|
||||
container: this._endpointStore.get(id)
|
||||
};
|
||||
},
|
||||
|
||||
_createUploadHandler: function() {
|
||||
return qq.FineUploaderBasic.prototype._createUploadHandler.call(this,
|
||||
{
|
||||
signature: this._options.signature,
|
||||
onGetBlobName: qq.bind(this._determineBlobName, this),
|
||||
deleteBlob: qq.bind(this._deleteBlob, this, true)
|
||||
},
|
||||
"azure");
|
||||
},
|
||||
|
||||
_determineBlobName: function(id) {
|
||||
var self = this,
|
||||
blobNameOptionValue = this._options.blobProperties.name,
|
||||
uuid = this.getUuid(id),
|
||||
filename = this.getName(id),
|
||||
fileExtension = qq.getExtension(filename);
|
||||
|
||||
if (qq.isString(blobNameOptionValue)) {
|
||||
switch(blobNameOptionValue) {
|
||||
case "uuid":
|
||||
return new qq.Promise().success(uuid + "." + fileExtension);
|
||||
case "filename":
|
||||
return new qq.Promise().success(filename);
|
||||
default:
|
||||
return new qq.Promise().failure("Invalid blobName option value - " + blobNameOptionValue);

|
||||
}
|
||||
}
|
||||
else {
|
||||
return blobNameOptionValue.call(this, id);
|
||||
}
|
||||
},
|
||||
|
||||
_addCannedFile: function(sessionData) {
|
||||
var id;
|
||||
|
||||
/* jshint eqnull:true */
|
||||
if (sessionData.blobName == null) {
|
||||
throw new qq.Error("Did not find blob name property in server session response. This is required!");
|
||||
}
|
||||
else {
|
||||
id = qq.FineUploaderBasic.prototype._addCannedFile.apply(this, arguments);
|
||||
this._cannedBlobNames[id] = sessionData.blobName;
|
||||
}
|
||||
|
||||
return id;
|
||||
},
|
||||
|
||||
_deleteBlob: function(relatedToCancel, id) {
|
||||
var self = this,
|
||||
deleteBlobSasUri = {},
|
||||
blobUriStore = {
|
||||
get: function(id) {
|
||||
return self._endpointStore.get(id) + "/" + self.getBlobName(id);
|
||||
}
|
||||
},
|
||||
deleteFileEndpointStore = {
|
||||
get: function(id) {
|
||||
return deleteBlobSasUri[id];
|
||||
}
|
||||
},
|
||||
getSasSuccess = function(id, sasUri) {
|
||||
deleteBlobSasUri[id] = sasUri;
|
||||
deleteBlob.send(id);
|
||||
},
|
||||
getSasFailure = function(id, reason, xhr) {
|
||||
if (relatedToCancel) {
|
||||
self.log("Will cancel upload, but cannot remove uncommitted parts from Azure due to issue retrieving SAS", "error");
|
||||
qq.FineUploaderBasic.prototype._onCancel.call(self, id, self.getName(id));
|
||||
}
|
||||
else {
|
||||
self._onDeleteComplete(id, xhr, true);
|
||||
self._options.callbacks.onDeleteComplete(id, xhr, true);
|
||||
}
|
||||
},
|
||||
deleteBlob = new qq.azure.DeleteBlob({
|
||||
endpointStore: deleteFileEndpointStore,
|
||||
log: qq.bind(self.log, self),
|
||||
onDelete: function(id) {
|
||||
self._onDelete(id);
|
||||
self._options.callbacks.onDelete(id);
|
||||
},
|
||||
onDeleteComplete: function(id, xhrOrXdr, isError) {
|
||||
delete deleteBlobSasUri[id];
|
||||
|
||||
if (isError) {
|
||||
if (relatedToCancel) {
|
||||
self.log("Will cancel upload, but failed to remove uncommitted parts from Azure.", "error");
|
||||
}
|
||||
else {
|
||||
qq.azure.util.parseAzureError(xhrOrXdr.responseText, qq.bind(self.log, self));
|
||||
}
|
||||
}
|
||||
|
||||
if (relatedToCancel) {
|
||||
qq.FineUploaderBasic.prototype._onCancel.call(self, id, self.getName(id));
|
||||
self.log("Deleted uncommitted blob chunks for " + id);
|
||||
}
|
||||
else {
|
||||
self._onDeleteComplete(id, xhrOrXdr, isError);
|
||||
self._options.callbacks.onDeleteComplete(id, xhrOrXdr, isError);
|
||||
}
|
||||
}
|
||||
}),
|
||||
getSas = new qq.azure.GetSas({
|
||||
cors: this._options.cors,
|
||||
endpointStore: {
|
||||
get: function() {
|
||||
return self._options.signature.endpoint;
|
||||
}
|
||||
},
|
||||
restRequestVerb: deleteBlob.method,
|
||||
log: qq.bind(self.log, self)
|
||||
});
|
||||
|
||||
|
||||
getSas.request(id, blobUriStore.get(id)).then(
|
||||
qq.bind(getSasSuccess, self, id),
|
||||
qq.bind(getSasFailure, self, id));
|
||||
},
|
||||
|
||||
_createDeleteHandler: function() {
|
||||
var self = this;
|
||||
|
||||
return {
|
||||
sendDelete: function(id, uuid) {
|
||||
self._deleteBlob(false, id);
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
}());
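// Hedged construction sketch (endpoints are placeholders; getUuid is assumed from
// the core public API). Per the blobProperties comment above, `name` may also be a
// promissory function returning the blob name:
var azureUploader = new qq.azure.FineUploaderBasic({
    signature: { endpoint: "/azure/sas" },
    uploadSuccess: { endpoint: "/uploads/done" },
    blobProperties: {
        name: function(id) {
            return new qq.Promise().success("user-files/" + this.getUuid(id));
        }
    }
});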
|
@ -1,36 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* This defines FineUploader mode w/ support for uploading to Azure, which provides all the basic
|
||||
* functionality of Fine Uploader as well as code to handle uploads directly to Azure.
|
||||
* This module inherits all logic from UI & core mode and adds some UI-related logic
|
||||
* specific to the upload-to-Azure workflow. Some inherited options and API methods have a special meaning
|
||||
* in the context of the Azure uploader.
|
||||
*/
|
||||
(function(){
|
||||
"use strict";
|
||||
|
||||
qq.azure.FineUploader = function(o) {
|
||||
var options = {
|
||||
failedUploadTextDisplay: {
|
||||
mode: "custom"
|
||||
}
|
||||
};
|
||||
|
||||
// Replace any default options with user defined ones
|
||||
qq.extend(options, o, true);
|
||||
|
||||
// Inherit instance data from FineUploader, which should in turn inherit from azure.FineUploaderBasic.
|
||||
qq.FineUploader.call(this, options, "azure");
|
||||
};
|
||||
|
||||
// Inherit the API methods from qq.azure.FineUploaderBasic
|
||||
qq.extend(qq.azure.FineUploader.prototype, qq.azure.FineUploaderBasic.prototype);
|
||||
|
||||
// Inherit public and private API methods related to UI
|
||||
qq.extend(qq.azure.FineUploader.prototype, qq.uiPublicApi);
|
||||
qq.extend(qq.azure.FineUploader.prototype, qq.uiPrivateApi);
|
||||
|
||||
// Define public & private API methods for this module.
|
||||
qq.extend(qq.azure.FineUploader.prototype, {
|
||||
});
|
||||
}());
|
@ -1,55 +0,0 @@
|
||||
/*globals qq */
|
||||
qq.azure = qq.azure || {};
|
||||
qq.azure.util = qq.azure.util || (function() {
|
||||
"use strict";
|
||||
|
||||
return {
|
||||
AZURE_PARAM_PREFIX: "x-ms-meta-",
|
||||
|
||||
getParamsAsHeaders: function(params) {
|
||||
var headers = {};
|
||||
|
||||
qq.each(params, function(name, val) {
|
||||
var headerName = qq.azure.util.AZURE_PARAM_PREFIX + name;
|
||||
|
||||
if (qq.isFunction(val)) {
|
||||
headers[headerName] = encodeURIComponent(String(val()));
|
||||
}
|
||||
else if (qq.isObject(val)) {
|
||||
qq.extend(headers, qq.azure.util.getParamsAsHeaders(val));
|
||||
}
|
||||
else {
|
||||
headers[headerName] = encodeURIComponent(String(val));
|
||||
}
|
||||
});
|
||||
|
||||
return headers;
|
||||
},
|
||||
|
||||
parseAzureError: function(responseText, log) {
|
||||
var domParser = new DOMParser(),
|
||||
responseDoc = domParser.parseFromString(responseText, "application/xml"),
|
||||
errorTag = responseDoc.getElementsByTagName("Error")[0],
|
||||
errorDetails = {},
|
||||
codeTag, messageTag;
|
||||
|
||||
log("Received error response: " + responseText, "error");
|
||||
|
||||
if (errorTag) {
|
||||
messageTag = errorTag.getElementsByTagName("Message")[0];
|
||||
if (messageTag) {
|
||||
errorDetails.message = messageTag.textContent;
|
||||
}
|
||||
|
||||
codeTag = errorTag.getElementsByTagName("Code")[0];
|
||||
if (codeTag) {
|
||||
errorDetails.code = codeTag.textContent;
|
||||
}
|
||||
|
||||
log("Parsed Azure error: " + JSON.stringify(errorDetails), "error");
|
||||
|
||||
return errorDetails;
|
||||
}
|
||||
}
|
||||
};
|
||||
}());
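// Illustrative call (values are made up): params become "x-ms-meta-*" headers,
// nested objects are flattened, and every value is URI-encoded.
qq.azure.util.getParamsAsHeaders({uuid: "abc 123", nested: {tag: "x"}});
// -> { "x-ms-meta-uuid": "abc%20123", "x-ms-meta-tag": "x" }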
|
@ -1,19 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Placeholder for a Blob that will be generated on-demand.
|
||||
*
|
||||
* @param referenceBlob Parent of the generated blob
|
||||
* @param onCreate Function to invoke when the blob must be created. Must be promissory.
|
||||
* @constructor
|
||||
*/
|
||||
qq.BlobProxy = function(referenceBlob, onCreate) {
|
||||
"use strict";
|
||||
|
||||
qq.extend(this, {
|
||||
referenceBlob: referenceBlob,
|
||||
|
||||
create: function() {
|
||||
return onCreate(referenceBlob);
|
||||
}
|
||||
});
|
||||
};
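// Hedged sketch (referenceBlob is a placeholder): the onCreate callback must be
// promissory, e.g. generating a derived Blob on demand.
var lazyBlob = new qq.BlobProxy(referenceBlob, function(refBlob) {
    return new qq.Promise().success(refBlob.slice(0, 1024));
});
lazyBlob.create().then(function(generatedBlob) { /* upload generatedBlob */ });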
|
@ -1,169 +0,0 @@
|
||||
/*globals qq*/
|
||||
|
||||
/**
|
||||
* This module represents an upload or "Select File(s)" button. Its job is to embed an opaque `<input type="file">`
|
||||
* element as a child of a provided "container" element. This "container" element (`options.element`) is used to provide
|
||||
* a custom style for the `<input type="file">` element. The ability to change the style of the container element is also
|
||||
* provided here by adding CSS classes to the container on hover/focus.
|
||||
*
|
||||
* TODO Eliminate the mouseover and mouseout event handlers since the :hover CSS pseudo-class should now be
|
||||
* available on all supported browsers.
|
||||
*
|
||||
* @param o Options to override the default values
|
||||
*/
|
||||
qq.UploadButton = function(o) {
|
||||
"use strict";
|
||||
|
||||
|
||||
var disposeSupport = new qq.DisposeSupport(),
|
||||
|
||||
options = {
|
||||
// "Container" element
|
||||
element: null,
|
||||
|
||||
// If true adds `multiple` attribute to `<input type="file">`
|
||||
multiple: false,
|
||||
|
||||
// Corresponds to the `accept` attribute on the associated `<input type="file">`
|
||||
acceptFiles: null,
|
||||
|
||||
// A true value allows folders to be selected, if supported by the UA
|
||||
folders: false,
|
||||
|
||||
// `name` attribute of `<input type="file">`
|
||||
name: "qqfile",
|
||||
|
||||
// Called when the browser invokes the onchange handler on the `<input type="file">`
|
||||
onChange: function(input) {},
|
||||
|
||||
// **This option will be removed** in the future as the :hover CSS pseudo-class is available on all supported browsers
|
||||
hoverClass: "qq-upload-button-hover",
|
||||
|
||||
focusClass: "qq-upload-button-focus"
|
||||
},
|
||||
input, buttonId;
|
||||
|
||||
// Overrides any of the default option values with any option values passed in during construction.
|
||||
qq.extend(options, o);
|
||||
|
||||
buttonId = qq.getUniqueId();
|
||||
|
||||
// Embed an opaque `<input type="file">` element as a child of `options.element`.
|
||||
function createInput() {
|
||||
var input = document.createElement("input");
|
||||
|
||||
input.setAttribute(qq.UploadButton.BUTTON_ID_ATTR_NAME, buttonId);
|
||||
|
||||
if (options.multiple) {
|
||||
input.setAttribute("multiple", "");
|
||||
}
|
||||
|
||||
if (options.folders && qq.supportedFeatures.folderSelection) {
|
||||
// selecting directories is only possible in Chrome now, via a vendor-specific prefixed attribute
|
||||
input.setAttribute("webkitdirectory", "");
|
||||
}
|
||||
|
||||
if (options.acceptFiles) {
|
||||
input.setAttribute("accept", options.acceptFiles);
|
||||
}
|
||||
|
||||
input.setAttribute("type", "file");
|
||||
input.setAttribute("name", options.name);
|
||||
|
||||
qq(input).css({
|
||||
position: "absolute",
|
||||
// in Opera only 'browse' button
|
||||
// is clickable and it is located at
|
||||
// the right side of the input
|
||||
right: 0,
|
||||
top: 0,
|
||||
fontFamily: "Arial",
|
||||
// 4 persons reported this, the max values that worked for them were 243, 236, 236, 118
|
||||
fontSize: "118px",
|
||||
margin: 0,
|
||||
padding: 0,
|
||||
cursor: "pointer",
|
||||
opacity: 0
|
||||
});
|
||||
|
||||
options.element.appendChild(input);
|
||||
|
||||
disposeSupport.attach(input, "change", function(){
|
||||
options.onChange(input);
|
||||
});
|
||||
|
||||
// **These event handlers will be removed** in the future as the :hover CSS pseudo-class is available on all supported browsers
|
||||
disposeSupport.attach(input, "mouseover", function(){
|
||||
qq(options.element).addClass(options.hoverClass);
|
||||
});
|
||||
disposeSupport.attach(input, "mouseout", function(){
|
||||
qq(options.element).removeClass(options.hoverClass);
|
||||
});
|
||||
|
||||
disposeSupport.attach(input, "focus", function(){
|
||||
qq(options.element).addClass(options.focusClass);
|
||||
});
|
||||
disposeSupport.attach(input, "blur", function(){
|
||||
qq(options.element).removeClass(options.focusClass);
|
||||
});
|
||||
|
||||
// IE and Opera, unfortunately have 2 tab stops on file input
|
||||
// which is unacceptable in our case, disable keyboard access
|
||||
if (window.attachEvent) {
|
||||
// it is IE or Opera
|
||||
input.setAttribute("tabIndex", "-1");
|
||||
}
|
||||
|
||||
return input;
|
||||
}
|
||||
|
||||
// Make button suitable container for input
|
||||
qq(options.element).css({
|
||||
position: "relative",
|
||||
overflow: "hidden",
|
||||
// Make sure browse button is in the right side in Internet Explorer
|
||||
direction: "ltr"
|
||||
});
|
||||
|
||||
input = createInput();
|
||||
|
||||
|
||||
// Exposed API
|
||||
qq.extend(this, {
|
||||
getInput: function() {
|
||||
return input;
|
||||
},
|
||||
|
||||
getButtonId: function() {
|
||||
return buttonId;
|
||||
},
|
||||
|
||||
setMultiple: function(isMultiple) {
|
||||
if (isMultiple !== options.multiple) {
|
||||
if (isMultiple) {
|
||||
input.setAttribute("multiple", "");
|
||||
}
|
||||
else {
|
||||
input.removeAttribute("multiple");
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
setAcceptFiles: function(acceptFiles) {
|
||||
if (acceptFiles !== options.acceptFiles) {
|
||||
input.setAttribute("accept", acceptFiles);
|
||||
}
|
||||
},
|
||||
|
||||
reset: function(){
|
||||
if (input.parentNode){
|
||||
qq(input).remove();
|
||||
}
|
||||
|
||||
qq(options.element).removeClass(options.focusClass);
|
||||
input = createInput();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
qq.UploadButton.BUTTON_ID_ATTR_NAME = "qq-button-id";
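// Hedged usage sketch (the container element id and handler are placeholders):
var uploadButton = new qq.UploadButton({
    element: document.getElementById("select-files"),
    multiple: true,
    onChange: function(input) {
        // consume input.files here, then call uploadButton.reset() to re-arm the button
    }
});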
|
@ -1,74 +0,0 @@
|
||||
/*globals qq, XMLHttpRequest*/
|
||||
qq.DeleteFileAjaxRequester = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
options = {
|
||||
method: "DELETE",
|
||||
uuidParamName: "qquuid",
|
||||
endpointStore: {},
|
||||
maxConnections: 3,
|
||||
customHeaders: function(id) {return {};},
|
||||
paramsStore: {},
|
||||
demoMode: false,
|
||||
cors: {
|
||||
expected: false,
|
||||
sendCredentials: false
|
||||
},
|
||||
log: function(str, level) {},
|
||||
onDelete: function(id) {},
|
||||
onDeleteComplete: function(id, xhrOrXdr, isError) {}
|
||||
};
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
function getMandatedParams() {
|
||||
if (options.method.toUpperCase() === "POST") {
|
||||
return {
|
||||
"_method": "DELETE"
|
||||
};
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
acceptHeader: "application/json",
|
||||
validMethods: ["POST", "DELETE"],
|
||||
method: options.method,
|
||||
endpointStore: options.endpointStore,
|
||||
paramsStore: options.paramsStore,
|
||||
mandatedParams: getMandatedParams(),
|
||||
maxConnections: options.maxConnections,
|
||||
customHeaders: function(id) {
|
||||
return options.customHeaders.get(id);
|
||||
},
|
||||
demoMode: options.demoMode,
|
||||
log: options.log,
|
||||
onSend: options.onDelete,
|
||||
onComplete: options.onDeleteComplete,
|
||||
cors: options.cors
|
||||
}));
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
sendDelete: function(id, uuid, additionalMandatedParams) {
|
||||
var additionalOptions = additionalMandatedParams || {};
|
||||
|
||||
options.log("Submitting delete file request for " + id);
|
||||
|
||||
if (options.method === "DELETE") {
|
||||
requester.initTransport(id)
|
||||
.withPath(uuid)
|
||||
.withParams(additionalOptions)
|
||||
.send();
|
||||
}
|
||||
else {
|
||||
additionalOptions[options.uuidParamName] = uuid;
|
||||
requester.initTransport(id)
|
||||
.withParams(additionalOptions)
|
||||
.send();
|
||||
}
|
||||
}
|
||||
});
|
||||
};
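// Hedged usage sketch (endpoint and stores are placeholders; the stores are assumed
// to expose get(id)). With method "POST", "_method=DELETE" is added automatically;
// with "DELETE", the uuid is appended to the request path instead.
var deleteRequester = new qq.DeleteFileAjaxRequester({
    method: "POST",
    endpointStore: { get: function(id) { return "/uploads"; } },
    paramsStore: { get: function(id) { return {}; } },
    customHeaders: { get: function(id) { return {}; } },
    log: function(msg, level) {}
});
deleteRequester.sendDelete(0, "00000000-0000-0000-0000-000000000000");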
|
@ -1,492 +0,0 @@
|
||||
/*globals qq, document, CustomEvent*/
|
||||
qq.DragAndDrop = function(o) {
|
||||
"use strict";
|
||||
|
||||
var options,
|
||||
HIDE_ZONES_EVENT_NAME = "qq-hidezones",
|
||||
HIDE_BEFORE_ENTER_ATTR = "qq-hide-dropzone",
|
||||
uploadDropZones = [],
|
||||
droppedFiles = [],
|
||||
disposeSupport = new qq.DisposeSupport();
|
||||
|
||||
options = {
|
||||
dropZoneElements: [],
|
||||
allowMultipleItems: true,
|
||||
classes: {
|
||||
dropActive: null
|
||||
},
|
||||
callbacks: new qq.DragAndDrop.callbacks()
|
||||
};
|
||||
|
||||
qq.extend(options, o, true);
|
||||
|
||||
function uploadDroppedFiles(files, uploadDropZone) {
|
||||
// We need to convert the `FileList` to an actual `Array` to avoid iteration issues
|
||||
var filesAsArray = Array.prototype.slice.call(files);
|
||||
|
||||
options.callbacks.dropLog("Grabbed " + files.length + " dropped files.");
|
||||
uploadDropZone.dropDisabled(false);
|
||||
options.callbacks.processingDroppedFilesComplete(filesAsArray, uploadDropZone.getElement());
|
||||
}
|
||||
|
||||
function traverseFileTree(entry) {
|
||||
var parseEntryPromise = new qq.Promise();
|
||||
|
||||
if (entry.isFile) {
|
||||
entry.file(function(file) {
|
||||
var name = entry.name,
|
||||
fullPath = entry.fullPath,
|
||||
indexOfNameInFullPath = fullPath.indexOf(name);
|
||||
|
||||
// remove file name from full path string
|
||||
fullPath = fullPath.substr(0, indexOfNameInFullPath);
|
||||
|
||||
// remove leading slash in full path string
|
||||
if (fullPath.charAt(0) === "/") {
|
||||
fullPath = fullPath.substr(1);
|
||||
}
|
||||
|
||||
file.qqPath = fullPath;
|
||||
droppedFiles.push(file);
|
||||
parseEntryPromise.success();
|
||||
},
|
||||
function(fileError) {
|
||||
options.callbacks.dropLog("Problem parsing '" + entry.fullPath + "'. FileError code " + fileError.code + ".", "error");
|
||||
parseEntryPromise.failure();
|
||||
});
|
||||
}
|
||||
else if (entry.isDirectory) {
|
||||
getFilesInDirectory(entry).then(
|
||||
function allEntriesRead(entries) {
|
||||
var entriesLeft = entries.length;
|
||||
|
||||
qq.each(entries, function(idx, entry) {
|
||||
traverseFileTree(entry).done(function() {
|
||||
entriesLeft-=1;
|
||||
|
||||
if (entriesLeft === 0) {
|
||||
parseEntryPromise.success();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
if (!entries.length) {
|
||||
parseEntryPromise.success();
|
||||
}
|
||||
},
|
||||
|
||||
function readFailure(fileError) {
|
||||
options.callbacks.dropLog("Problem parsing '" + entry.fullPath + "'. FileError code " + fileError.code + ".", "error");
|
||||
parseEntryPromise.failure();
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
return parseEntryPromise;
|
||||
}
|
||||
|
||||
// Promissory. Guaranteed to read all files in the root of the passed directory.
|
||||
function getFilesInDirectory(entry, reader, accumEntries, existingPromise) {
|
||||
var promise = existingPromise || new qq.Promise(),
|
||||
dirReader = reader || entry.createReader();
|
||||
|
||||
dirReader.readEntries(
|
||||
function readSuccess(entries) {
|
||||
var newEntries = accumEntries ? accumEntries.concat(entries) : entries;
|
||||
|
||||
if (entries.length) {
|
||||
setTimeout(function() { // prevent stack overflow, however unlikely
|
||||
getFilesInDirectory(entry, dirReader, newEntries, promise);
|
||||
}, 0);
|
||||
}
|
||||
else {
|
||||
promise.success(newEntries);
|
||||
}
|
||||
},
|
||||
|
||||
promise.failure
|
||||
);
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
function handleDataTransfer(dataTransfer, uploadDropZone) {
|
||||
var pendingFolderPromises = [],
|
||||
handleDataTransferPromise = new qq.Promise();
|
||||
|
||||
options.callbacks.processingDroppedFiles();
|
||||
uploadDropZone.dropDisabled(true);
|
||||
|
||||
if (dataTransfer.files.length > 1 && !options.allowMultipleItems) {
|
||||
options.callbacks.processingDroppedFilesComplete([]);
|
||||
options.callbacks.dropError("tooManyFilesError", "");
|
||||
uploadDropZone.dropDisabled(false);
|
||||
handleDataTransferPromise.failure();
|
||||
}
|
||||
else {
|
||||
droppedFiles = [];
|
||||
|
||||
if (qq.isFolderDropSupported(dataTransfer)) {
|
||||
qq.each(dataTransfer.items, function(idx, item) {
|
||||
var entry = item.webkitGetAsEntry();
|
||||
|
||||
if (entry) {
|
||||
//due to a bug in Chrome's File System API impl - #149735
|
||||
if (entry.isFile) {
|
||||
droppedFiles.push(item.getAsFile());
|
||||
}
|
||||
|
||||
else {
|
||||
pendingFolderPromises.push(traverseFileTree(entry).done(function() {
|
||||
pendingFolderPromises.pop();
|
||||
if (pendingFolderPromises.length === 0) {
|
||||
handleDataTransferPromise.success();
|
||||
}
|
||||
}));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
else {
|
||||
droppedFiles = dataTransfer.files;
|
||||
}
|
||||
|
||||
if (pendingFolderPromises.length === 0) {
|
||||
handleDataTransferPromise.success();
|
||||
}
|
||||
}
|
||||
|
||||
return handleDataTransferPromise;
|
||||
}
|
||||
|
||||
function setupDropzone(dropArea) {
|
||||
var dropZone = new qq.UploadDropZone({
|
||||
HIDE_ZONES_EVENT_NAME: HIDE_ZONES_EVENT_NAME,
|
||||
element: dropArea,
|
||||
onEnter: function(e){
|
||||
qq(dropArea).addClass(options.classes.dropActive);
|
||||
e.stopPropagation();
|
||||
},
|
||||
onLeaveNotDescendants: function(e){
|
||||
qq(dropArea).removeClass(options.classes.dropActive);
|
||||
},
|
||||
onDrop: function(e){
|
||||
handleDataTransfer(e.dataTransfer, dropZone).then(
|
||||
function() {
|
||||
uploadDroppedFiles(droppedFiles, dropZone);
|
||||
},
|
||||
function() {
|
||||
options.callbacks.dropLog("Drop event DataTransfer parsing failed. No files will be uploaded.", "error");
|
||||
}
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
disposeSupport.addDisposer(function() {
|
||||
dropZone.dispose();
|
||||
});
|
||||
|
||||
qq(dropArea).hasAttribute(HIDE_BEFORE_ENTER_ATTR) && qq(dropArea).hide();
|
||||
|
||||
uploadDropZones.push(dropZone);
|
||||
|
||||
return dropZone;
|
||||
}
|
||||
|
||||
function isFileDrag(dragEvent) {
|
||||
var fileDrag;
|
||||
|
||||
qq.each(dragEvent.dataTransfer.types, function(key, val) {
|
||||
if (val === "Files") {
|
||||
fileDrag = true;
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
return fileDrag;
|
||||
}
|
||||
|
||||
// Attempt to determine when the file has left the document. It is not always possible to detect this
|
||||
// in all cases, but it is generally possible in all browsers, with a few exceptions.
|
||||
//
|
||||
// Exceptions:
|
||||
// * IE10+ & Safari: We can't detect a file leaving the document if the Explorer window housing the file
|
||||
// overlays the browser window.
|
||||
// * IE10+: If the file is dragged out of the window too quickly, IE does not set the expected values of the
|
||||
// event's X & Y properties.
|
||||
function leavingDocumentOut(e) {
|
||||
if (qq.firefox()) {
|
||||
return !e.relatedTarget;
|
||||
}
|
||||
|
||||
if (qq.safari()) {
|
||||
return e.x < 0 || e.y < 0;
|
||||
}
|
||||
|
||||
return e.x === 0 && e.y === 0;
|
||||
}
|
||||
|
||||
function setupDragDrop() {
|
||||
var dropZones = options.dropZoneElements,
|
||||
|
||||
maybeHideDropZones = function() {
|
||||
setTimeout(function() {
|
||||
qq.each(dropZones, function(idx, dropZone) {
|
||||
qq(dropZone).hasAttribute(HIDE_BEFORE_ENTER_ATTR) && qq(dropZone).hide();
|
||||
qq(dropZone).removeClass(options.classes.dropActive);
|
||||
});
|
||||
}, 10);
|
||||
};
|
||||
|
||||
qq.each(dropZones, function(idx, dropZone) {
|
||||
var uploadDropZone = setupDropzone(dropZone);
|
||||
|
||||
// IE <= 9 does not support the File API used for drag+drop uploads
|
||||
if (dropZones.length && (!qq.ie() || qq.ie10())) {
|
||||
disposeSupport.attach(document, "dragenter", function(e) {
|
||||
if (!uploadDropZone.dropDisabled() && isFileDrag(e)) {
|
||||
qq.each(dropZones, function(idx, dropZone) {
|
||||
// We can't apply styles to non-HTMLElements, since they lack the `style` property
|
||||
if (dropZone instanceof HTMLElement) {
|
||||
qq(dropZone).css({display: "block"});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
disposeSupport.attach(document, "dragleave", function(e) {
|
||||
if (leavingDocumentOut(e)) {
|
||||
maybeHideDropZones();
|
||||
}
|
||||
});
|
||||
|
||||
// Just in case we were not able to detect when a dragged file has left the document,
|
||||
// hide all relevant drop zones the next time the mouse enters the document.
|
||||
// Note that mouse events such as this one are not fired during drag operations.
|
||||
disposeSupport.attach(qq(document).children()[0], "mouseenter", function(e) {
|
||||
maybeHideDropZones();
|
||||
});
|
||||
|
||||
disposeSupport.attach(document, "drop", function(e){
|
||||
e.preventDefault();
|
||||
maybeHideDropZones();
|
||||
});
|
||||
|
||||
disposeSupport.attach(document, HIDE_ZONES_EVENT_NAME, maybeHideDropZones);
|
||||
}
|
||||
|
||||
setupDragDrop();
|
||||
|
||||
qq.extend(this, {
|
||||
setupExtraDropzone: function(element) {
|
||||
options.dropZoneElements.push(element);
|
||||
setupDropzone(element);
|
||||
},
|
||||
|
||||
removeDropzone: function(element) {
|
||||
var i,
|
||||
dzs = options.dropZoneElements;
|
||||
|
||||
for(i in dzs) {
|
||||
if (dzs[i] === element) {
|
||||
return dzs.splice(i, 1);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
dispose: function() {
|
||||
disposeSupport.dispose();
|
||||
qq.each(uploadDropZones, function(idx, dropZone) {
|
||||
dropZone.dispose();
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
qq.DragAndDrop.callbacks = function() {
|
||||
"use strict";
|
||||
|
||||
return {
|
||||
processingDroppedFiles: function() {},
|
||||
processingDroppedFilesComplete: function(files, targetEl) {},
|
||||
dropError: function(code, errorSpecifics) {
|
||||
qq.log("Drag & drop error code '" + code + " with these specifics: '" + errorSpecifics + "'", "error");
|
||||
},
|
||||
dropLog: function(message, level) {
|
||||
qq.log(message, level);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
qq.UploadDropZone = function(o){
|
||||
"use strict";
|
||||
|
||||
var disposeSupport = new qq.DisposeSupport(),
|
||||
options, element, preventDrop, dropOutsideDisabled;
|
||||
|
||||
options = {
|
||||
element: null,
|
||||
onEnter: function(e){},
|
||||
onLeave: function(e){},
|
||||
// is not fired when leaving element by hovering descendants
|
||||
onLeaveNotDescendants: function(e){},
|
||||
onDrop: function(e){}
|
||||
};
|
||||
|
||||
qq.extend(options, o);
|
||||
element = options.element;
|
||||
|
||||
function dragover_should_be_canceled(){
|
||||
return qq.safari() || (qq.firefox() && qq.windows());
|
||||
}
|
||||
|
||||
function disableDropOutside(e){
|
||||
// run only once for all instances
|
||||
if (!dropOutsideDisabled ){
|
||||
|
||||
// for these cases we need to catch onDrop to reset dropArea
|
||||
if (dragover_should_be_canceled()){
|
||||
disposeSupport.attach(document, "dragover", function(e){
|
||||
e.preventDefault();
|
||||
});
|
||||
} else {
|
||||
disposeSupport.attach(document, "dragover", function(e){
|
||||
if (e.dataTransfer){
|
||||
e.dataTransfer.dropEffect = "none";
|
||||
e.preventDefault();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
dropOutsideDisabled = true;
|
||||
}
|
||||
}
|
||||
|
||||
function isValidFileDrag(e){
|
||||
// e.dataTransfer currently causing IE errors
|
||||
// IE9 does NOT support file API, so drag-and-drop is not possible
|
||||
if (qq.ie() && !qq.ie10()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
var effectTest, dt = e.dataTransfer,
|
||||
// do not check dt.types.contains in webkit, because it crashes safari 4
|
||||
isSafari = qq.safari();
|
||||
|
||||
// dt.effectAllowed is none in Safari 5
|
||||
// dt.types.contains check is for firefox
|
||||
|
||||
// dt.effectAllowed crashes IE11 when files have been dragged from
|
||||
// the filesystem
|
||||
effectTest = (qq.ie10() || qq.ie11()) ? true : dt.effectAllowed !== "none";
|
||||
return dt && effectTest && (dt.files || (!isSafari && dt.types.contains && dt.types.contains("Files")));
|
||||
}
|
||||
|
||||
function isOrSetDropDisabled(isDisabled) {
|
||||
if (isDisabled !== undefined) {
|
||||
preventDrop = isDisabled;
|
||||
}
|
||||
return preventDrop;
|
||||
}
|
||||
|
||||
function triggerHidezonesEvent() {
|
||||
var hideZonesEvent;
|
||||
|
||||
function triggerUsingOldApi() {
|
||||
hideZonesEvent = document.createEvent("Event");
|
||||
hideZonesEvent.initEvent(options.HIDE_ZONES_EVENT_NAME, true, true);
|
||||
}
|
||||
|
||||
if (window.CustomEvent) {
|
||||
try {
|
||||
hideZonesEvent = new CustomEvent(options.HIDE_ZONES_EVENT_NAME);
|
||||
}
|
||||
catch (err) {
|
||||
triggerUsingOldApi();
|
||||
}
|
||||
}
|
||||
else {
|
||||
triggerUsingOldApi();
|
||||
}
|
||||
|
||||
document.dispatchEvent(hideZonesEvent);
|
||||
}
|
||||
|
||||
function attachEvents(){
|
||||
disposeSupport.attach(element, "dragover", function(e){
|
||||
if (!isValidFileDrag(e)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// dt.effectAllowed crashes IE11 when files have been dragged from
|
||||
// the filesystem
|
||||
var effect = (qq.ie() || qq.ie11()) ? null : e.dataTransfer.effectAllowed;
|
||||
if (effect === "move" || effect === "linkMove"){
|
||||
e.dataTransfer.dropEffect = "move"; // for FF (only move allowed)
|
||||
} else {
|
||||
e.dataTransfer.dropEffect = "copy"; // for Chrome
|
||||
}
|
||||
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
});
|
||||
|
||||
disposeSupport.attach(element, "dragenter", function(e){
|
||||
if (!isOrSetDropDisabled()) {
|
||||
if (!isValidFileDrag(e)) {
|
||||
return;
|
||||
}
|
||||
options.onEnter(e);
|
||||
}
|
||||
});
|
||||
|
||||
disposeSupport.attach(element, "dragleave", function(e){
|
||||
if (!isValidFileDrag(e)) {
|
||||
return;
|
||||
}
|
||||
|
||||
options.onLeave(e);
|
||||
|
||||
var relatedTarget = document.elementFromPoint(e.clientX, e.clientY);
|
||||
// do not fire when moving a mouse over a descendant
|
||||
if (qq(this).contains(relatedTarget)) {
|
||||
return;
|
||||
}
|
||||
|
||||
options.onLeaveNotDescendants(e);
|
||||
});
|
||||
|
||||
disposeSupport.attach(element, "drop", function(e) {
|
||||
if (!isOrSetDropDisabled()) {
|
||||
if (!isValidFileDrag(e)) {
|
||||
return;
|
||||
}
|
||||
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
options.onDrop(e);
|
||||
|
||||
triggerHidezonesEvent();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
disableDropOutside();
|
||||
attachEvents();
|
||||
|
||||
qq.extend(this, {
|
||||
dropDisabled: function(isDisabled) {
|
||||
return isOrSetDropDisabled(isDisabled);
|
||||
},
|
||||
|
||||
dispose: function() {
|
||||
disposeSupport.dispose();
|
||||
},
|
||||
|
||||
getElement: function() {
|
||||
return element;
|
||||
}
|
||||
});
|
||||
};
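// Hedged wiring sketch (element id and class name are placeholders); the callbacks
// object follows qq.DragAndDrop.callbacks() defined above.
var dndCallbacks = new qq.DragAndDrop.callbacks();
dndCallbacks.processingDroppedFilesComplete = function(files, targetEl) {
    // hand the dropped File objects to an uploader here
};
var dnd = new qq.DragAndDrop({
    dropZoneElements: [document.getElementById("drop-area")],
    classes: { dropActive: "qq-upload-drop-area-active" },
    callbacks: dndCallbacks
});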
|
@ -1,13 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Fine Uploader top-level Error container. Inherits from `Error`.
|
||||
*/
|
||||
(function() {
|
||||
"use strict";
|
||||
|
||||
qq.Error = function(message) {
|
||||
this.message = "[Fine Uploader " + qq.version + "] " + message;
|
||||
};
|
||||
|
||||
qq.Error.prototype = new Error();
|
||||
}());
|
@ -1,148 +0,0 @@
|
||||
/* globals qq */
|
||||
qq.supportedFeatures = (function () {
|
||||
"use strict";
|
||||
|
||||
var supportsUploading,
|
||||
supportsUploadingBlobs,
|
||||
supportsAjaxFileUploading,
|
||||
supportsFolderDrop,
|
||||
supportsChunking,
|
||||
supportsResume,
|
||||
supportsUploadViaPaste,
|
||||
supportsUploadCors,
|
||||
supportsDeleteFileXdr,
|
||||
supportsDeleteFileCorsXhr,
|
||||
supportsDeleteFileCors,
|
||||
supportsFolderSelection,
|
||||
supportsImagePreviews,
|
||||
supportsUploadProgress;
|
||||
|
||||
|
||||
function testSupportsFileInputElement() {
|
||||
var supported = true,
|
||||
tempInput;
|
||||
|
||||
try {
|
||||
tempInput = document.createElement("input");
|
||||
tempInput.type = "file";
|
||||
qq(tempInput).hide();
|
||||
|
||||
if (tempInput.disabled) {
|
||||
supported = false;
|
||||
}
|
||||
}
|
||||
catch (ex) {
|
||||
supported = false;
|
||||
}
|
||||
|
||||
return supported;
|
||||
}
|
||||
|
||||
//only way to test for Filesystem API support since webkit does not expose the DataTransfer interface
|
||||
function isChrome21OrHigher() {
|
||||
return (qq.chrome() || qq.opera()) &&
|
||||
navigator.userAgent.match(/Chrome\/[2][1-9]|Chrome\/[3-9][0-9]/) !== null;
|
||||
}
|
||||
|
||||
//only way to test for complete Clipboard API support at this time
|
||||
function isChrome14OrHigher() {
|
||||
return (qq.chrome() || qq.opera()) &&
|
||||
navigator.userAgent.match(/Chrome\/[1][4-9]|Chrome\/[2-9][0-9]/) !== null;
|
||||
}
|
||||
|
||||
//Ensure we can send cross-origin `XMLHttpRequest`s
|
||||
function isCrossOriginXhrSupported() {
|
||||
if (window.XMLHttpRequest) {
|
||||
var xhr = qq.createXhrInstance();
|
||||
|
||||
//Commonly accepted test for XHR CORS support.
|
||||
return xhr.withCredentials !== undefined;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
//Test for (terrible) cross-origin ajax transport fallback for IE9 and IE8
|
||||
function isXdrSupported() {
|
||||
return window.XDomainRequest !== undefined;
|
||||
}
|
||||
|
||||
// CORS Ajax requests are supported if it is either possible to send credentialed `XMLHttpRequest`s,
|
||||
// or if `XDomainRequest` is an available alternative.
|
||||
function isCrossOriginAjaxSupported() {
|
||||
if (isCrossOriginXhrSupported()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return isXdrSupported();
|
||||
}
|
||||
|
||||
function isFolderSelectionSupported() {
|
||||
// We know that folder selection is only supported in Chrome via this proprietary attribute for now
|
||||
return document.createElement("input").webkitdirectory !== undefined;
|
||||
}
|
||||
|
||||
|
||||
supportsUploading = testSupportsFileInputElement();
|
||||
|
||||
supportsAjaxFileUploading = supportsUploading && qq.isXhrUploadSupported();
|
||||
|
||||
supportsUploadingBlobs = supportsAjaxFileUploading && !qq.androidStock();
|
||||
|
||||
supportsFolderDrop = supportsAjaxFileUploading && isChrome21OrHigher();
|
||||
|
||||
supportsChunking = supportsAjaxFileUploading && qq.isFileChunkingSupported();
|
||||
|
||||
supportsResume = supportsAjaxFileUploading && supportsChunking && !!window.localStorage;
|
||||
|
||||
supportsUploadViaPaste = supportsAjaxFileUploading && isChrome14OrHigher();
|
||||
|
||||
supportsUploadCors = supportsUploading && (window.postMessage !== undefined || supportsAjaxFileUploading);
|
||||
|
||||
supportsDeleteFileCorsXhr = isCrossOriginXhrSupported();
|
||||
|
||||
supportsDeleteFileXdr = isXdrSupported();
|
||||
|
||||
supportsDeleteFileCors = isCrossOriginAjaxSupported();
|
||||
|
||||
supportsFolderSelection = isFolderSelectionSupported();
|
||||
|
||||
supportsImagePreviews = supportsAjaxFileUploading && window.FileReader !== undefined;
|
||||
|
||||
supportsUploadProgress = (function() {
|
||||
if (supportsAjaxFileUploading) {
|
||||
return !qq.androidStock() &&
|
||||
!(qq.ios() && navigator.userAgent.indexOf("CriOS") >= 0);
|
||||
}
|
||||
return false;
|
||||
}());
|
||||
|
||||
|
||||
return {
|
||||
ajaxUploading: supportsAjaxFileUploading,
|
||||
blobUploading: supportsUploadingBlobs,
|
||||
canDetermineSize: supportsAjaxFileUploading,
|
||||
chunking: supportsChunking,
|
||||
deleteFileCors: supportsDeleteFileCors,
|
||||
deleteFileCorsXdr: supportsDeleteFileXdr, //NOTE: will also return true in IE10, where XDR is also supported
|
||||
deleteFileCorsXhr: supportsDeleteFileCorsXhr,
|
||||
fileDrop: supportsAjaxFileUploading, //NOTE: will also return true for touch-only devices. It's not currently possible to accurately test for touch-only devices
|
||||
folderDrop: supportsFolderDrop,
|
||||
folderSelection: supportsFolderSelection,
|
||||
imagePreviews: supportsImagePreviews,
|
||||
imageValidation: supportsImagePreviews,
|
||||
itemSizeValidation: supportsAjaxFileUploading,
|
||||
pause: supportsChunking,
|
||||
progressBar: supportsUploadProgress,
|
||||
resume: supportsResume,
|
||||
scaling: supportsImagePreviews && supportsUploadingBlobs,
|
||||
tiffPreviews: qq.safari(), // Not the best solution, but simple and probably accurate enough (for now)
|
||||
unlimitedScaledImageSize: !qq.ios(), // false simply indicates that there is some known limit
|
||||
uploading: supportsUploading,
|
||||
uploadCors: supportsUploadCors,
|
||||
uploadCustomHeaders: supportsAjaxFileUploading,
|
||||
uploadNonMultipart: supportsAjaxFileUploading,
|
||||
uploadViaPaste: supportsUploadViaPaste
|
||||
};
|
||||
|
||||
}());
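// Hedged feature-detection sketch mirroring the checks used elsewhere in this code:
if (!qq.supportedFeatures.ajaxUploading) {
    // direct-to-Azure uploads are impossible here (see qq.azure.FineUploaderBasic)
}
if (qq.supportedFeatures.chunking && qq.supportedFeatures.resume) {
    // chunked, resumable uploads can be enabled
}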
|
@ -1,165 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Module that handles support for existing forms.
|
||||
*
|
||||
* @param options Options passed from the integrator-supplied options related to form support.
|
||||
* @param startUpload Callback to invoke when files "stored" should be uploaded.
|
||||
* @param log Proxy for the logger
|
||||
* @constructor
|
||||
*/
|
||||
qq.FormSupport = function(options, startUpload, log) {
|
||||
"use strict";
|
||||
var self = this,
|
||||
interceptSubmit = options.interceptSubmit,
|
||||
formEl = options.element,
|
||||
autoUpload = options.autoUpload;
|
||||
|
||||
// Available on the public API associated with this module.
|
||||
qq.extend(this, {
|
||||
// To be used by the caller to determine if the endpoint will be determined by some processing
|
||||
// that occurs in this module, such as if the form has an action attribute.
|
||||
// Ignore if `attachToForm === false`.
|
||||
newEndpoint: null,
|
||||
|
||||
// To be used by the caller to determine if auto uploading should be allowed.
|
||||
// Ignore if `attachToForm === false`.
|
||||
newAutoUpload: autoUpload,
|
||||
|
||||
// true if a form was detected and is being tracked by this module
|
||||
attachedToForm: false,
|
||||
|
||||
// Returns an object with names and values for all valid form elements associated with the attached form.
|
||||
getFormInputsAsObject: function() {
|
||||
/* jshint eqnull:true */
|
||||
if (formEl == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return self._form2Obj(formEl);
|
||||
}
|
||||
});
|
||||
|
||||
// If the form contains an action attribute, this should be the new upload endpoint.
|
||||
function determineNewEndpoint(formEl) {
|
||||
if (formEl.getAttribute("action")) {
|
||||
self.newEndpoint = formEl.getAttribute("action");
|
||||
}
|
||||
}
|
||||
|
||||
// Return true only if the form is valid, or if we cannot make this determination.
|
||||
// If the form is invalid, ensure invalid field(s) are highlighted in the UI.
|
||||
function validateForm(formEl, nativeSubmit) {
|
||||
if (formEl.checkValidity && !formEl.checkValidity()) {
|
||||
log("Form did not pass validation checks - will not upload.", "error");
|
||||
nativeSubmit();
|
||||
}
|
||||
else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Intercept form submit attempts, unless the integrator has told us not to do this.
|
||||
function maybeUploadOnSubmit(formEl) {
|
||||
var nativeSubmit = formEl.submit;
|
||||
|
||||
// Intercept and squelch submit events.
|
||||
qq(formEl).attach("submit", function(event) {
|
||||
event = event || window.event;
|
||||
|
||||
if (event.preventDefault) {
|
||||
event.preventDefault();
|
||||
}
|
||||
else {
|
||||
event.returnValue = false;
|
||||
}
|
||||
|
||||
validateForm(formEl, nativeSubmit) && startUpload();
|
||||
});
|
||||
|
||||
// The form's `submit()` function may be called instead (i.e. via jQuery.submit()).
|
||||
// Intercept that too.
|
||||
formEl.submit = function() {
|
||||
validateForm(formEl, nativeSubmit) && startUpload();
|
||||
};
|
||||
}
|
||||
|
||||
// If the element value passed from the uploader is a string, assume it is an element ID - select it.
|
||||
// The rest of the code in this module depends on this being an HTMLElement.
|
||||
function determineFormEl(formEl) {
|
||||
if (formEl) {
|
||||
if (qq.isString(formEl)) {
|
||||
formEl = document.getElementById(formEl);
|
||||
}
|
||||
|
||||
if (formEl) {
|
||||
log("Attaching to form element.");
|
||||
determineNewEndpoint(formEl);
|
||||
interceptSubmit && maybeUploadOnSubmit(formEl);
|
||||
}
|
||||
}
|
||||
|
||||
return formEl;
|
||||
}
|
||||
|
||||
formEl = determineFormEl(formEl);
|
||||
this.attachedToForm = !!formEl;
|
||||
};
|
||||
|
||||
qq.extend(qq.FormSupport.prototype, {
|
||||
// Converts all relevant form fields to key/value pairs. This is meant to mimic the data a browser will
|
||||
// construct from a given form when the form is submitted.
|
||||
_form2Obj: function(form) {
|
||||
"use strict";
|
||||
var obj = {},
|
||||
notIrrelevantType = function(type) {
|
||||
var irrelevantTypes = [
|
||||
"button",
|
||||
"image",
|
||||
"reset",
|
||||
"submit"
|
||||
];
|
||||
|
||||
return qq.indexOf(irrelevantTypes, type.toLowerCase()) < 0;
|
||||
},
|
||||
radioOrCheckbox = function(type) {
|
||||
return qq.indexOf(["checkbox", "radio"], type.toLowerCase()) >= 0;
|
||||
},
|
||||
ignoreValue = function(el) {
|
||||
if (radioOrCheckbox(el.type) && !el.checked) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return el.disabled && el.type.toLowerCase() !== "hidden";
|
||||
},
|
||||
selectValue = function(select) {
|
||||
var value = null;
|
||||
|
||||
qq.each(qq(select).children(), function(idx, child) {
|
||||
if (child.tagName.toLowerCase() === "option" && child.selected) {
|
||||
value = child.value;
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
return value;
|
||||
};
|
||||
|
||||
qq.each(form.elements, function(idx, el) {
|
||||
if ((qq.isInput(el, true) || el.tagName.toLowerCase() === "textarea") &&
|
||||
notIrrelevantType(el.type) &&
|
||||
!ignoreValue(el)) {
|
||||
|
||||
obj[el.name] = el.value;
|
||||
}
|
||||
else if (el.tagName.toLowerCase() === "select" && !ignoreValue(el)) {
|
||||
var value = selectValue(el);
|
||||
|
||||
if (value !== null) {
|
||||
obj[el.name] = value;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return obj;
|
||||
}
|
||||
});
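// Hedged usage sketch (the form id is a placeholder; startUpload and log are
// normally supplied by the uploader core):
var formSupport = new qq.FormSupport(
    {element: "upload-form", interceptSubmit: true, autoUpload: false},
    function startUpload() { /* begin uploading queued files */ },
    function log(msg, level) {}
);
if (formSupport.attachedToForm) {
    var formParams = formSupport.getFormInputsAsObject();
}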
|
@ -1,110 +0,0 @@
|
||||
/*globals qq */
|
||||
qq.Identify = function(fileOrBlob, log) {
|
||||
"use strict";
|
||||
|
||||
function isIdentifiable(magicBytes, questionableBytes) {
|
||||
var identifiable = false,
|
||||
magicBytesEntries = [].concat(magicBytes);
|
||||
|
||||
qq.each(magicBytesEntries, function(idx, magicBytesArrayEntry) {
|
||||
if (questionableBytes.indexOf(magicBytesArrayEntry) === 0) {
|
||||
identifiable = true;
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
return identifiable;
|
||||
}
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* Determines if a Blob can be displayed natively in the current browser. This is done by reading magic
|
||||
* bytes at the beginning of the file, so this is an asynchronous operation. Before we attempt to read the
|
||||
* file, we will examine the blob's type attribute to save CPU cycles.
|
||||
*
|
||||
* @returns {qq.Promise} Promise that is fulfilled when identification is complete.
|
||||
* If successful, the MIME string is passed to the success handler.
|
||||
*/
|
||||
isPreviewable: function() {
|
||||
var self = this,
|
||||
identifier = new qq.Promise(),
|
||||
previewable = false,
|
||||
name = fileOrBlob.name === undefined ? "blob" : fileOrBlob.name;
|
||||
|
||||
log(qq.format("Attempting to determine if {} can be rendered in this browser", name));
|
||||
|
||||
log("First pass: check type attribute of blob object.");
|
||||
|
||||
if (this.isPreviewableSync()) {
|
||||
log("Second pass: check for magic bytes in file header.");
|
||||
|
||||
qq.readBlobToHex(fileOrBlob, 0, 4).then(function(hex) {
|
||||
qq.each(self.PREVIEWABLE_MIME_TYPES, function(mime, bytes) {
|
||||
if (isIdentifiable(bytes, hex)) {
|
||||
// Safari is the only supported browser that can deal with TIFFs natively,
|
||||
// so, if this is a TIFF and the UA isn't Safari, declare this file "non-previewable".
|
||||
if (mime !== "image/tiff" || qq.supportedFeatures.tiffPreviews) {
|
||||
previewable = true;
|
||||
identifier.success(mime);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
log(qq.format("'{}' is {} able to be rendered in this browser", name, previewable ? "" : "NOT"));
|
||||
|
||||
if (!previewable) {
|
||||
identifier.failure();
|
||||
}
|
||||
},
|
||||
function() {
|
||||
log("Error reading file w/ name '" + name + "'. Not able to be rendered in this browser.");
|
||||
identifier.failure();
|
||||
});
|
||||
}
|
||||
else {
|
||||
identifier.failure();
|
||||
}
|
||||
|
||||
return identifier;
|
||||
},
|
||||
|
||||
/**
|
||||
* Determines if a Blob can be displayed natively in the current browser. This is done by checking the
|
||||
* blob's type attribute. This is a synchronous operation, useful for situations where an asynchronous operation
|
||||
* would be challenging to support. Note that the blob's type property is not as accurate as reading the
|
||||
* file's magic bytes.
|
||||
*
|
||||
* @returns {Boolean} true if the blob can be rendered in the current browser
|
||||
*/
|
||||
isPreviewableSync: function() {
|
||||
var fileMime = fileOrBlob.type,
|
||||
// Assumption: This will only ever be executed in browsers that support `Object.keys`.
|
||||
isRecognizedImage = qq.indexOf(Object.keys(this.PREVIEWABLE_MIME_TYPES), fileMime) >= 0,
|
||||
previewable = false,
|
||||
name = fileOrBlob.name === undefined ? "blob" : fileOrBlob.name;
|
||||
|
||||
if (isRecognizedImage) {
|
||||
if (fileMime === "image/tiff") {
|
||||
previewable = qq.supportedFeatures.tiffPreviews;
|
||||
}
|
||||
else {
|
||||
previewable = true;
|
||||
}
|
||||
}
|
||||
|
||||
!previewable && log(name + " is not previewable in this browser per the blob's type attr");
|
||||
|
||||
return previewable;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
qq.Identify.prototype.PREVIEWABLE_MIME_TYPES = {
|
||||
"image/jpeg": "ffd8ff",
|
||||
"image/gif": "474946",
|
||||
"image/png": "89504e",
|
||||
"image/bmp": "424d",
|
||||
"image/tiff": ["49492a00", "4d4d002a"]
|
||||
};
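// Usage sketch, not part of the original file: checking whether a user-selected file can
// be previewed natively, using only the qq.Identify API defined above. The input element
// id is hypothetical.
document.getElementById("file-input").onchange = function() {
    var file = this.files[0],
        identifier = new qq.Identify(file, function(msg, level) {
            console.log(level || "info", msg);
        });

    identifier.isPreviewable().then(
        function(mime) { console.log("Previewable as " + mime); },
        function() { console.log("Not previewable in this browser"); }
    );
};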
|
@ -1,7 +0,0 @@
(function() {
    "use strict";
    var match = /(\{.*\})/.exec(document.body.innerHTML);
    if (match) {
        parent.postMessage(match[1], "*");
    }
}());
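// Companion sketch, not part of the original file: the parent-window side that the
// snippet above assumes. It listens for the JSON string relayed from the iframe via
// postMessage; in production the message origin should be checked rather than accepting "*".
window.addEventListener("message", function(event) {
    var response;

    try {
        response = JSON.parse(event.data);
    }
    catch (error) {
        return; // not the relayed upload response
    }

    console.log("Response relayed from iframe:", response);
}, false);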
@ -1,207 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* EXIF image data parser. Currently only parses the Orientation tag value,
|
||||
* but this may be expanded to other tags in the future.
|
||||
*
|
||||
* @param fileOrBlob Attempt to parse EXIF data in this `Blob`
|
||||
* @constructor
|
||||
*/
|
||||
qq.Exif = function(fileOrBlob, log) {
|
||||
"use strict";
|
||||
|
||||
// Orientation is the only tag parsed here at this time.
|
||||
var TAG_IDS = [274],
|
||||
TAG_INFO = {
|
||||
274: {
|
||||
name: "Orientation",
|
||||
bytes: 2
|
||||
}
|
||||
};
|
||||
|
||||
// Convert a little endian (hex string) to big endian (decimal).
|
||||
function parseLittleEndian(hex) {
|
||||
var result = 0,
|
||||
pow = 0;
|
||||
|
||||
while (hex.length > 0) {
|
||||
result += parseInt(hex.substring(0, 2), 16) * Math.pow(2, pow);
|
||||
hex = hex.substring(2, hex.length);
|
||||
pow += 8;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Find the byte offset of Application Segment 1 (EXIF).
|
||||
// External callers need not supply any arguments.
|
||||
function seekToApp1(offset, promise) {
|
||||
var theOffset = offset,
|
||||
thePromise = promise;
|
||||
if (theOffset === undefined) {
|
||||
theOffset = 2;
|
||||
thePromise = new qq.Promise();
|
||||
}
|
||||
|
||||
qq.readBlobToHex(fileOrBlob, theOffset, 4).then(function(hex) {
|
||||
var match = /^ffe([0-9])/.exec(hex);
|
||||
if (match) {
|
||||
if (match[1] !== "1") {
|
||||
var segmentLength = parseInt(hex.slice(4, 8), 16);
|
||||
seekToApp1(theOffset + segmentLength + 2, thePromise);
|
||||
}
|
||||
else {
|
||||
thePromise.success(theOffset);
|
||||
}
|
||||
}
|
||||
else {
|
||||
thePromise.failure("No EXIF header to be found!");
|
||||
}
|
||||
});
|
||||
|
||||
return thePromise;
|
||||
}
|
||||
|
||||
// Find the byte offset of Application Segment 1 (EXIF) for valid JPEGs only.
|
||||
function getApp1Offset() {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
qq.readBlobToHex(fileOrBlob, 0, 6).then(function(hex) {
|
||||
if (hex.indexOf("ffd8") !== 0) {
|
||||
promise.failure("Not a valid JPEG!");
|
||||
}
|
||||
else {
|
||||
seekToApp1().then(function(offset) {
|
||||
promise.success(offset);
|
||||
},
|
||||
function(error) {
|
||||
promise.failure(error);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
// Determine the byte ordering of the EXIF header.
|
||||
function isLittleEndian(app1Start) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
qq.readBlobToHex(fileOrBlob, app1Start + 10, 2).then(function(hex) {
|
||||
promise.success(hex === "4949");
|
||||
});
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
// Determine the number of directory entries in the EXIF header.
|
||||
function getDirEntryCount(app1Start, littleEndian) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
qq.readBlobToHex(fileOrBlob, app1Start + 18, 2).then(function(hex) {
|
||||
if (littleEndian) {
|
||||
return promise.success(parseLittleEndian(hex));
|
||||
}
|
||||
else {
|
||||
promise.success(parseInt(hex, 16));
|
||||
}
|
||||
});
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
// Get the IFD portion of the EXIF header as a hex string.
|
||||
function getIfd(app1Start, dirEntries) {
|
||||
var offset = app1Start + 20,
|
||||
bytes = dirEntries * 12;
|
||||
|
||||
return qq.readBlobToHex(fileOrBlob, offset, bytes);
|
||||
}
|
||||
|
||||
// Obtain an array of all directory entries (as hex strings) in the EXIF header.
|
||||
function getDirEntries(ifdHex) {
|
||||
var entries = [],
|
||||
offset = 0;
|
||||
|
||||
while (offset+24 <= ifdHex.length) {
|
||||
entries.push(ifdHex.slice(offset, offset + 24));
|
||||
offset += 24;
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
// Obtain values for all relevant tags and return them.
|
||||
function getTagValues(littleEndian, dirEntries) {
|
||||
var TAG_VAL_OFFSET = 16,
|
||||
tagsToFind = qq.extend([], TAG_IDS),
|
||||
vals = {};
|
||||
|
||||
qq.each(dirEntries, function(idx, entry) {
|
||||
var idHex = entry.slice(0, 4),
|
||||
id = littleEndian ? parseLittleEndian(idHex) : parseInt(idHex, 16),
|
||||
tagsToFindIdx = tagsToFind.indexOf(id),
|
||||
tagValHex, tagName, tagValLength;
|
||||
|
||||
if (tagsToFindIdx >= 0) {
|
||||
tagName = TAG_INFO[id].name;
|
||||
tagValLength = TAG_INFO[id].bytes;
|
||||
tagValHex = entry.slice(TAG_VAL_OFFSET, TAG_VAL_OFFSET + (tagValLength*2));
|
||||
vals[tagName] = littleEndian ? parseLittleEndian(tagValHex) : parseInt(tagValHex, 16);
|
||||
|
||||
tagsToFind.splice(tagsToFindIdx, 1);
|
||||
}
|
||||
|
||||
if (tagsToFind.length === 0) {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
return vals;
|
||||
}
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* Attempt to parse the EXIF header for the `Blob` associated with this instance.
|
||||
*
|
||||
* @returns {qq.Promise} To be fulfilled when the parsing is complete.
|
||||
* If successful, the parsed EXIF header as an object will be included.
|
||||
*/
|
||||
parse: function() {
|
||||
var parser = new qq.Promise(),
|
||||
onParseFailure = function(message) {
|
||||
log(qq.format("EXIF header parse failed: '{}' ", message));
|
||||
parser.failure(message);
|
||||
};
|
||||
|
||||
getApp1Offset().then(function(app1Offset) {
|
||||
log(qq.format("Moving forward with EXIF header parsing for '{}'", fileOrBlob.name === undefined ? "blob" : fileOrBlob.name));
|
||||
|
||||
isLittleEndian(app1Offset).then(function(littleEndian) {
|
||||
|
||||
log(qq.format("EXIF Byte order is {} endian", littleEndian ? "little" : "big"));
|
||||
|
||||
getDirEntryCount(app1Offset, littleEndian).then(function(dirEntryCount) {
|
||||
|
||||
log(qq.format("Found {} APP1 directory entries", dirEntryCount));
|
||||
|
||||
getIfd(app1Offset, dirEntryCount).then(function(ifdHex) {
|
||||
var dirEntries = getDirEntries(ifdHex),
|
||||
tagValues = getTagValues(littleEndian, dirEntries);
|
||||
|
||||
log("Successfully parsed some EXIF tags");
|
||||
|
||||
parser.success(tagValues);
|
||||
}, onParseFailure);
|
||||
}, onParseFailure);
|
||||
}, onParseFailure);
|
||||
}, onParseFailure);
|
||||
|
||||
return parser;
|
||||
}
|
||||
});
|
||||
|
||||
/*<testing>*/
|
||||
this._testing = {};
|
||||
this._testing.parseLittleEndian = parseLittleEndian;
|
||||
/*</testing>*/
|
||||
};
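// Usage sketch, not part of the original file: reading the Orientation tag from a JPEG.
// `jpegFile` is a hypothetical File or Blob selected elsewhere. As a worked example of
// parseLittleEndian above: "0600" -> 0x06 * 2^0 + 0x00 * 2^8 = 6, the same value a
// big-endian header encodes as "0006".
var exifParser = new qq.Exif(jpegFile, function(msg, level) {
    console.log(level || "info", msg);
});

exifParser.parse().then(
    function(tags) {
        console.log("Orientation:", tags.Orientation); // e.g. 6 = needs a 90-degree CW rotation
    },
    function(reason) {
        console.log("EXIF parsing failed: " + reason);
    }
);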
|
@ -1,305 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* Draws a thumbnail of a Blob/File/URL onto an <img> or <canvas>.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
qq.ImageGenerator = function(log) {
|
||||
"use strict";
|
||||
|
||||
function isImg(el) {
|
||||
return el.tagName.toLowerCase() === "img";
|
||||
}
|
||||
|
||||
function isCanvas(el) {
|
||||
return el.tagName.toLowerCase() === "canvas";
|
||||
}
|
||||
|
||||
function isImgCorsSupported() {
|
||||
return new Image().crossOrigin !== undefined;
|
||||
}
|
||||
|
||||
function isCanvasSupported() {
|
||||
var canvas = document.createElement("canvas");
|
||||
|
||||
return canvas.getContext && canvas.getContext("2d");
|
||||
}
|
||||
|
||||
// This is only meant to determine the MIME type of a renderable image file.
|
||||
// It is used to ensure images drawn from a URL that have transparent backgrounds
|
||||
// are rendered correctly, among other things.
|
||||
function determineMimeOfFileName(nameWithPath) {
|
||||
/*jshint -W015 */
|
||||
var pathSegments = nameWithPath.split("/"),
|
||||
name = pathSegments[pathSegments.length - 1],
|
||||
extension = qq.getExtension(name);
|
||||
|
||||
extension = extension && extension.toLowerCase();
|
||||
|
||||
switch(extension) {
|
||||
case "jpeg":
|
||||
case "jpg":
|
||||
return "image/jpeg";
|
||||
case "png":
|
||||
return "image/png";
|
||||
case "bmp":
|
||||
return "image/bmp";
|
||||
case "gif":
|
||||
return "image/gif";
|
||||
case "tiff":
|
||||
case "tif":
|
||||
return "image/tiff";
|
||||
}
|
||||
}
|
||||
|
||||
// This will likely not work correctly in IE8 and older.
|
||||
// It's only used as part of a formula to determine
|
||||
// if a canvas can be used to scale a server-hosted thumbnail.
|
||||
// If canvas isn't supported by the UA (IE8 and older)
|
||||
// this method should not even be called.
|
||||
function isCrossOrigin(url) {
|
||||
var targetAnchor = document.createElement("a"),
|
||||
targetProtocol, targetHostname, targetPort;
|
||||
|
||||
targetAnchor.href = url;
|
||||
|
||||
targetProtocol = targetAnchor.protocol;
|
||||
targetPort = targetAnchor.port;
|
||||
targetHostname = targetAnchor.hostname;
|
||||
|
||||
if (targetProtocol.toLowerCase() !== window.location.protocol.toLowerCase()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (targetHostname.toLowerCase() !== window.location.hostname.toLowerCase()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// IE doesn't take ports into consideration when determining if two endpoints are same origin.
|
||||
if (targetPort !== window.location.port && !qq.ie()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function registerImgLoadListeners(img, promise) {
|
||||
img.onload = function() {
|
||||
img.onload = null;
|
||||
img.onerror = null;
|
||||
promise.success(img);
|
||||
};
|
||||
|
||||
img.onerror = function() {
|
||||
img.onload = null;
|
||||
img.onerror = null;
|
||||
log("Problem drawing thumbnail!", "error");
|
||||
promise.failure(img, "Problem drawing thumbnail!");
|
||||
};
|
||||
}
|
||||
|
||||
function registerCanvasDrawImageListener(canvas, promise) {
|
||||
// The image is drawn on the canvas by a third-party library,
|
||||
// and we want to know when this is completed. Since the library
|
||||
// may invoke drawImage many times in a loop, we need to be called
|
||||
// back when the image is fully rendered. So, we are expecting the
|
||||
// code that draws this image to follow a convention that involves a
|
||||
// function attached to the canvas instance be invoked when it is done.
|
||||
canvas.qqImageRendered = function() {
|
||||
promise.success(canvas);
|
||||
};
|
||||
}
|
||||
|
||||
// Fulfills a `qq.Promise` when an image has been drawn onto the target,
|
||||
// whether that is a <canvas> or an <img>. The attempt is considered a
|
||||
// failure if the target is not an <img> or a <canvas>, or if the drawing
|
||||
// attempt was not successful.
|
||||
function registerThumbnailRenderedListener(imgOrCanvas, promise) {
|
||||
var registered = isImg(imgOrCanvas) || isCanvas(imgOrCanvas);
|
||||
|
||||
if (isImg(imgOrCanvas)) {
|
||||
registerImgLoadListeners(imgOrCanvas, promise);
|
||||
}
|
||||
else if (isCanvas(imgOrCanvas)) {
|
||||
registerCanvasDrawImageListener(imgOrCanvas, promise);
|
||||
}
|
||||
else {
|
||||
promise.failure(imgOrCanvas);
|
||||
log(qq.format("Element container of type {} is not supported!", imgOrCanvas.tagName), "error");
|
||||
}
|
||||
|
||||
return registered;
|
||||
}
|
||||
|
||||
// Draw a preview iff the current UA can natively display it.
|
||||
// Also rotate the image if necessary.
|
||||
function draw(fileOrBlob, container, options) {
|
||||
var drawPreview = new qq.Promise(),
|
||||
identifier = new qq.Identify(fileOrBlob, log),
|
||||
maxSize = options.maxSize,
|
||||
// jshint eqnull:true
|
||||
orient = options.orient == null ? true : options.orient,
|
||||
megapixErrorHandler = function() {
|
||||
container.onerror = null;
|
||||
container.onload = null;
|
||||
log("Could not render preview, file may be too large!", "error");
|
||||
drawPreview.failure(container, "Browser cannot render image!");
|
||||
};
|
||||
|
||||
identifier.isPreviewable().then(
|
||||
function(mime) {
|
||||
// If options explicitly specify that Orientation is not desired,
|
||||
// replace the orient task with a dummy promise that "succeeds" immediately.
|
||||
var dummyExif = {
|
||||
parse: function() {
|
||||
return new qq.Promise().success();
|
||||
}
|
||||
},
|
||||
exif = orient ? new qq.Exif(fileOrBlob, log) : dummyExif,
|
||||
mpImg = new qq.MegaPixImage(fileOrBlob, megapixErrorHandler);
|
||||
|
||||
if (registerThumbnailRenderedListener(container, drawPreview)) {
|
||||
exif.parse().then(
|
||||
function(exif) {
|
||||
var orientation = exif && exif.Orientation;
|
||||
|
||||
mpImg.render(container, {
|
||||
maxWidth: maxSize,
|
||||
maxHeight: maxSize,
|
||||
orientation: orientation,
|
||||
mime: mime
|
||||
});
|
||||
},
|
||||
|
||||
function(failureMsg) {
|
||||
log(qq.format("EXIF data could not be parsed ({}). Assuming orientation = 1.", failureMsg));
|
||||
|
||||
mpImg.render(container, {
|
||||
maxWidth: maxSize,
|
||||
maxHeight: maxSize,
|
||||
mime: mime
|
||||
});
|
||||
}
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
function() {
|
||||
log("Not previewable");
|
||||
drawPreview.failure(container, "Not previewable");
|
||||
}
|
||||
);
|
||||
|
||||
return drawPreview;
|
||||
}
|
||||
|
||||
function drawOnCanvasOrImgFromUrl(url, canvasOrImg, draw, maxSize) {
|
||||
var tempImg = new Image(),
|
||||
tempImgRender = new qq.Promise();
|
||||
|
||||
registerThumbnailRenderedListener(tempImg, tempImgRender);
|
||||
|
||||
if (isCrossOrigin(url)) {
|
||||
tempImg.crossOrigin = "anonymous";
|
||||
}
|
||||
|
||||
tempImg.src = url;
|
||||
|
||||
tempImgRender.then(function() {
|
||||
registerThumbnailRenderedListener(canvasOrImg, draw);
|
||||
|
||||
var mpImg = new qq.MegaPixImage(tempImg);
|
||||
mpImg.render(canvasOrImg, {
|
||||
maxWidth: maxSize,
|
||||
maxHeight: maxSize,
|
||||
mime: determineMimeOfFileName(url)
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function drawOnImgFromUrlWithCssScaling(url, img, draw, maxSize) {
|
||||
registerThumbnailRenderedListener(img, draw);
|
||||
qq(img).css({
|
||||
maxWidth: maxSize + "px",
|
||||
maxHeight: maxSize + "px"
|
||||
});
|
||||
|
||||
img.src = url;
|
||||
}
|
||||
|
||||
// Draw a (server-hosted) thumbnail given a URL.
|
||||
// This will optionally scale the thumbnail as well.
|
||||
// It attempts to use <canvas> to scale, but will fall back
|
||||
// to max-width and max-height style properties if the UA
|
||||
// doesn't support canvas, or if the image is cross-domain and
|
||||
// the UA doesn't support the crossorigin attribute on img tags,
|
||||
// which is required to scale a cross-origin image using <canvas> &
|
||||
// then export it back to an <img>.
|
||||
function drawFromUrl(url, container, options) {
|
||||
var draw = new qq.Promise(),
|
||||
scale = options.scale,
|
||||
maxSize = scale ? options.maxSize : null;
|
||||
|
||||
// container is an img, scaling needed
|
||||
if (scale && isImg(container)) {
|
||||
// Iff canvas is available in this UA, try to use it for scaling.
|
||||
// Otherwise, fall back to CSS scaling
|
||||
if (isCanvasSupported()) {
|
||||
// Attempt to use <canvas> for image scaling,
|
||||
// but we must fall back to scaling via CSS/styles
|
||||
// if this is a cross-origin image and the UA doesn't support <img> CORS.
|
||||
if (isCrossOrigin(url) && !isImgCorsSupported()) {
|
||||
drawOnImgFromUrlWithCssScaling(url, container, draw, maxSize);
|
||||
}
|
||||
else {
|
||||
drawOnCanvasOrImgFromUrl(url, container, draw, maxSize);
|
||||
}
|
||||
}
|
||||
else {
|
||||
drawOnImgFromUrlWithCssScaling(url, container, draw, maxSize);
|
||||
}
|
||||
}
|
||||
// container is a canvas, scaling optional
|
||||
else if (isCanvas(container)) {
|
||||
drawOnCanvasOrImgFromUrl(url, container, draw, maxSize);
|
||||
}
|
||||
// container is an img & no scaling: just set the src attr to the passed url
|
||||
else if (registerThumbnailRenderedListener(container, draw)) {
|
||||
container.src = url;
|
||||
}
|
||||
|
||||
return draw;
|
||||
}
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* Generate a thumbnail. Depending on the arguments, this may either result in
|
||||
* a client-side rendering of an image (if a `Blob` is supplied) or a server-generated
|
||||
* image that may optionally be scaled client-side using <canvas> or CSS/styles (as a fallback).
|
||||
*
|
||||
* @param fileBlobOrUrl a `File`, `Blob`, or a URL pointing to the image
|
||||
* @param container <img> or <canvas> to contain the preview
|
||||
* @param options possible properties include `maxSize` (int), `orient` (bool - default true), and `resize` (bool - default true)
|
||||
* @returns qq.Promise fulfilled when the preview has been drawn, or the attempt has failed
|
||||
*/
|
||||
generate: function(fileBlobOrUrl, container, options) {
|
||||
if (qq.isString(fileBlobOrUrl)) {
|
||||
log("Attempting to update thumbnail based on server response.");
|
||||
return drawFromUrl(fileBlobOrUrl, container, options || {});
|
||||
}
|
||||
else {
|
||||
log("Attempting to draw client-side image preview.");
|
||||
return draw(fileBlobOrUrl, container, options || {});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/*<testing>*/
|
||||
this._testing = {};
|
||||
this._testing.isImg = isImg;
|
||||
this._testing.isCanvas = isCanvas;
|
||||
this._testing.isCrossOrigin = isCrossOrigin;
|
||||
this._testing.determineMimeOfFileName = determineMimeOfFileName;
|
||||
/*</testing>*/
|
||||
};
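// Usage sketch, not part of the original file: drawing a client-side preview of a selected
// file into a <canvas>, constrained to 200px. The element ids are hypothetical.
var generator = new qq.ImageGenerator(function(msg, level) { console.log(level || "info", msg); }),
    previewCanvas = document.getElementById("preview-canvas"),
    selectedFile = document.getElementById("file-input").files[0];

generator.generate(selectedFile, previewCanvas, {maxSize: 200}).then(
    function() { console.log("Preview rendered"); },
    function() { console.log("Preview could not be rendered"); }
);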
|
@ -1,299 +0,0 @@
|
||||
/*global qq, define */
|
||||
/*jshint strict:false,bitwise:false,nonew:false,asi:true,-W064,-W116,-W089 */
|
||||
/**
|
||||
* Mega pixel image rendering library for iOS6+
|
||||
*
|
||||
* Fixes iOS6+'s image file rendering issue for large images (over a megapixel),
|
||||
* which causes unexpected subsampling when drawing it in canvas.
|
||||
* By using this library, you can safely render the image with proper stretching.
|
||||
*
|
||||
* Copyright (c) 2012 Shinichi Tomita <shinichi.tomita@gmail.com>
|
||||
* Released under the MIT license
|
||||
*
|
||||
* Modified by Widen for Fine Uploader
|
||||
*/
|
||||
(function() {
|
||||
|
||||
/**
|
||||
* Detect subsampling in loaded image.
|
||||
* In iOS, larger images than 2M pixels may be subsampled in rendering.
|
||||
*/
|
||||
function detectSubsampling(img) {
|
||||
var iw = img.naturalWidth, ih = img.naturalHeight;
|
||||
if (iw * ih > 1024 * 1024) { // subsampling may happen over megapixel image
|
||||
var canvas = document.createElement("canvas");
|
||||
canvas.width = canvas.height = 1;
|
||||
var ctx = canvas.getContext("2d");
|
||||
ctx.drawImage(img, -iw + 1, 0);
|
||||
// subsampled image becomes half smaller in rendering size.
|
||||
// check alpha channel value to confirm image is covering edge pixel or not.
|
||||
// if alpha value is 0 image is not covering, hence subsampled.
|
||||
return ctx.getImageData(0, 0, 1, 1).data[3] === 0;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Detecting vertical squash in loaded image.
|
||||
* Fixes a bug which squashes images vertically while drawing into a canvas for some images.
|
||||
*/
|
||||
function detectVerticalSquash(img, iw, ih) {
|
||||
var canvas = document.createElement("canvas");
|
||||
canvas.width = 1;
|
||||
canvas.height = ih;
|
||||
var ctx = canvas.getContext("2d");
|
||||
ctx.drawImage(img, 0, 0);
|
||||
var data = ctx.getImageData(0, 0, 1, ih).data;
|
||||
// search image edge pixel position in case it is squashed vertically.
|
||||
var sy = 0;
|
||||
var ey = ih;
|
||||
var py = ih;
|
||||
while (py > sy) {
|
||||
var alpha = data[(py - 1) * 4 + 3];
|
||||
if (alpha === 0) {
|
||||
ey = py;
|
||||
} else {
|
||||
sy = py;
|
||||
}
|
||||
py = (ey + sy) >> 1;
|
||||
}
|
||||
var ratio = (py / ih);
|
||||
return (ratio===0)?1:ratio;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rendering image element (with resizing) and get its data URL
|
||||
*/
|
||||
function renderImageToDataURL(img, options, doSquash) {
|
||||
var canvas = document.createElement("canvas"),
|
||||
mime = options.mime || "image/jpeg";
|
||||
|
||||
renderImageToCanvas(img, canvas, options, doSquash);
|
||||
return canvas.toDataURL(mime, options.quality || 0.8);
|
||||
}
|
||||
|
||||
function maybeCalculateDownsampledDimensions(spec) {
|
||||
var maxPixels = 5241000; //iOS specific value
|
||||
|
||||
if (!qq.ios()) {
|
||||
throw new qq.Error("Downsampled dimensions can only be reliably calculated for iOS!");
|
||||
}
|
||||
|
||||
if (spec.origHeight * spec.origWidth > maxPixels) {
|
||||
return {
|
||||
newHeight: Math.round(Math.sqrt(maxPixels * (spec.origHeight / spec.origWidth))),
|
||||
newWidth: Math.round(Math.sqrt(maxPixels * (spec.origWidth / spec.origHeight)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Rendering image element (with resizing) into the canvas element
|
||||
*/
|
||||
function renderImageToCanvas(img, canvas, options, doSquash) {
|
||||
var iw = img.naturalWidth, ih = img.naturalHeight;
|
||||
var width = options.width, height = options.height;
|
||||
var ctx = canvas.getContext("2d");
|
||||
ctx.save();
|
||||
|
||||
if (!qq.supportedFeatures.unlimitedScaledImageSize) {
|
||||
var modifiedDimensions = maybeCalculateDownsampledDimensions({
|
||||
origWidth: width,
|
||||
origHeight: height
|
||||
});
|
||||
|
||||
if (modifiedDimensions) {
|
||||
qq.log(qq.format("Had to reduce dimensions due to device limitations from {}w / {}h to {}w / {}h",
|
||||
width, height, modifiedDimensions.newWidth, modifiedDimensions.newHeight),
|
||||
"warn");
|
||||
|
||||
width = modifiedDimensions.newWidth;
|
||||
height = modifiedDimensions.newHeight;
|
||||
}
|
||||
}
|
||||
|
||||
transformCoordinate(canvas, width, height, options.orientation);
|
||||
|
||||
// Fine Uploader specific: Save some CPU cycles if not using iOS
|
||||
// Assumption: This logic is only needed to overcome iOS image sampling issues
|
||||
if (qq.ios()) {
|
||||
var subsampled = detectSubsampling(img);
|
||||
if (subsampled) {
|
||||
iw /= 2;
|
||||
ih /= 2;
|
||||
}
|
||||
var d = 1024; // size of tiling canvas
|
||||
var tmpCanvas = document.createElement("canvas");
|
||||
tmpCanvas.width = tmpCanvas.height = d;
|
||||
var tmpCtx = tmpCanvas.getContext("2d");
|
||||
var vertSquashRatio = doSquash ? detectVerticalSquash(img, iw, ih) : 1;
|
||||
var dw = Math.ceil(d * width / iw);
|
||||
var dh = Math.ceil(d * height / ih / vertSquashRatio);
|
||||
var sy = 0;
|
||||
var dy = 0;
|
||||
while (sy < ih) {
|
||||
var sx = 0;
|
||||
var dx = 0;
|
||||
while (sx < iw) {
|
||||
tmpCtx.clearRect(0, 0, d, d);
|
||||
tmpCtx.drawImage(img, -sx, -sy);
|
||||
ctx.drawImage(tmpCanvas, 0, 0, d, d, dx, dy, dw, dh);
|
||||
sx += d;
|
||||
dx += dw;
|
||||
}
|
||||
sy += d;
|
||||
dy += dh;
|
||||
}
|
||||
ctx.restore();
|
||||
tmpCanvas = tmpCtx = null;
|
||||
}
|
||||
else {
|
||||
ctx.drawImage(img, 0, 0, width, height);
|
||||
}
|
||||
|
||||
canvas.qqImageRendered && canvas.qqImageRendered();
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform canvas coordination according to specified frame size and orientation
|
||||
* Orientation value is from EXIF tag
|
||||
*/
|
||||
function transformCoordinate(canvas, width, height, orientation) {
|
||||
switch (orientation) {
|
||||
case 5:
|
||||
case 6:
|
||||
case 7:
|
||||
case 8:
|
||||
canvas.width = height;
|
||||
canvas.height = width;
|
||||
break;
|
||||
default:
|
||||
canvas.width = width;
|
||||
canvas.height = height;
|
||||
}
|
||||
var ctx = canvas.getContext("2d");
|
||||
switch (orientation) {
|
||||
case 2:
|
||||
// horizontal flip
|
||||
ctx.translate(width, 0);
|
||||
ctx.scale(-1, 1);
|
||||
break;
|
||||
case 3:
|
||||
// 180 rotate left
|
||||
ctx.translate(width, height);
|
||||
ctx.rotate(Math.PI);
|
||||
break;
|
||||
case 4:
|
||||
// vertical flip
|
||||
ctx.translate(0, height);
|
||||
ctx.scale(1, -1);
|
||||
break;
|
||||
case 5:
|
||||
// vertical flip + 90 rotate right
|
||||
ctx.rotate(0.5 * Math.PI);
|
||||
ctx.scale(1, -1);
|
||||
break;
|
||||
case 6:
|
||||
// 90 rotate right
|
||||
ctx.rotate(0.5 * Math.PI);
|
||||
ctx.translate(0, -height);
|
||||
break;
|
||||
case 7:
|
||||
// horizontal flip + 90 rotate right
|
||||
ctx.rotate(0.5 * Math.PI);
|
||||
ctx.translate(width, -height);
|
||||
ctx.scale(-1, 1);
|
||||
break;
|
||||
case 8:
|
||||
// 90 rotate left
|
||||
ctx.rotate(-0.5 * Math.PI);
|
||||
ctx.translate(-width, 0);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* MegaPixImage class
|
||||
*/
|
||||
function MegaPixImage(srcImage, errorCallback) {
|
||||
if (window.Blob && srcImage instanceof Blob) {
|
||||
var img = new Image();
|
||||
var URL = window.URL && window.URL.createObjectURL ? window.URL :
|
||||
window.webkitURL && window.webkitURL.createObjectURL ? window.webkitURL :
|
||||
null;
|
||||
if (!URL) { throw Error("No createObjectURL function found to create blob url"); }
|
||||
img.src = URL.createObjectURL(srcImage);
|
||||
this.blob = srcImage;
|
||||
srcImage = img;
|
||||
}
|
||||
if (!srcImage.naturalWidth && !srcImage.naturalHeight) {
|
||||
var _this = this;
|
||||
srcImage.onload = function() {
|
||||
var listeners = _this.imageLoadListeners;
|
||||
if (listeners) {
|
||||
_this.imageLoadListeners = null;
|
||||
// IE11 doesn't reliably report actual image dimensions immediately after onload for small files,
|
||||
// so let's push this to the end of the UI thread queue.
|
||||
setTimeout(function() {
|
||||
for (var i=0, len=listeners.length; i<len; i++) {
|
||||
listeners[i]();
|
||||
}
|
||||
}, 0);
|
||||
}
|
||||
};
|
||||
srcImage.onerror = errorCallback;
|
||||
this.imageLoadListeners = [];
|
||||
}
|
||||
this.srcImage = srcImage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rendering megapix image into specified target element
|
||||
*/
|
||||
MegaPixImage.prototype.render = function(target, options) {
|
||||
if (this.imageLoadListeners) {
|
||||
var _this = this;
|
||||
this.imageLoadListeners.push(function() { _this.render(target, options) });
|
||||
return;
|
||||
}
|
||||
options = options || {};
|
||||
var imgWidth = this.srcImage.naturalWidth, imgHeight = this.srcImage.naturalHeight,
|
||||
width = options.width, height = options.height,
|
||||
maxWidth = options.maxWidth, maxHeight = options.maxHeight,
|
||||
doSquash = !this.blob || this.blob.type === "image/jpeg";
|
||||
if (width && !height) {
|
||||
height = (imgHeight * width / imgWidth) << 0;
|
||||
} else if (height && !width) {
|
||||
width = (imgWidth * height / imgHeight) << 0;
|
||||
} else {
|
||||
width = imgWidth;
|
||||
height = imgHeight;
|
||||
}
|
||||
if (maxWidth && width > maxWidth) {
|
||||
width = maxWidth;
|
||||
height = (imgHeight * width / imgWidth) << 0;
|
||||
}
|
||||
if (maxHeight && height > maxHeight) {
|
||||
height = maxHeight;
|
||||
width = (imgWidth * height / imgHeight) << 0;
|
||||
}
|
||||
var opt = { width : width, height : height };
|
||||
for (var k in options) opt[k] = options[k];
|
||||
|
||||
var tagName = target.tagName.toLowerCase();
|
||||
if (tagName === "img") {
|
||||
target.src = renderImageToDataURL(this.srcImage, opt, doSquash);
|
||||
} else if (tagName === "canvas") {
|
||||
renderImageToCanvas(this.srcImage, target, opt, doSquash);
|
||||
}
|
||||
if (typeof this.onrender === "function") {
|
||||
this.onrender(target);
|
||||
}
|
||||
};
|
||||
|
||||
qq.MegaPixImage = MegaPixImage;
|
||||
})();
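// Usage sketch, not part of the original file: rendering a Blob into a canvas with
// qq.MegaPixImage, capped at 1024px on the longest side. `imageBlob` and the canvas id
// are assumptions; the option names are the ones handled by render() above.
var mpImg = new qq.MegaPixImage(imageBlob, function() {
    console.log("Source image failed to load");
});

mpImg.render(document.getElementById("thumb-canvas"), {
    maxWidth: 1024,
    maxHeight: 1024,
    orientation: 6,    // EXIF orientation value; the canvas is transformed accordingly
    mime: "image/jpeg" // only affects <img> targets, where a data URL is exported
});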
|
@ -1,398 +0,0 @@
|
||||
/* globals qq, ExifRestorer */
|
||||
/**
|
||||
* Controls generation of scaled images based on a reference image encapsulated in a `File` or `Blob`.
|
||||
* Scaled images are generated and converted to blobs on-demand.
|
||||
* Multiple scaled images per reference image with varying sizes and other properties are supported.
|
||||
*
|
||||
* @param spec Information about the scaled images to generate.
|
||||
* @param log Logger instance
|
||||
* @constructor
|
||||
*/
|
||||
qq.Scaler = function(spec, log) {
|
||||
"use strict";
|
||||
|
||||
var self = this,
|
||||
includeReference = spec.sendOriginal,
|
||||
orient = spec.orient,
|
||||
defaultType = spec.defaultType,
|
||||
defaultQuality = spec.defaultQuality / 100,
|
||||
failedToScaleText = spec.failureText,
|
||||
includeExif = spec.includeExif,
|
||||
sizes = this._getSortedSizes(spec.sizes);
|
||||
|
||||
// Revealed API for instances of this module
|
||||
qq.extend(this, {
|
||||
// If no targeted sizes have been declared or if this browser doesn't support
|
||||
// client-side image preview generation, there is no scaling to do.
|
||||
enabled: qq.supportedFeatures.scaling && sizes.length > 0,
|
||||
|
||||
getFileRecords: function(originalFileUuid, originalFileName, originalBlobOrBlobData) {
|
||||
var self = this,
|
||||
records = [],
|
||||
originalBlob = originalBlobOrBlobData.blob ? originalBlobOrBlobData.blob : originalBlobOrBlobData,
|
||||
identifier = new qq.Identify(originalBlob, log);
|
||||
|
||||
// If the reference file cannot be rendered natively, we can't create scaled versions.
|
||||
if (identifier.isPreviewableSync()) {
|
||||
// Create records for each scaled version & add them to the records array, smallest first.
|
||||
qq.each(sizes, function(idx, sizeRecord) {
|
||||
var outputType = self._determineOutputType({
|
||||
defaultType: defaultType,
|
||||
requestedType: sizeRecord.type,
|
||||
refType: originalBlob.type
|
||||
});
|
||||
|
||||
records.push({
|
||||
uuid: qq.getUniqueId(),
|
||||
name: self._getName(originalFileName, {
|
||||
name: sizeRecord.name,
|
||||
type: outputType,
|
||||
refType: originalBlob.type
|
||||
}),
|
||||
blob: new qq.BlobProxy(originalBlob,
|
||||
qq.bind(self._generateScaledImage, self, {
|
||||
maxSize: sizeRecord.maxSize,
|
||||
orient: orient,
|
||||
type: outputType,
|
||||
quality: defaultQuality,
|
||||
failedText: failedToScaleText,
|
||||
includeExif: includeExif,
|
||||
log: log
|
||||
}))
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
includeReference && records.push({
|
||||
uuid: originalFileUuid,
|
||||
name: originalFileName,
|
||||
blob: originalBlob
|
||||
});
|
||||
}
|
||||
else {
|
||||
records.push({
|
||||
uuid: originalFileUuid,
|
||||
name: originalFileName,
|
||||
blob: originalBlob
|
||||
});
|
||||
}
|
||||
|
||||
return records;
|
||||
},
|
||||
|
||||
handleNewFile: function(file, name, uuid, size, fileList, uuidParamName, batchId, api) {
|
||||
var self = this,
|
||||
buttonId = file.qqButtonId || (file.blob && file.blob.qqButtonId),
|
||||
scaledIds = [],
|
||||
originalId = null,
|
||||
addFileToHandler = api.addFileToHandler,
|
||||
uploadData = api.uploadData,
|
||||
paramsStore = api.paramsStore,
|
||||
proxyGroupId = qq.getUniqueId();
|
||||
|
||||
qq.each(self.getFileRecords(uuid, name, file), function(idx, record) {
|
||||
var relatedBlob = file,
|
||||
relatedSize = size,
|
||||
id;
|
||||
|
||||
if (record.blob instanceof qq.BlobProxy) {
|
||||
relatedBlob = record.blob;
|
||||
relatedSize = -1;
|
||||
}
|
||||
|
||||
id = uploadData.addFile({
|
||||
uuid: record.uuid,
|
||||
name: record.name,
|
||||
size: relatedSize,
|
||||
batchId: batchId,
|
||||
proxyGroupId: proxyGroupId
|
||||
});
|
||||
|
||||
if (record.blob instanceof qq.BlobProxy) {
|
||||
scaledIds.push(id);
|
||||
}
|
||||
else {
|
||||
originalId = id;
|
||||
}
|
||||
|
||||
addFileToHandler(id, relatedBlob);
|
||||
|
||||
fileList.push({id: id, file: relatedBlob});
|
||||
|
||||
});
|
||||
|
||||
// If we are potentially uploading an original file and some scaled versions,
|
||||
// ensure the scaled versions include references to the parent's UUID and size
|
||||
// in their associated upload requests.
|
||||
if (originalId !== null) {
|
||||
qq.each(scaledIds, function(idx, scaledId) {
|
||||
var params = {
|
||||
qqparentuuid: uploadData.retrieve({id: originalId}).uuid,
|
||||
qqparentsize: uploadData.retrieve({id: originalId}).size
|
||||
};
|
||||
|
||||
// Make SURE the UUID for each scaled image is sent with the upload request,
|
||||
// to be consistent (since we need to ensure it is sent for the original file as well).
|
||||
params[uuidParamName] = uploadData.retrieve({id: scaledId}).uuid;
|
||||
|
||||
uploadData.setParentId(scaledId, originalId);
|
||||
paramsStore.addReadOnly(scaledId, params);
|
||||
});
|
||||
|
||||
// If any scaled images are tied to this parent image, be SURE we send its UUID as an upload request
|
||||
// parameter as well.
|
||||
if (scaledIds.length) {
|
||||
(function() {
|
||||
var param = {};
|
||||
param[uuidParamName] = uploadData.retrieve({id: originalId}).uuid;
|
||||
paramsStore.addReadOnly(originalId, param);
|
||||
}());
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
qq.extend(qq.Scaler.prototype, {
|
||||
scaleImage: function(id, specs, api) {
|
||||
"use strict";
|
||||
|
||||
if (!qq.supportedFeatures.scaling) {
|
||||
throw new qq.Error("Scaling is not supported in this browser!");
|
||||
}
|
||||
|
||||
var scalingEffort = new qq.Promise(),
|
||||
log = api.log,
|
||||
file = api.getFile(id),
|
||||
uploadData = api.uploadData.retrieve({id: id}),
|
||||
name = uploadData && uploadData.name,
|
||||
uuid = uploadData && uploadData.uuid,
|
||||
scalingOptions = {
|
||||
sendOriginal: false,
|
||||
orient: specs.orient,
|
||||
defaultType: specs.type || null,
|
||||
defaultQuality: specs.quality,
|
||||
failureText: "Unable to scale",
|
||||
sizes: [{name: "", maxSize: specs.maxSize}]
|
||||
},
|
||||
scaler = new qq.Scaler(scalingOptions, log);
|
||||
|
||||
if (!qq.Scaler || !qq.supportedFeatures.imagePreviews || !file) {
|
||||
scalingEffort.failure();
|
||||
|
||||
log("Could not generate requested scaled image for " + id + ". " +
|
||||
"Scaling is either not possible in this browser, or the file could not be located.", "error");
|
||||
}
|
||||
else {
|
||||
(qq.bind(function() {
|
||||
// Assumption: There will never be more than one record
|
||||
var record = scaler.getFileRecords(uuid, name, file)[0];
|
||||
|
||||
if (record && record.blob instanceof qq.BlobProxy) {
|
||||
record.blob.create().then(scalingEffort.success, scalingEffort.failure);
|
||||
}
|
||||
else {
|
||||
log(id + " is not a scalable image!", "error");
|
||||
scalingEffort.failure();
|
||||
}
|
||||
}, this)());
|
||||
}
|
||||
|
||||
return scalingEffort;
|
||||
},
|
||||
|
||||
// NOTE: We cannot reliably determine at this time if the UA supports a specific MIME type for the target format.
|
||||
// image/jpeg and image/png are the only safe choices at this time.
|
||||
_determineOutputType: function(spec) {
|
||||
"use strict";
|
||||
|
||||
var requestedType = spec.requestedType,
|
||||
defaultType = spec.defaultType,
|
||||
referenceType = spec.refType;
|
||||
|
||||
// If a default type and requested type have not been specified, this should be a
|
||||
// JPEG if the original type is a JPEG, otherwise, a PNG.
|
||||
if (!defaultType && !requestedType) {
|
||||
if (referenceType !== "image/jpeg") {
|
||||
return "image/png";
|
||||
}
|
||||
return referenceType;
|
||||
}
|
||||
|
||||
// A specified default type is used when a requested type is not specified.
|
||||
if (!requestedType) {
|
||||
return defaultType;
|
||||
}
|
||||
|
||||
// If requested type is specified, use it, as long as this recognized type is supported by the current UA
|
||||
if (qq.indexOf(Object.keys(qq.Identify.prototype.PREVIEWABLE_MIME_TYPES), requestedType) >= 0) {
|
||||
if (requestedType === "image/tiff") {
|
||||
return qq.supportedFeatures.tiffPreviews ? requestedType : defaultType;
|
||||
}
|
||||
|
||||
return requestedType;
|
||||
}
|
||||
|
||||
return defaultType;
|
||||
},
|
||||
|
||||
// Get a file name for a generated scaled file record, based on the provided scaled image description
|
||||
_getName: function(originalName, scaledVersionProperties) {
|
||||
"use strict";
|
||||
|
||||
var startOfExt = originalName.lastIndexOf("."),
|
||||
nameAppendage = " (" + scaledVersionProperties.name + ")",
|
||||
versionType = scaledVersionProperties.type || "image/png",
|
||||
referenceType = scaledVersionProperties.refType,
|
||||
scaledName = "",
|
||||
scaledExt = qq.getExtension(originalName);
|
||||
|
||||
if (startOfExt >= 0) {
|
||||
scaledName = originalName.substr(0, startOfExt);
|
||||
|
||||
if (referenceType !== versionType) {
|
||||
scaledExt = versionType.split("/")[1];
|
||||
}
|
||||
|
||||
scaledName += nameAppendage + "." + scaledExt;
|
||||
}
|
||||
else {
|
||||
scaledName = originalName + nameAppendage;
|
||||
}
|
||||
|
||||
return scaledName;
|
||||
},
|
||||
|
||||
// We want the smallest scaled file to be uploaded first
|
||||
_getSortedSizes: function(sizes) {
|
||||
"use strict";
|
||||
|
||||
sizes = qq.extend([], sizes);
|
||||
|
||||
return sizes.sort(function(a, b) {
|
||||
if (a.maxSize > b.maxSize) {
|
||||
return 1;
|
||||
}
|
||||
if (a.maxSize < b.maxSize) {
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
},
|
||||
|
||||
_generateScaledImage: function(spec, sourceFile) {
|
||||
"use strict";
|
||||
|
||||
var self = this,
|
||||
log = spec.log,
|
||||
maxSize = spec.maxSize,
|
||||
orient = spec.orient,
|
||||
type = spec.type,
|
||||
quality = spec.quality,
|
||||
failedText = spec.failedText,
|
||||
includeExif = spec.includeExif && sourceFile.type === "image/jpeg" && type === "image/jpeg",
|
||||
scalingEffort = new qq.Promise(),
|
||||
imageGenerator = new qq.ImageGenerator(log),
|
||||
canvas = document.createElement("canvas");
|
||||
|
||||
log("Attempting to generate scaled version for " + sourceFile.name);
|
||||
|
||||
imageGenerator.generate(sourceFile, canvas, {maxSize: maxSize, orient: orient}).then(function() {
|
||||
var scaledImageDataUri = canvas.toDataURL(type, quality),
|
||||
signalSuccess = function() {
|
||||
log("Success generating scaled version for " + sourceFile.name);
|
||||
var blob = self._dataUriToBlob(scaledImageDataUri);
|
||||
scalingEffort.success(blob);
|
||||
};
|
||||
|
||||
if (includeExif) {
|
||||
self._insertExifHeader(sourceFile, scaledImageDataUri, log).then(function(scaledImageDataUriWithExif) {
|
||||
scaledImageDataUri = scaledImageDataUriWithExif;
|
||||
signalSuccess();
|
||||
},
|
||||
function() {
|
||||
log("Problem inserting EXIF header into scaled image. Using scaled image w/out EXIF data.", "error");
|
||||
signalSuccess();
|
||||
});
|
||||
}
|
||||
else {
|
||||
signalSuccess();
|
||||
}
|
||||
}, function() {
|
||||
log("Failed attempt to generate scaled version for " + sourceFile.name, "error");
|
||||
scalingEffort.failure(failedText);
|
||||
});
|
||||
|
||||
return scalingEffort;
|
||||
},
|
||||
|
||||
// Attempt to insert the original image's EXIF header into a scaled version.
|
||||
_insertExifHeader: function(originalImage, scaledImageDataUri, log) {
|
||||
"use strict";
|
||||
|
||||
var reader = new FileReader(),
|
||||
insertionEffort = new qq.Promise(),
|
||||
originalImageDataUri = "";
|
||||
|
||||
reader.onload = function() {
|
||||
originalImageDataUri = reader.result;
|
||||
insertionEffort.success(ExifRestorer.restore(originalImageDataUri, scaledImageDataUri));
|
||||
};
|
||||
|
||||
reader.onerror = function() {
|
||||
log("Problem reading " + originalImage.name + " during attempt to transfer EXIF data to scaled version.", "error");
|
||||
insertionEffort.failure();
|
||||
};
|
||||
|
||||
reader.readAsDataURL(originalImage);
|
||||
|
||||
return insertionEffort;
|
||||
},
|
||||
|
||||
|
||||
_dataUriToBlob: function(dataUri) {
|
||||
"use strict";
|
||||
|
||||
var byteString, mimeString, arrayBuffer, intArray;
|
||||
|
||||
// convert base64 to raw binary data held in a string
|
||||
if (dataUri.split(",")[0].indexOf("base64") >= 0) {
|
||||
byteString = atob(dataUri.split(",")[1]);
|
||||
}
|
||||
else {
|
||||
byteString = decodeURI(dataUri.split(",")[1]);
|
||||
}
|
||||
|
||||
// extract the MIME
|
||||
mimeString = dataUri.split(",")[0]
|
||||
.split(":")[1]
|
||||
.split(";")[0];
|
||||
|
||||
// write the bytes of the binary string to an ArrayBuffer
|
||||
arrayBuffer = new ArrayBuffer(byteString.length);
|
||||
intArray = new Uint8Array(arrayBuffer);
|
||||
qq.each(byteString, function(idx, character) {
|
||||
intArray[idx] = character.charCodeAt(0);
|
||||
});
|
||||
|
||||
return this._createBlob(arrayBuffer, mimeString);
|
||||
},
|
||||
|
||||
_createBlob: function(data, mime) {
|
||||
"use strict";
|
||||
|
||||
var BlobBuilder = window.BlobBuilder ||
|
||||
window.WebKitBlobBuilder ||
|
||||
window.MozBlobBuilder ||
|
||||
window.MSBlobBuilder,
|
||||
blobBuilder = BlobBuilder && new BlobBuilder();
|
||||
|
||||
if (blobBuilder) {
|
||||
blobBuilder.append(data);
|
||||
return blobBuilder.getBlob(mime);
|
||||
}
|
||||
else {
|
||||
return new Blob([data], {type: mime});
|
||||
}
|
||||
}
|
||||
});
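// Usage sketch, not part of the original file: generating a single scaled copy of a
// reference image and resolving its Blob through the qq.BlobProxy records produced by
// getFileRecords. `originalFile` is a hypothetical File; the size/quality values are
// arbitrary.
var scaler = new qq.Scaler({
        sendOriginal: true,
        orient: true,
        defaultType: "image/jpeg",
        defaultQuality: 80,
        failureText: "Unable to scale",
        includeExif: false,
        sizes: [{name: "small", maxSize: 200}]
    }, function(msg) { console.log(msg); }),
    records = scaler.getFileRecords(qq.getUniqueId(), originalFile.name, originalFile);

if (records.length && records[0].blob instanceof qq.BlobProxy) {
    records[0].blob.create().then(
        function(scaledBlob) { console.log("Scaled blob is " + scaledBlob.size + " bytes"); },
        function(reason) { console.log("Scaling failed: " + reason); }
    );
}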
|
@ -1,134 +0,0 @@
|
||||
/*globals qq*/
|
||||
/**
|
||||
* Attempts to validate an image, wherever possible.
|
||||
*
|
||||
* @param blob File or Blob representing a user-selected image.
|
||||
* @param log Uses this to post log messages to the console.
|
||||
* @constructor
|
||||
*/
|
||||
qq.ImageValidation = function(blob, log) {
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* @param limits Object with possible image-related limits to enforce.
|
||||
* @returns {boolean} true if at least one of the limits has a non-zero value
|
||||
*/
|
||||
function hasNonZeroLimits(limits) {
|
||||
var atLeastOne = false;
|
||||
|
||||
qq.each(limits, function(limit, value) {
|
||||
if (value > 0) {
|
||||
atLeastOne = true;
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
return atLeastOne;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {qq.Promise} The promise is a failure if we can't obtain the width & height.
|
||||
* Otherwise, `success` is called on the returned promise with an object containing
|
||||
* `width` and `height` properties.
|
||||
*/
|
||||
function getWidthHeight() {
|
||||
var sizeDetermination = new qq.Promise();
|
||||
|
||||
new qq.Identify(blob, log).isPreviewable().then(function() {
|
||||
var image = new Image(),
|
||||
url = window.URL && window.URL.createObjectURL ? window.URL :
|
||||
window.webkitURL && window.webkitURL.createObjectURL ? window.webkitURL :
|
||||
null;
|
||||
|
||||
if (url) {
|
||||
image.onerror = function() {
|
||||
log("Cannot determine dimensions for image. May be too large.", "error");
|
||||
sizeDetermination.failure();
|
||||
};
|
||||
|
||||
image.onload = function() {
|
||||
sizeDetermination.success({
|
||||
width: this.width,
|
||||
height: this.height
|
||||
});
|
||||
};
|
||||
|
||||
image.src = url.createObjectURL(blob);
|
||||
}
|
||||
else {
|
||||
log("No createObjectURL function available to generate image URL!", "error");
|
||||
sizeDetermination.failure();
|
||||
}
|
||||
}, sizeDetermination.failure);
|
||||
|
||||
return sizeDetermination;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param limits Object with possible image-related limits to enforce.
|
||||
* @param dimensions Object containing `width` & `height` properties for the image to test.
|
||||
* @returns {String || undefined} The name of the failing limit. Undefined if no failing limits.
|
||||
*/
|
||||
function getFailingLimit(limits, dimensions) {
|
||||
var failingLimit;
|
||||
|
||||
qq.each(limits, function(limitName, limitValue) {
|
||||
if (limitValue > 0) {
|
||||
var limitMatcher = /(max|min)(Width|Height)/.exec(limitName),
|
||||
dimensionPropName = limitMatcher[2].charAt(0).toLowerCase() + limitMatcher[2].slice(1),
|
||||
actualValue = dimensions[dimensionPropName];
|
||||
|
||||
/*jshint -W015*/
|
||||
switch(limitMatcher[1]) {
|
||||
case "min":
|
||||
if (actualValue < limitValue) {
|
||||
failingLimit = limitName;
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case "max":
|
||||
if (actualValue > limitValue) {
|
||||
failingLimit = limitName;
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return failingLimit;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate the associated blob.
|
||||
*
|
||||
* @param limits
|
||||
* @returns {qq.Promise} `success` is called on the promise if the image is valid, or
|
||||
* if the blob is not an image, or if the image is not verifiable.
|
||||
* Otherwise, `failure` with the name of the failing limit.
|
||||
*/
|
||||
this.validate = function(limits) {
|
||||
var validationEffort = new qq.Promise();
|
||||
|
||||
log("Attempting to validate image.");
|
||||
|
||||
if (hasNonZeroLimits(limits)) {
|
||||
getWidthHeight().then(function(dimensions) {
|
||||
var failingLimit = getFailingLimit(limits, dimensions);
|
||||
|
||||
if (failingLimit) {
|
||||
validationEffort.failure(failingLimit);
|
||||
}
|
||||
else {
|
||||
validationEffort.success();
|
||||
}
|
||||
}, validationEffort.success);
|
||||
}
|
||||
else {
|
||||
validationEffort.success();
|
||||
}
|
||||
|
||||
return validationEffort;
|
||||
};
|
||||
};
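// Usage sketch, not part of the original file: enforcing minimum dimensions on a
// user-selected image. `imageFile` is hypothetical; zero-valued limits are ignored, and
// validation "passes" when the blob is not a previewable image at all.
new qq.ImageValidation(imageFile, function(msg, level) { console.log(level || "info", msg); })
    .validate({minWidth: 100, minHeight: 100, maxWidth: 0, maxHeight: 0})
    .then(
        function() { console.log("Image accepted"); },
        function(failingLimit) { console.log("Image rejected by the " + failingLimit + " limit"); }
    );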
|
@ -1,144 +0,0 @@
|
||||
/*globals jQuery, qq*/
|
||||
(function($) {
|
||||
"use strict";
|
||||
var rootDataKey = "fineUploaderDnd",
|
||||
$el;
|
||||
|
||||
function init (options) {
|
||||
if (!options) {
|
||||
options = {};
|
||||
}
|
||||
|
||||
options.dropZoneElements = [$el];
|
||||
var xformedOpts = transformVariables(options);
|
||||
addCallbacks(xformedOpts);
|
||||
dnd(new qq.DragAndDrop(xformedOpts));
|
||||
|
||||
return $el;
|
||||
}
|
||||
|
||||
function dataStore(key, val) {
|
||||
var data = $el.data(rootDataKey);
|
||||
|
||||
if (val) {
|
||||
if (data === undefined) {
|
||||
data = {};
|
||||
}
|
||||
data[key] = val;
|
||||
$el.data(rootDataKey, data);
|
||||
}
|
||||
else {
|
||||
if (data === undefined) {
|
||||
return null;
|
||||
}
|
||||
return data[key];
|
||||
}
|
||||
}
|
||||
|
||||
function dnd(instanceToStore) {
|
||||
return dataStore("dndInstance", instanceToStore);
|
||||
}
|
||||
|
||||
function addCallbacks(transformedOpts) {
|
||||
var callbacks = transformedOpts.callbacks = {};
|
||||
|
||||
$.each(new qq.DragAndDrop.callbacks(), function(prop, func) {
|
||||
var name = prop,
|
||||
$callbackEl;
|
||||
|
||||
$callbackEl = $el;
|
||||
|
||||
callbacks[prop] = function() {
|
||||
var args = Array.prototype.slice.call(arguments),
|
||||
jqueryHandlerResult = $callbackEl.triggerHandler(name, args);
|
||||
|
||||
return jqueryHandlerResult;
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
//transform jQuery objects into HTMLElements, and pass along all other option properties
|
||||
function transformVariables(source, dest) {
|
||||
var xformed, arrayVals;
|
||||
|
||||
if (dest === undefined) {
|
||||
xformed = {};
|
||||
}
|
||||
else {
|
||||
xformed = dest;
|
||||
}
|
||||
|
||||
$.each(source, function(prop, val) {
|
||||
if (val instanceof $) {
|
||||
xformed[prop] = val[0];
|
||||
}
|
||||
else if ($.isPlainObject(val)) {
|
||||
xformed[prop] = {};
|
||||
transformVariables(val, xformed[prop]);
|
||||
}
|
||||
else if ($.isArray(val)) {
|
||||
arrayVals = [];
|
||||
$.each(val, function(idx, arrayVal) {
|
||||
if (arrayVal instanceof $) {
|
||||
$.merge(arrayVals, arrayVal);
|
||||
}
|
||||
else {
|
||||
arrayVals.push(arrayVal);
|
||||
}
|
||||
});
|
||||
xformed[prop] = arrayVals;
|
||||
}
|
||||
else {
|
||||
xformed[prop] = val;
|
||||
}
|
||||
});
|
||||
|
||||
if (dest === undefined) {
|
||||
return xformed;
|
||||
}
|
||||
}
|
||||
|
||||
function isValidCommand(command) {
|
||||
return $.type(command) === "string" &&
|
||||
command === "dispose" &&
|
||||
dnd()[command] !== undefined;
|
||||
}
|
||||
|
||||
function delegateCommand(command) {
|
||||
var xformedArgs = [], origArgs = Array.prototype.slice.call(arguments, 1);
|
||||
transformVariables(origArgs, xformedArgs);
|
||||
return dnd()[command].apply(dnd(), xformedArgs);
|
||||
}
|
||||
|
||||
$.fn.fineUploaderDnd = function(optionsOrCommand) {
|
||||
var self = this, selfArgs = arguments, retVals = [];
|
||||
|
||||
this.each(function(index, el) {
|
||||
$el = $(el);
|
||||
|
||||
if (dnd() && isValidCommand(optionsOrCommand)) {
|
||||
retVals.push(delegateCommand.apply(self, selfArgs));
|
||||
|
||||
if (self.length === 1) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else if (typeof optionsOrCommand === "object" || !optionsOrCommand) {
|
||||
init.apply(self, selfArgs);
|
||||
}
|
||||
else {
|
||||
$.error("Method " + optionsOrCommand + " does not exist in Fine Uploader's DnD module.");
|
||||
}
|
||||
});
|
||||
|
||||
if (retVals.length === 1) {
|
||||
return retVals[0];
|
||||
}
|
||||
else if (retVals.length > 1) {
|
||||
return retVals;
|
||||
}
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
}(jQuery));
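// Usage sketch, not part of the original file: attaching the standalone drag-and-drop
// plugin to a drop zone and later disposing of it. The selector is hypothetical; any
// option accepted by qq.DragAndDrop (defined outside this hunk) may be passed on init,
// and its callbacks are re-emitted as jQuery events on the element. Per isValidCommand
// above, "dispose" is the only string command the wrapper forwards.
$("#drop-zone").fineUploaderDnd({});

// ...when the drop zone is torn down:
$("#drop-zone").fineUploaderDnd("dispose");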
|
@ -1,232 +0,0 @@
|
||||
/*globals jQuery, qq*/
|
||||
(function($) {
|
||||
"use strict";
|
||||
var $el,
|
||||
pluginOptions = ["uploaderType", "endpointType"];
|
||||
|
||||
function init(options) {
|
||||
var xformedOpts = transformVariables(options || {}),
|
||||
newUploaderInstance = getNewUploaderInstance(xformedOpts);
|
||||
|
||||
uploader(newUploaderInstance);
|
||||
addCallbacks(xformedOpts, newUploaderInstance);
|
||||
|
||||
return $el;
|
||||
}
|
||||
|
||||
function getNewUploaderInstance(params) {
|
||||
var uploaderType = pluginOption("uploaderType"),
|
||||
namespace = pluginOption("endpointType");
|
||||
|
||||
// If the integrator has defined a specific type of uploader to load, use that, otherwise assume `qq.FineUploader`
|
||||
if (uploaderType) {
|
||||
// We can determine the correct constructor function to invoke by combining "FineUploader"
|
||||
// with the upper camel cased `uploaderType` value.
|
||||
uploaderType = uploaderType.charAt(0).toUpperCase() + uploaderType.slice(1).toLowerCase();
|
||||
|
||||
if (namespace) {
|
||||
return new qq[namespace]["FineUploader" + uploaderType](params);
|
||||
}
|
||||
|
||||
return new qq["FineUploader" + uploaderType](params);
|
||||
}
|
||||
else {
|
||||
if (namespace) {
|
||||
return new qq[namespace].FineUploader(params);
|
||||
}
|
||||
|
||||
return new qq.FineUploader(params);
|
||||
}
|
||||
}
|
||||
|
||||
function dataStore(key, val) {
|
||||
var data = $el.data("fineuploader");
|
||||
|
||||
if (val) {
|
||||
if (data === undefined) {
|
||||
data = {};
|
||||
}
|
||||
data[key] = val;
|
||||
$el.data("fineuploader", data);
|
||||
}
|
||||
else {
|
||||
if (data === undefined) {
|
||||
return null;
|
||||
}
|
||||
return data[key];
|
||||
}
|
||||
}
|
||||
|
||||
//the underlying Fine Uploader instance is stored in jQuery's data store, associated with the element
|
||||
// tied to this instance of the plug-in
|
||||
function uploader(instanceToStore) {
|
||||
return dataStore("uploader", instanceToStore);
|
||||
}
|
||||
|
||||
function pluginOption(option, optionVal) {
|
||||
return dataStore(option, optionVal);
|
||||
}
|
||||
|
||||
// Implement all callbacks defined in Fine Uploader as functions that trigger appropriately named events and
|
||||
// return the result of executing the bound handler back to Fine Uploader
|
||||
function addCallbacks(transformedOpts, newUploaderInstance) {
|
||||
var callbacks = transformedOpts.callbacks = {};
|
||||
|
||||
$.each(newUploaderInstance._options.callbacks, function(prop, nonJqueryCallback) {
|
||||
var name, callbackEventTarget;
|
||||
|
||||
name = /^on(\w+)/.exec(prop)[1];
|
||||
name = name.substring(0, 1).toLowerCase() + name.substring(1);
|
||||
callbackEventTarget = $el;
|
||||
|
||||
callbacks[prop] = function() {
|
||||
var originalArgs = Array.prototype.slice.call(arguments),
|
||||
transformedArgs = [],
|
||||
nonJqueryCallbackRetVal, jqueryEventCallbackRetVal;
|
||||
|
||||
$.each(originalArgs, function(idx, arg) {
|
||||
transformedArgs.push(maybeWrapInJquery(arg));
|
||||
});
|
||||
|
||||
nonJqueryCallbackRetVal = nonJqueryCallback.apply(this, originalArgs);
|
||||
|
||||
try {
|
||||
jqueryEventCallbackRetVal = callbackEventTarget.triggerHandler(name, transformedArgs);
|
||||
}
|
||||
catch (error) {
|
||||
qq.log("Caught error in Fine Uploader jQuery event handler: " + error.message, "error");
|
||||
}
|
||||
|
||||
/*jshint -W116*/
|
||||
if (nonJqueryCallbackRetVal != null) {
|
||||
return nonJqueryCallbackRetVal;
|
||||
}
|
||||
return jqueryEventCallbackRetVal;
|
||||
};
|
||||
});
|
||||
|
||||
newUploaderInstance._options.callbacks = callbacks;
|
||||
}
|
||||
|
||||
//transform jQuery objects into HTMLElements, and pass along all other option properties
|
||||
function transformVariables(source, dest) {
|
||||
var xformed, arrayVals;
|
||||
|
||||
if (dest === undefined) {
|
||||
if (source.uploaderType !== "basic") {
|
||||
xformed = { element : $el[0] };
|
||||
}
|
||||
else {
|
||||
xformed = {};
|
||||
}
|
||||
}
|
||||
else {
|
||||
xformed = dest;
|
||||
}
|
||||
|
||||
$.each(source, function(prop, val) {
|
||||
if ($.inArray(prop, pluginOptions) >= 0) {
|
||||
pluginOption(prop, val);
|
||||
}
|
||||
else if (val instanceof $) {
|
||||
xformed[prop] = val[0];
|
||||
}
|
||||
else if ($.isPlainObject(val)) {
|
||||
xformed[prop] = {};
|
||||
transformVariables(val, xformed[prop]);
|
||||
}
|
||||
else if ($.isArray(val)) {
|
||||
arrayVals = [];
|
||||
$.each(val, function(idx, arrayVal) {
|
||||
var arrayObjDest = {};
|
||||
|
||||
if (arrayVal instanceof $) {
|
||||
$.merge(arrayVals, arrayVal);
|
||||
}
|
||||
else if ($.isPlainObject(arrayVal)) {
|
||||
transformVariables(arrayVal, arrayObjDest);
|
||||
arrayVals.push(arrayObjDest);
|
||||
}
|
||||
else {
|
||||
arrayVals.push(arrayVal);
|
||||
}
|
||||
});
|
||||
xformed[prop] = arrayVals;
|
||||
}
|
||||
else {
|
||||
xformed[prop] = val;
|
||||
}
|
||||
});
|
||||
|
||||
if (dest === undefined) {
|
||||
return xformed;
|
||||
}
|
||||
}
|
||||
|
||||
function isValidCommand(command) {
|
||||
return $.type(command) === "string" &&
|
||||
!command.match(/^_/) && //enforce private methods convention
|
||||
uploader()[command] !== undefined;
|
||||
}
|
||||
|
||||
// Assuming we have already verified that this is a valid command, call the associated function in the underlying
|
||||
// Fine Uploader instance (passing along the arguments from the caller) and return the result of the call back to the caller
|
||||
function delegateCommand(command) {
|
||||
var xformedArgs = [],
|
||||
origArgs = Array.prototype.slice.call(arguments, 1),
|
||||
retVal;
|
||||
|
||||
transformVariables(origArgs, xformedArgs);
|
||||
|
||||
retVal = uploader()[command].apply(uploader(), xformedArgs);
|
||||
|
||||
return maybeWrapInJquery(retVal);
|
||||
}
|
||||
|
||||
// If the value is an `HTMLElement` or `HTMLDocument`, wrap it in a `jQuery` object
|
||||
function maybeWrapInJquery(val) {
|
||||
var transformedVal = val;
|
||||
|
||||
// If the command is returning an `HTMLElement` or `HTMLDocument`, wrap it in a `jQuery` object
|
||||
/*jshint -W116*/
|
||||
if (val != null && typeof val === "object" &&
|
||||
(val.nodeType === 1 || val.nodeType === 9) && val.cloneNode) {
|
||||
|
||||
transformedVal = $(val);
|
||||
}
|
||||
|
||||
return transformedVal;
|
||||
}
|
||||
|
||||
$.fn.fineUploader = function(optionsOrCommand) {
|
||||
var self = this, selfArgs = arguments, retVals = [];
|
||||
|
||||
this.each(function(index, el) {
|
||||
$el = $(el);
|
||||
|
||||
if (uploader() && isValidCommand(optionsOrCommand)) {
|
||||
retVals.push(delegateCommand.apply(self, selfArgs));
|
||||
|
||||
if (self.length === 1) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else if (typeof optionsOrCommand === "object" || !optionsOrCommand) {
|
||||
init.apply(self, selfArgs);
|
||||
}
|
||||
else {
|
||||
$.error("Method " + optionsOrCommand + " does not exist on jQuery.fineUploader");
|
||||
}
|
||||
});
|
||||
|
||||
if (retVals.length === 1) {
|
||||
return retVals[0];
|
||||
}
|
||||
else if (retVals.length > 1) {
|
||||
return retVals;
|
||||
}
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
}(jQuery));
|
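For reference, a minimal usage sketch of the jQuery wrapper defined above, assuming a hypothetical "#uploader" element and "/upload" endpoint. The first call passes an options object to initialize the wrapped instance; later calls with a string argument delegate that command to the underlying Fine Uploader object, and DOM return values come back wrapped in jQuery objects.

// Hypothetical element and endpoint, for illustration only.
$("#uploader").fineUploader({
    request: { endpoint: "/upload" }
});

// String arguments are treated as commands and delegated to the wrapped instance.
var uploads = $("#uploader").fineUploader("getUploads");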
@ -1,112 +0,0 @@
|
||||
/*globals qq*/
|
||||
/**
|
||||
* Defines the public API for non-traditional FineUploaderBasic mode.
|
||||
*/
|
||||
(function(){
|
||||
"use strict";
|
||||
|
||||
qq.nonTraditionalBasePublicApi = {
|
||||
setUploadSuccessParams: function(params, id) {
|
||||
this._uploadSuccessParamsStore.set(params, id);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
||||
|
||||
qq.nonTraditionalBasePrivateApi = {
|
||||
/**
|
||||
* When the upload has completed, if it is successful, send a request to the `successEndpoint` (if defined).
|
||||
* This will hold up the call to the `onComplete` callback until we have determined success of the upload
|
||||
* according to the local server, if a `successEndpoint` has been defined by the integrator.
|
||||
*
|
||||
* @param id ID of the completed upload
|
||||
* @param name Name of the associated item
|
||||
* @param result Object created from the server's parsed JSON response.
|
||||
* @param xhr Associated XmlHttpRequest, if this was used to send the request.
|
||||
* @returns {boolean|qq.Promise} true/false if success can be determined immediately, otherwise a `qq.Promise`
|
||||
* if we need to ask the server.
|
||||
* @private
|
||||
*/
|
||||
_onComplete: function(id, name, result, xhr) {
|
||||
var success = result.success ? true : false,
|
||||
self = this,
|
||||
onCompleteArgs = arguments,
|
||||
successEndpoint = this._options.uploadSuccess.endpoint,
|
||||
successCustomHeaders = this._options.uploadSuccess.customHeaders,
|
||||
cors = this._options.cors,
|
||||
promise = new qq.Promise(),
|
||||
uploadSuccessParams = this._uploadSuccessParamsStore.get(id),
|
||||
|
||||
// If we are waiting for confirmation from the local server, and have received it,
|
||||
// include properties from the local server response in the `response` parameter
|
||||
// sent to the `onComplete` callback, delegate to the parent `_onComplete`, and
|
||||
// fulfill the associated promise.
|
||||
onSuccessFromServer = function(successRequestResult) {
|
||||
delete self._failedSuccessRequestCallbacks[id];
|
||||
qq.extend(result, successRequestResult);
|
||||
qq.FineUploaderBasic.prototype._onComplete.apply(self, onCompleteArgs);
|
||||
promise.success(successRequestResult);
|
||||
},
|
||||
|
||||
// If the upload success request fails, attempt to re-send the success request (via the core retry code).
|
||||
// The entire upload may be restarted if the server returns a "reset" property with a value of true as well.
|
||||
onFailureFromServer = function(successRequestResult) {
|
||||
var callback = submitSuccessRequest;
|
||||
|
||||
qq.extend(result, successRequestResult);
|
||||
|
||||
if (result && result.reset) {
|
||||
callback = null;
|
||||
}
|
||||
|
||||
if (!callback) {
|
||||
delete self._failedSuccessRequestCallbacks[id];
|
||||
}
|
||||
else {
|
||||
self._failedSuccessRequestCallbacks[id] = callback;
|
||||
}
|
||||
|
||||
if (!self._onAutoRetry(id, name, result, xhr, callback)) {
|
||||
qq.FineUploaderBasic.prototype._onComplete.apply(self, onCompleteArgs);
|
||||
promise.failure(successRequestResult);
|
||||
}
|
||||
},
|
||||
submitSuccessRequest,
|
||||
successAjaxRequester;
|
||||
|
||||
// Ask the local server if the file sent is ok.
|
||||
if (success && successEndpoint) {
|
||||
successAjaxRequester = new qq.UploadSuccessAjaxRequester({
|
||||
endpoint: successEndpoint,
|
||||
customHeaders: successCustomHeaders,
|
||||
cors: cors,
|
||||
log: qq.bind(this.log, this)
|
||||
});
|
||||
|
||||
|
||||
// combine custom params and default params
|
||||
qq.extend(uploadSuccessParams, self._getEndpointSpecificParams(id, result, xhr), true);
|
||||
|
||||
submitSuccessRequest = qq.bind(function() {
|
||||
successAjaxRequester.sendSuccessRequest(id, uploadSuccessParams)
|
||||
.then(onSuccessFromServer, onFailureFromServer);
|
||||
}, self);
|
||||
|
||||
submitSuccessRequest();
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
// If we are not asking the local server about the file, just delegate to the parent `_onComplete`.
|
||||
return qq.FineUploaderBasic.prototype._onComplete.apply(this, arguments);
|
||||
},
|
||||
|
||||
// If the failure occurred on an upload success request (and a reset was not ordered), try to resend that instead.
|
||||
_manualRetry: function(id) {
|
||||
var successRequestCallback = this._failedSuccessRequestCallbacks[id];
|
||||
|
||||
return qq.FineUploaderBasic.prototype._manualRetry.call(this, id, successRequestCallback);
|
||||
}
|
||||
};
|
||||
}());
|
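A hedged configuration sketch showing how the `uploadSuccess` options read by `_onComplete` above might be supplied when creating an S3-mode uploader; the bucket, signing, and success endpoint URLs are invented for the example. The local server's JSON reply is merged into the `onComplete` response, and a failure reply containing `reset: true` forces the whole upload to restart.

var uploader = new qq.s3.FineUploaderBasic({
    request: { endpoint: "example-bucket.s3.amazonaws.com" },   // invented bucket endpoint
    signature: { endpoint: "/s3/signature" },                   // invented local signing endpoint
    uploadSuccess: {
        endpoint: "/s3/success",                                // local server confirms each upload before onComplete fires
        customHeaders: { "X-Example-Header": "value" }          // invented header
    }
});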
@ -1,49 +0,0 @@
|
||||
/*globals qq*/
|
||||
qq.PasteSupport = function(o) {
|
||||
"use strict";
|
||||
|
||||
var options, detachPasteHandler;
|
||||
|
||||
options = {
|
||||
targetElement: null,
|
||||
callbacks: {
|
||||
log: function(message, level) {},
|
||||
pasteReceived: function(blob) {}
|
||||
}
|
||||
};
|
||||
|
||||
function isImage(item) {
|
||||
return item.type &&
|
||||
item.type.indexOf("image/") === 0;
|
||||
}
|
||||
|
||||
function registerPasteHandler() {
|
||||
detachPasteHandler = qq(options.targetElement).attach("paste", function(event) {
|
||||
var clipboardData = event.clipboardData;
|
||||
|
||||
if (clipboardData) {
|
||||
qq.each(clipboardData.items, function(idx, item) {
|
||||
if (isImage(item)) {
|
||||
var blob = item.getAsFile();
|
||||
options.callbacks.pasteReceived(blob);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function unregisterPasteHandler() {
|
||||
if (detachPasteHandler) {
|
||||
detachPasteHandler();
|
||||
}
|
||||
}
|
||||
|
||||
qq.extend(options, o);
|
||||
registerPasteHandler();
|
||||
|
||||
qq.extend(this, {
|
||||
reset: function() {
|
||||
unregisterPasteHandler();
|
||||
}
|
||||
});
|
||||
};
|
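A short usage sketch for the paste module above; the target element is a placeholder, and the uploader hand-off suggested in the callback comment is only one possible way to consume the pasted Blob.

var pasteSupport = new qq.PasteSupport({
    targetElement: document.getElementById("paste-target"),    // placeholder element
    callbacks: {
        log: function(message, level) { console.log(level + ": " + message); },
        pasteReceived: function(blob) {
            // Hand the pasted image Blob to an uploader here,
            // e.g. addBlobs(blob) on an existing Fine Uploader instance.
        }
    }
});

// Detach the paste handler when it is no longer needed.
pasteSupport.reset();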
@ -1,87 +0,0 @@
|
||||
/*globals qq*/
|
||||
|
||||
// Is the passed object a promise instance?
|
||||
qq.isGenericPromise = function(maybePromise) {
|
||||
"use strict";
|
||||
return !!(maybePromise && maybePromise.then && qq.isFunction(maybePromise.then));
|
||||
};
|
||||
|
||||
qq.Promise = function() {
|
||||
"use strict";
|
||||
|
||||
var successArgs, failureArgs,
|
||||
successCallbacks = [],
|
||||
failureCallbacks = [],
|
||||
doneCallbacks = [],
|
||||
state = 0;
|
||||
|
||||
qq.extend(this, {
|
||||
then: function(onSuccess, onFailure) {
|
||||
if (state === 0) {
|
||||
if (onSuccess) {
|
||||
successCallbacks.push(onSuccess);
|
||||
}
|
||||
if (onFailure) {
|
||||
failureCallbacks.push(onFailure);
|
||||
}
|
||||
}
|
||||
else if (state === -1) {
|
||||
onFailure && onFailure.apply(null, failureArgs);
|
||||
}
|
||||
else if (onSuccess) {
|
||||
onSuccess.apply(null,successArgs);
|
||||
}
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
done: function(callback) {
|
||||
if (state === 0) {
|
||||
doneCallbacks.push(callback);
|
||||
}
|
||||
else {
|
||||
callback.apply(null, failureArgs === undefined ? successArgs : failureArgs);
|
||||
}
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
success: function() {
|
||||
state = 1;
|
||||
successArgs = arguments;
|
||||
|
||||
if (successCallbacks.length) {
|
||||
qq.each(successCallbacks, function(idx, callback) {
|
||||
callback.apply(null, successArgs);
|
||||
});
|
||||
}
|
||||
|
||||
if(doneCallbacks.length) {
|
||||
qq.each(doneCallbacks, function(idx, callback) {
|
||||
callback.apply(null, successArgs);
|
||||
});
|
||||
}
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
failure: function() {
|
||||
state = -1;
|
||||
failureArgs = arguments;
|
||||
|
||||
if (failureCallbacks.length) {
|
||||
qq.each(failureCallbacks, function(idx, callback) {
|
||||
callback.apply(null, failureArgs);
|
||||
});
|
||||
}
|
||||
|
||||
if(doneCallbacks.length) {
|
||||
qq.each(doneCallbacks, function(idx, callback) {
|
||||
callback.apply(null, failureArgs);
|
||||
});
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
});
|
||||
};
|
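A brief sketch of how this promise implementation is consumed elsewhere in the code; the asynchronous work simulated here with setTimeout stands in for a real signature or upload request, and the payload is invented.

function signRequest() {
    var promise = new qq.Promise();

    // Simulate asynchronous completion: success() invokes the callbacks
    // registered via then(), followed by any done() callbacks.
    setTimeout(function() {
        promise.success({ signature: "abc123" });   // invented payload
    }, 0);

    return promise;
}

signRequest()
    .then(function(response) { console.log("signed: " + response.signature); },
          function(error) { console.log("failed: " + error); })
    .done(function() { console.log("finished, success or failure"); });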
@ -1,22 +0,0 @@
|
||||
/*globals jQuery*/
|
||||
/**
|
||||
* Simply an alias for the `fineUploader` plug-in wrapper, but hides the required `endpointType` option from the
|
||||
* integrator. I thought it may be confusing to convey to the integrator that, when using Fine Uploader in S3 mode,
|
||||
* you need to specify an `endpointType` with a value of S3, and perhaps an `uploaderType` with a value of "basic" if
|
||||
* you want to use basic mode when uploading directly to S3 as well. So, you can use this plug-in alias and not worry
|
||||
* about the `endpointType` option at all.
|
||||
*/
|
||||
(function($) {
|
||||
"use strict";
|
||||
|
||||
$.fn.fineUploaderS3 = function(optionsOrCommand) {
|
||||
if (typeof optionsOrCommand === "object") {
|
||||
|
||||
// This option is used to tell the plug-in wrapper to instantiate the appropriate S3-namespace modules.
|
||||
optionsOrCommand.endpointType = "s3";
|
||||
}
|
||||
|
||||
return $.fn.fineUploader.apply(this, arguments);
|
||||
};
|
||||
|
||||
}(jQuery));
|
@ -1,127 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* Ajax requester used to send an ["Abort Multipart Upload"](http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadAbort.html)
|
||||
* request to S3 via the REST API.
|
||||
|
||||
* @param o Options passed by the creator, to overwrite any default option values.
|
||||
* @constructor
|
||||
*/
|
||||
qq.s3.AbortMultipartAjaxRequester = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
options = {
|
||||
method: "DELETE",
|
||||
endpointStore: null,
|
||||
signatureSpec: null,
|
||||
maxConnections: 3,
|
||||
getKey: function(id) {},
|
||||
log: function(str, level) {}
|
||||
},
|
||||
getSignatureAjaxRequester;
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
// Transport for requesting signatures (for the "Complete" requests) from the local server
|
||||
getSignatureAjaxRequester = new qq.s3.RequestSigner({
|
||||
signatureSpec: options.signatureSpec,
|
||||
cors: options.cors,
|
||||
log: options.log
|
||||
});
|
||||
|
||||
/**
|
||||
* Attach all required headers (including Authorization) to the "Abort" request. This is a promissory function
|
||||
* that will fulfill the associated promise once all headers have been attached or when an error has occurred that
|
||||
* prevents headers from being attached.
|
||||
*
|
||||
* @param id Associated file ID
|
||||
* @param uploadId ID of the associated upload, according to AWS
|
||||
* @returns {qq.Promise}
|
||||
*/
|
||||
function getHeaders(id, uploadId) {
|
||||
var headers = {},
|
||||
promise = new qq.Promise(),
|
||||
endpoint = options.endpointStore.get(id),
|
||||
bucket = qq.s3.util.getBucket(endpoint),
|
||||
signatureConstructor = getSignatureAjaxRequester.constructStringToSign
|
||||
(getSignatureAjaxRequester.REQUEST_TYPE.MULTIPART_ABORT, bucket, options.getKey(id))
|
||||
.withUploadId(uploadId);
|
||||
|
||||
// Ask the local server to sign the request. Use this signature to form the Authorization header.
|
||||
getSignatureAjaxRequester.getSignature(id, {signatureConstructor: signatureConstructor}).then(function(response) {
|
||||
headers = signatureConstructor.getHeaders();
|
||||
headers.Authorization = "AWS " + options.signatureSpec.credentialsProvider.get().accessKey + ":" + response.signature;
|
||||
promise.success(headers, signatureConstructor.getEndOfUrl());
|
||||
}, promise.failure);
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Called by the base ajax requester when the response has been received. We definitively determine here if the
|
||||
* "Abort MPU" request has been a success or not.
|
||||
*
|
||||
* @param id ID associated with the file.
|
||||
* @param xhr `XMLHttpRequest` object containing the response, among other things.
|
||||
* @param isError A boolean indicating success or failure according to the base ajax requester (primarily based on status code).
|
||||
*/
|
||||
function handleAbortRequestComplete(id, xhr, isError) {
|
||||
var domParser = new DOMParser(),
|
||||
responseDoc = domParser.parseFromString(xhr.responseText, "application/xml"),
|
||||
errorEls = responseDoc.getElementsByTagName("Error"),
|
||||
awsErrorMsg;
|
||||
|
||||
|
||||
options.log(qq.format("Abort response status {}, body = {}", xhr.status, xhr.responseText));
|
||||
|
||||
// If the base requester has determined this is a failure, give up.
|
||||
if (isError) {
|
||||
options.log(qq.format("Abort Multipart Upload request for {} failed with status {}.", id, xhr.status), "error");
|
||||
}
|
||||
else {
|
||||
// Check whether AWS reported an error in the XML response.
|
||||
if (errorEls.length) {
|
||||
isError = true;
|
||||
awsErrorMsg = responseDoc.getElementsByTagName("Message")[0].textContent;
|
||||
options.log(qq.format("Failed to Abort Multipart Upload request for {}. Error: {}", id, awsErrorMsg), "error");
|
||||
}
|
||||
else {
|
||||
options.log(qq.format("Abort MPU request succeeded for file ID {}.", id));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
validMethods: ["DELETE"],
|
||||
method: options.method,
|
||||
contentType: null,
|
||||
endpointStore: options.endpointStore,
|
||||
maxConnections: options.maxConnections,
|
||||
allowXRequestedWithAndCacheControl: false, //These headers are not necessary & would break some installations if added
|
||||
log: options.log,
|
||||
onComplete: handleAbortRequestComplete,
|
||||
successfulResponseCodes: {
|
||||
DELETE: [204]
|
||||
}
|
||||
}));
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* Sends the "Abort" request.
|
||||
*
|
||||
* @param id ID associated with the file.
|
||||
* @param uploadId AWS uploadId for this file
|
||||
*/
|
||||
send: function(id, uploadId) {
|
||||
getHeaders(id, uploadId).then(function(headers, endOfUrl) {
|
||||
options.log("Submitting S3 Abort multipart upload request for " + id);
|
||||
requester.initTransport(id)
|
||||
.withPath(endOfUrl)
|
||||
.withHeaders(headers)
|
||||
.send();
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
@ -1,190 +0,0 @@
|
||||
/*globals qq*/
|
||||
/**
|
||||
* Ajax requester used to send a ["Complete Multipart Upload"](http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html)
|
||||
* request to S3 via the REST API.
|
||||
*
|
||||
* @param o Options passed by the creator, to overwrite any default option values.
|
||||
* @constructor
|
||||
*/
|
||||
qq.s3.CompleteMultipartAjaxRequester = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
pendingCompleteRequests = {},
|
||||
options = {
|
||||
method: "POST",
|
||||
contentType: "text/xml",
|
||||
endpointStore: null,
|
||||
signatureSpec: null,
|
||||
maxConnections: 3,
|
||||
getKey: function(id) {},
|
||||
log: function(str, level) {}
|
||||
},
|
||||
getSignatureAjaxRequester;
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
// Transport for requesting signatures (for the "Complete" requests) from the local server
|
||||
getSignatureAjaxRequester = new qq.s3.RequestSigner({
|
||||
signatureSpec: options.signatureSpec,
|
||||
cors: options.cors,
|
||||
log: options.log
|
||||
});
|
||||
|
||||
/**
|
||||
* Attach all required headers (including Authorization) to the "Complete" request. This is a promissory function
|
||||
* that will fulfill the associated promise once all headers have been attached or when an error has occurred that
|
||||
* prevents headers from being attached.
|
||||
*
|
||||
* @param id Associated file ID
|
||||
* @param uploadId ID of the associated upload, according to AWS
|
||||
* @returns {qq.Promise}
|
||||
*/
|
||||
function getHeaders(id, uploadId) {
|
||||
var headers = {},
|
||||
promise = new qq.Promise(),
|
||||
endpoint = options.endpointStore.get(id),
|
||||
bucket = qq.s3.util.getBucket(endpoint),
|
||||
signatureConstructor = getSignatureAjaxRequester.constructStringToSign
|
||||
(getSignatureAjaxRequester.REQUEST_TYPE.MULTIPART_COMPLETE, bucket, options.getKey(id))
|
||||
.withUploadId(uploadId)
|
||||
.withContentType("application/xml; charset=UTF-8");
|
||||
|
||||
// Ask the local server to sign the request. Use this signature to form the Authorization header.
|
||||
getSignatureAjaxRequester.getSignature(id, {signatureConstructor: signatureConstructor}).then(function(response) {
|
||||
headers = signatureConstructor.getHeaders();
|
||||
headers.Authorization = "AWS " + options.signatureSpec.credentialsProvider.get().accessKey + ":" + response.signature;
|
||||
promise.success(headers, signatureConstructor.getEndOfUrl());
|
||||
}, promise.failure);
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Called by the base ajax requester when the response has been received. We definitively determine here if the
|
||||
* "Complete MPU" request has been a success or not.
|
||||
*
|
||||
* @param id ID associated with the file.
|
||||
* @param xhr `XMLHttpRequest` object containing the response, among other things.
|
||||
* @param isError A boolean indicating success or failure according to the base ajax requester (primarily based on status code).
|
||||
*/
|
||||
function handleCompleteRequestComplete(id, xhr, isError) {
|
||||
var promise = pendingCompleteRequests[id],
|
||||
domParser = new DOMParser(),
|
||||
endpoint = options.endpointStore.get(id),
|
||||
bucket = qq.s3.util.getBucket(endpoint),
|
||||
key = options.getKey(id),
|
||||
responseDoc = domParser.parseFromString(xhr.responseText, "application/xml"),
|
||||
bucketEls = responseDoc.getElementsByTagName("Bucket"),
|
||||
keyEls = responseDoc.getElementsByTagName("Key");
|
||||
|
||||
delete pendingCompleteRequests[id];
|
||||
|
||||
options.log(qq.format("Complete response status {}, body = {}", xhr.status, xhr.responseText));
|
||||
|
||||
// If the base requester has determined this is a failure, give up.
|
||||
if (isError) {
|
||||
options.log(qq.format("Complete Multipart Upload request for {} failed with status {}.", id, xhr.status), "error");
|
||||
}
|
||||
else {
|
||||
// Make sure the correct bucket and key have been specified in the XML response from AWS.
|
||||
if (bucketEls.length && keyEls.length) {
|
||||
if (bucketEls[0].textContent !== bucket) {
|
||||
isError = true;
|
||||
options.log(qq.format("Wrong bucket in response to Complete Multipart Upload request for {}.", id), "error");
|
||||
}
|
||||
|
||||
// TODO Compare key name from response w/ expected key name if AWS ever fixes the encoding of key names in this response.
|
||||
}
|
||||
else {
|
||||
isError = true;
|
||||
options.log(qq.format("Missing bucket and/or key in response to Complete Multipart Upload request for {}.", id), "error");
|
||||
}
|
||||
}
|
||||
|
||||
if (isError) {
|
||||
promise.failure("Problem asking Amazon to combine the parts!", xhr);
|
||||
}
|
||||
else {
|
||||
promise.success({}, xhr);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param etagEntries Array of objects containing `etag` values and their associated `part` numbers.
|
||||
* @returns {string} XML string containing the body to send with the "Complete" request
|
||||
*/
|
||||
function getCompleteRequestBody(etagEntries) {
|
||||
var doc = document.implementation.createDocument(null, "CompleteMultipartUpload", null);
|
||||
|
||||
// The entries MUST be sorted by part number, per the AWS API spec.
|
||||
etagEntries.sort(function(a, b) {
|
||||
return a.part - b.part;
|
||||
});
|
||||
|
||||
// Add a Part element to the XML document for each etag/part pair that corresponds to a part upload.
|
||||
qq.each(etagEntries, function(idx, etagEntry) {
|
||||
var part = etagEntry.part,
|
||||
etag = etagEntry.etag,
|
||||
partEl = doc.createElement("Part"),
|
||||
partNumEl = doc.createElement("PartNumber"),
|
||||
partNumTextEl = doc.createTextNode(part),
|
||||
etagTextEl = doc.createTextNode(etag),
|
||||
etagEl = doc.createElement("ETag");
|
||||
|
||||
etagEl.appendChild(etagTextEl);
|
||||
partNumEl.appendChild(partNumTextEl);
|
||||
partEl.appendChild(partNumEl);
|
||||
partEl.appendChild(etagEl);
|
||||
qq(doc).children()[0].appendChild(partEl);
|
||||
});
|
||||
|
||||
// Turn the resulting XML document into a string fit for transport.
|
||||
return new XMLSerializer().serializeToString(doc);
|
||||
}
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
method: options.method,
|
||||
contentType: "application/xml; charset=UTF-8",
|
||||
endpointStore: options.endpointStore,
|
||||
maxConnections: options.maxConnections,
|
||||
allowXRequestedWithAndCacheControl: false, //These headers are not necessary & would break some installations if added
|
||||
log: options.log,
|
||||
onComplete: handleCompleteRequestComplete,
|
||||
successfulResponseCodes: {
|
||||
POST: [200]
|
||||
}
|
||||
}));
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* Sends the "Complete" request and fulfills the returned promise when the success of this request is known.
|
||||
*
|
||||
* @param id ID associated with the file.
|
||||
* @param uploadId AWS uploadId for this file
|
||||
* @param etagEntries Array of objects containing `etag` values and their associated `part` numbers.
|
||||
* @returns {qq.Promise}
|
||||
*/
|
||||
send: function(id, uploadId, etagEntries) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
getHeaders(id, uploadId).then(function(headers, endOfUrl) {
|
||||
var body = getCompleteRequestBody(etagEntries);
|
||||
|
||||
options.log("Submitting S3 complete multipart upload request for " + id);
|
||||
|
||||
pendingCompleteRequests[id] = promise;
|
||||
delete headers["Content-Type"];
|
||||
|
||||
requester.initTransport(id)
|
||||
.withPath(endOfUrl)
|
||||
.withHeaders(headers)
|
||||
.withPayload(body)
|
||||
.send();
|
||||
}, promise.failure);
|
||||
|
||||
return promise;
|
||||
}
|
||||
});
|
||||
};
|
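A hedged sketch of driving the requester above and of the XML body it produces; the file ID, upload ID, ETag values, endpoint, and option stores are invented stand-ins for what the surrounding upload handler would normally supply.

// Invented stubs for the stores the real upload handler provides.
var endpointStore = { get: function(id) { return "example-bucket.s3.amazonaws.com"; } };
var signatureSpec = { endpoint: "/s3/signature", credentialsProvider: { get: function() { return {}; } }, customHeaders: {} };

var completeRequester = new qq.s3.CompleteMultipartAjaxRequester({
    endpointStore: endpointStore,
    signatureSpec: signatureSpec,
    getKey: function(id) { return "example-key"; },
    log: function(msg, level) {}
});

completeRequester.send(0, "EXAMPLE_UPLOAD_ID", [
    { part: 2, etag: "\"bbb\"" },                   // invented ETags; entries are sorted by part number
    { part: 1, etag: "\"aaa\"" }                    // before serialization, per the AWS API spec
]).then(
    function(response, xhr) { /* AWS confirmed the combined object */ },
    function(reason, xhr) { /* e.g. "Problem asking Amazon to combine the parts!" */ }
);

// The serialized body sent with the POST looks roughly like:
//   <CompleteMultipartUpload>
//     <Part><PartNumber>1</PartNumber><ETag>"aaa"</ETag></Part>
//     <Part><PartNumber>2</PartNumber><ETag>"bbb"</ETag></Part>
//   </CompleteMultipartUpload>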
@ -1,179 +0,0 @@
|
||||
/*globals qq*/
|
||||
/**
|
||||
* Ajax requester used to send an ["Initiate Multipart Upload"](http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadInitiate.html)
|
||||
* request to S3 via the REST API.
|
||||
*
|
||||
* @param o Options from the caller - will override the defaults.
|
||||
* @constructor
|
||||
*/
|
||||
qq.s3.InitiateMultipartAjaxRequester = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
pendingInitiateRequests = {},
|
||||
options = {
|
||||
filenameParam: "qqfilename",
|
||||
method: "POST",
|
||||
endpointStore: null,
|
||||
paramsStore: null,
|
||||
signatureSpec: null,
|
||||
aclStore: null,
|
||||
reducedRedundancy: false,
|
||||
serverSideEncryption: false,
|
||||
maxConnections: 3,
|
||||
getContentType: function(id) {},
|
||||
getKey: function(id) {},
|
||||
getName: function(id) {},
|
||||
log: function(str, level) {}
|
||||
},
|
||||
getSignatureAjaxRequester;
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
getSignatureAjaxRequester = new qq.s3.RequestSigner({
|
||||
signatureSpec: options.signatureSpec,
|
||||
cors: options.cors,
|
||||
log: options.log
|
||||
});
|
||||
|
||||
|
||||
/**
|
||||
* Determine all headers for the "Initiate MPU" request, including the "Authorization" header, which must be determined
|
||||
* by the local server. This is a promissory function. If the server responds with a signature, the headers
|
||||
* (including the Authorization header) will be passed into the success method of the promise. Otherwise, the failure
|
||||
* method on the promise will be called.
|
||||
*
|
||||
* @param id Associated file ID
|
||||
* @returns {qq.Promise}
|
||||
*/
|
||||
function getHeaders(id) {
|
||||
var bucket = qq.s3.util.getBucket(options.endpointStore.get(id)),
|
||||
headers = {},
|
||||
promise = new qq.Promise(),
|
||||
key = options.getKey(id),
|
||||
signatureConstructor;
|
||||
|
||||
headers["x-amz-acl"] = options.aclStore.get(id);
|
||||
|
||||
if (options.reducedRedundancy) {
|
||||
headers[qq.s3.util.REDUCED_REDUNDANCY_PARAM_NAME] = qq.s3.util.REDUCED_REDUNDANCY_PARAM_VALUE;
|
||||
}
|
||||
|
||||
if (options.serverSideEncryption) {
|
||||
headers[qq.s3.util.SERVER_SIDE_ENCRYPTION_PARAM_NAME] = qq.s3.util.SERVER_SIDE_ENCRYPTION_PARAM_VALUE;
|
||||
}
|
||||
|
||||
headers[qq.s3.util.AWS_PARAM_PREFIX + options.filenameParam] = encodeURIComponent(options.getName(id));
|
||||
|
||||
qq.each(options.paramsStore.get(id), function(name, val) {
|
||||
headers[qq.s3.util.AWS_PARAM_PREFIX + name] = encodeURIComponent(val);
|
||||
});
|
||||
|
||||
signatureConstructor = getSignatureAjaxRequester.constructStringToSign
|
||||
(getSignatureAjaxRequester.REQUEST_TYPE.MULTIPART_INITIATE, bucket, key)
|
||||
.withContentType(options.getContentType(id))
|
||||
.withHeaders(headers);
|
||||
|
||||
// Ask the local server to sign the request. Use this signature to form the Authorization header.
|
||||
getSignatureAjaxRequester.getSignature(id, {signatureConstructor: signatureConstructor}).then(function(response) {
|
||||
headers = signatureConstructor.getHeaders();
|
||||
headers.Authorization = "AWS " + options.signatureSpec.credentialsProvider.get().accessKey + ":" + response.signature;
|
||||
promise.success(headers, signatureConstructor.getEndOfUrl());
|
||||
}, promise.failure);
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Called by the base ajax requester when the response has been received. We definitively determine here if the
|
||||
* "Initiate MPU" request has been a success or not.
|
||||
*
|
||||
* @param id ID associated with the file.
|
||||
* @param xhr `XMLHttpRequest` object containing the response, among other things.
|
||||
* @param isError A boolean indicating success or failure according to the base ajax requester (primarily based on status code).
|
||||
*/
|
||||
function handleInitiateRequestComplete(id, xhr, isError) {
|
||||
var promise = pendingInitiateRequests[id],
|
||||
domParser = new DOMParser(),
|
||||
responseDoc = domParser.parseFromString(xhr.responseText, "application/xml"),
|
||||
uploadIdElements, messageElements, uploadId, errorMessage, status;
|
||||
|
||||
delete pendingInitiateRequests[id];
|
||||
|
||||
// The base ajax requester may declare the request to be a failure based on status code.
|
||||
if (isError) {
|
||||
status = xhr.status;
|
||||
|
||||
messageElements = responseDoc.getElementsByTagName("Message");
|
||||
if (messageElements.length > 0) {
|
||||
errorMessage = messageElements[0].textContent;
|
||||
}
|
||||
}
|
||||
// If the base ajax requester has not declared this a failure, make sure we can retrieve the uploadId from the response.
|
||||
else {
|
||||
uploadIdElements = responseDoc.getElementsByTagName("UploadId");
|
||||
if (uploadIdElements.length > 0) {
|
||||
uploadId = uploadIdElements[0].textContent;
|
||||
}
|
||||
else {
|
||||
errorMessage = "Upload ID missing from request";
|
||||
}
|
||||
}
|
||||
|
||||
// Either fail the promise (passing a descriptive error message) or declare it a success (passing the upload ID)
|
||||
if (uploadId === undefined) {
|
||||
if (errorMessage) {
|
||||
options.log(qq.format("Specific problem detected initiating multipart upload request for {}: '{}'.", id, errorMessage), "error");
|
||||
}
|
||||
else {
|
||||
options.log(qq.format("Unexplained error with initiate multipart upload request for {}. Status code {}.", id, status), "error");
|
||||
}
|
||||
|
||||
promise.failure("Problem initiating upload request with Amazon.", xhr);
|
||||
}
|
||||
else {
|
||||
options.log(qq.format("Initiate multipart upload request successful for {}. Upload ID is {}", id, uploadId));
|
||||
promise.success(uploadId, xhr);
|
||||
}
|
||||
}
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
method: options.method,
|
||||
contentType: null,
|
||||
endpointStore: options.endpointStore,
|
||||
maxConnections: options.maxConnections,
|
||||
allowXRequestedWithAndCacheControl: false, //These headers are not necessary & would break some installations if added
|
||||
log: options.log,
|
||||
onComplete: handleInitiateRequestComplete,
|
||||
successfulResponseCodes: {
|
||||
POST: [200]
|
||||
}
|
||||
}));
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* Sends the "Initiate MPU" request to AWS via the REST API. First, though, we must get a signature from the
|
||||
* local server for the request. If all is successful, the uploadId from AWS will be passed into the promise's
|
||||
* success handler. Otherwise, an error message will ultimately be passed into the failure method.
|
||||
*
|
||||
* @param id The ID associated with the file
|
||||
* @returns {qq.Promise}
|
||||
*/
|
||||
send: function(id) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
getHeaders(id).then(function(headers, endOfUrl) {
|
||||
options.log("Submitting S3 initiate multipart upload request for " + id);
|
||||
|
||||
pendingInitiateRequests[id] = promise;
|
||||
requester.initTransport(id)
|
||||
.withPath(endOfUrl)
|
||||
.withHeaders(headers)
|
||||
.send();
|
||||
}, promise.failure);
|
||||
|
||||
return promise;
|
||||
}
|
||||
});
|
||||
};
|
@ -1,315 +0,0 @@
|
||||
/* globals qq, CryptoJS */
|
||||
/**
|
||||
* Handles signature determination for HTML Form Upload requests and Multipart Uploader requests (via the S3 REST API).
|
||||
*
|
||||
* If the S3 requests are to be signed server side, this module will send a POST request to the server in an attempt
|
||||
* to solicit signatures for various S3-related requests. This module also parses the response and attempts
|
||||
* to determine if the effort was successful.
|
||||
*
|
||||
* If the S3 requests are to be signed client-side, without the help of a server, this module will utilize CryptoJS to
|
||||
* sign the requests directly in the browser and send them off to S3.
|
||||
*
|
||||
* @param o Options associated with all such requests
|
||||
* @returns {{getSignature: Function}} API method used to initiate the signature request.
|
||||
* @constructor
|
||||
*/
|
||||
qq.s3.RequestSigner = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
thisSignatureRequester = this,
|
||||
pendingSignatures = {},
|
||||
options = {
|
||||
expectingPolicy: false,
|
||||
method: "POST",
|
||||
signatureSpec: {
|
||||
credentialsProvider: {},
|
||||
endpoint: null,
|
||||
customHeaders: {}
|
||||
},
|
||||
maxConnections: 3,
|
||||
paramsStore: {},
|
||||
cors: {
|
||||
expected: false,
|
||||
sendCredentials: false
|
||||
},
|
||||
log: function(str, level) {}
|
||||
},
|
||||
credentialsProvider;
|
||||
|
||||
qq.extend(options, o, true);
|
||||
credentialsProvider = options.signatureSpec.credentialsProvider;
|
||||
|
||||
function handleSignatureReceived(id, xhrOrXdr, isError) {
|
||||
var responseJson = xhrOrXdr.responseText,
|
||||
pendingSignatureData = pendingSignatures[id],
|
||||
promise = pendingSignatureData.promise,
|
||||
errorMessage, response;
|
||||
|
||||
delete pendingSignatures[id];
|
||||
|
||||
// Attempt to parse what we would expect to be a JSON response
|
||||
if (responseJson) {
|
||||
try {
|
||||
response = qq.parseJson(responseJson);
|
||||
}
|
||||
catch (error) {
|
||||
options.log("Error attempting to parse signature response: " + error, "error");
|
||||
}
|
||||
}
|
||||
|
||||
// If we have received a parsable response, and it has an `invalid` property,
|
||||
// the policy document or request headers may have been tampered with client-side.
|
||||
if (response && response.invalid) {
|
||||
isError = true;
|
||||
errorMessage = "Invalid policy document or request headers!";
|
||||
}
|
||||
// Make sure the response contains policy & signature properties
|
||||
else if (response) {
|
||||
if (options.expectingPolicy && !response.policy) {
|
||||
isError = true;
|
||||
errorMessage = "Response does not include the base64 encoded policy!";
|
||||
}
|
||||
else if (!response.signature) {
|
||||
isError = true;
|
||||
errorMessage = "Response does not include the signature!";
|
||||
}
|
||||
}
|
||||
// Something unknown went wrong
|
||||
else {
|
||||
isError = true;
|
||||
errorMessage = "Received an empty or invalid response from the server!";
|
||||
}
|
||||
|
||||
if (isError) {
|
||||
if (errorMessage) {
|
||||
options.log(errorMessage, "error");
|
||||
}
|
||||
|
||||
promise.failure(errorMessage);
|
||||
}
|
||||
else {
|
||||
promise.success(response);
|
||||
}
|
||||
}
|
||||
|
||||
function getToSignAndEndOfUrl(type, bucket, key, contentType, headers, uploadId, partNum) {
|
||||
var method = "POST",
|
||||
headerNames = [],
|
||||
headersAsString = "",
|
||||
endOfUrl;
|
||||
|
||||
/*jshint indent:false */
|
||||
switch(type) {
|
||||
case thisSignatureRequester.REQUEST_TYPE.MULTIPART_ABORT:
|
||||
method = "DELETE";
|
||||
endOfUrl = qq.format("uploadId={}", uploadId);
|
||||
break;
|
||||
case thisSignatureRequester.REQUEST_TYPE.MULTIPART_INITIATE:
|
||||
endOfUrl = "uploads";
|
||||
break;
|
||||
case thisSignatureRequester.REQUEST_TYPE.MULTIPART_COMPLETE:
|
||||
endOfUrl = qq.format("uploadId={}", uploadId);
|
||||
break;
|
||||
case thisSignatureRequester.REQUEST_TYPE.MULTIPART_UPLOAD:
|
||||
method = "PUT";
|
||||
endOfUrl = qq.format("partNumber={}&uploadId={}", partNum, uploadId);
|
||||
break;
|
||||
}
|
||||
|
||||
endOfUrl = key + "?" + endOfUrl;
|
||||
|
||||
qq.each(headers, function(name) {
|
||||
headerNames.push(name);
|
||||
});
|
||||
headerNames.sort();
|
||||
|
||||
qq.each(headerNames, function(idx, name) {
|
||||
headersAsString += name + ":" + headers[name] + "\n";
|
||||
});
|
||||
|
||||
return {
|
||||
toSign: qq.format("{}\n\n{}\n\n{}/{}/{}",
|
||||
method, contentType || "", headersAsString || "\n", bucket, endOfUrl),
|
||||
endOfUrl: endOfUrl
|
||||
};
|
||||
}
|
||||
|
||||
function determineSignatureClientSide(toBeSigned, signatureEffort, updatedAccessKey, updatedSessionToken) {
|
||||
var updatedHeaders;
|
||||
|
||||
// REST API request
|
||||
if (toBeSigned.signatureConstructor) {
|
||||
if (updatedSessionToken) {
|
||||
updatedHeaders = toBeSigned.signatureConstructor.getHeaders();
|
||||
updatedHeaders[qq.s3.util.SESSION_TOKEN_PARAM_NAME] = updatedSessionToken;
|
||||
toBeSigned.signatureConstructor.withHeaders(updatedHeaders);
|
||||
}
|
||||
|
||||
signApiRequest(toBeSigned.signatureConstructor.getToSign().stringToSign, signatureEffort);
|
||||
}
|
||||
// Form upload (w/ policy document)
|
||||
else {
|
||||
updatedSessionToken && qq.s3.util.refreshPolicyCredentials(toBeSigned, updatedSessionToken);
|
||||
signPolicy(toBeSigned, signatureEffort, updatedAccessKey, updatedSessionToken);
|
||||
}
|
||||
}
|
||||
|
||||
function signPolicy(policy, signatureEffort, updatedAccessKey, updatedSessionToken) {
|
||||
var policyStr = JSON.stringify(policy),
|
||||
policyWordArray = CryptoJS.enc.Utf8.parse(policyStr),
|
||||
base64Policy = CryptoJS.enc.Base64.stringify(policyWordArray),
|
||||
policyHmacSha1 = CryptoJS.HmacSHA1(base64Policy, credentialsProvider.get().secretKey),
|
||||
policyHmacSha1Base64 = CryptoJS.enc.Base64.stringify(policyHmacSha1);
|
||||
|
||||
signatureEffort.success({
|
||||
policy: base64Policy,
|
||||
signature: policyHmacSha1Base64
|
||||
}, updatedAccessKey, updatedSessionToken);
|
||||
}
|
||||
|
||||
function signApiRequest(headersStr, signatureEffort) {
|
||||
var headersWordArray = CryptoJS.enc.Utf8.parse(headersStr),
|
||||
headersHmacSha1 = CryptoJS.HmacSHA1(headersWordArray, credentialsProvider.get().secretKey),
|
||||
headersHmacSha1Base64 = CryptoJS.enc.Base64.stringify(headersHmacSha1);
|
||||
|
||||
signatureEffort.success({signature: headersHmacSha1Base64});
|
||||
}
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
acceptHeader: "application/json",
|
||||
method: options.method,
|
||||
contentType: "application/json; charset=utf-8",
|
||||
endpointStore: {
|
||||
get: function() {
|
||||
return options.signatureSpec.endpoint;
|
||||
}
|
||||
},
|
||||
paramsStore: options.paramsStore,
|
||||
maxConnections: options.maxConnections,
|
||||
customHeaders: options.signatureSpec.customHeaders,
|
||||
log: options.log,
|
||||
onComplete: handleSignatureReceived,
|
||||
cors: options.cors,
|
||||
successfulResponseCodes: {
|
||||
POST: [200]
|
||||
}
|
||||
}));
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* On success, an object containing the parsed JSON response will be passed into the success handler if the
|
||||
* request succeeds. Otherwise an error message will be passed into the failure method.
|
||||
*
|
||||
* @param id File ID.
|
||||
* @param toBeSigned an Object that holds the item(s) to be signed
|
||||
* @returns {qq.Promise} A promise that is fulfilled when the response has been received.
|
||||
*/
|
||||
getSignature: function(id, toBeSigned) {
|
||||
var params = toBeSigned,
|
||||
signatureEffort = new qq.Promise();
|
||||
|
||||
if (credentialsProvider.get().secretKey && window.CryptoJS) {
|
||||
if (credentialsProvider.get().expiration.getTime() > Date.now()) {
|
||||
determineSignatureClientSide(toBeSigned, signatureEffort);
|
||||
}
|
||||
// If credentials are expired, ask for new ones before attempting to sign request
|
||||
else {
|
||||
credentialsProvider.onExpired().then(function() {
|
||||
determineSignatureClientSide(toBeSigned,
|
||||
signatureEffort,
|
||||
credentialsProvider.get().accessKey,
|
||||
credentialsProvider.get().sessionToken);
|
||||
}, function(errorMsg) {
|
||||
options.log("Attempt to update expired credentials apparently failed! Unable to sign request. ", "error");
|
||||
signatureEffort.failure("Unable to sign request - expired credentials.");
|
||||
});
|
||||
}
|
||||
}
|
||||
else {
|
||||
options.log("Submitting S3 signature request for " + id);
|
||||
|
||||
if (params.signatureConstructor) {
|
||||
params = {headers: params.signatureConstructor.getToSign().stringToSign};
|
||||
}
|
||||
|
||||
requester.initTransport(id)
|
||||
.withParams(params)
|
||||
.send();
|
||||
|
||||
pendingSignatures[id] = {
|
||||
promise: signatureEffort
|
||||
};
|
||||
}
|
||||
|
||||
return signatureEffort;
|
||||
},
|
||||
|
||||
constructStringToSign: function(type, bucket, key) {
|
||||
var headers = {},
|
||||
uploadId, contentType, partNum, toSignAndEndOfUrl;
|
||||
|
||||
return {
|
||||
withHeaders: function(theHeaders) {
|
||||
headers = theHeaders;
|
||||
return this;
|
||||
},
|
||||
|
||||
withUploadId: function(theUploadId) {
|
||||
uploadId = theUploadId;
|
||||
return this;
|
||||
},
|
||||
|
||||
withContentType: function(theContentType) {
|
||||
contentType = theContentType;
|
||||
return this;
|
||||
},
|
||||
|
||||
withPartNum: function(thePartNum) {
|
||||
partNum = thePartNum;
|
||||
return this;
|
||||
},
|
||||
|
||||
getToSign: function() {
|
||||
var sessionToken = credentialsProvider.get().sessionToken;
|
||||
|
||||
headers["x-amz-date"] = new Date().toUTCString();
|
||||
|
||||
if (sessionToken) {
|
||||
headers[qq.s3.util.SESSION_TOKEN_PARAM_NAME] = sessionToken;
|
||||
}
|
||||
|
||||
toSignAndEndOfUrl = getToSignAndEndOfUrl(type, bucket, key, contentType, headers, uploadId, partNum);
|
||||
|
||||
return {
|
||||
headers: (function() {
|
||||
if (contentType) {
|
||||
headers["Content-Type"] = contentType;
|
||||
}
|
||||
|
||||
return headers;
|
||||
}()),
|
||||
endOfUrl: toSignAndEndOfUrl.endOfUrl,
|
||||
stringToSign: toSignAndEndOfUrl.toSign
|
||||
};
|
||||
},
|
||||
|
||||
getHeaders: function() {
|
||||
return qq.extend({}, headers);
|
||||
},
|
||||
|
||||
getEndOfUrl: function() {
|
||||
return toSignAndEndOfUrl && toSignAndEndOfUrl.endOfUrl;
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
qq.s3.RequestSigner.prototype.REQUEST_TYPE = {
|
||||
MULTIPART_INITIATE: "multipart_initiate",
|
||||
MULTIPART_COMPLETE: "multipart_complete",
|
||||
MULTIPART_ABORT: "multipart_abort",
|
||||
MULTIPART_UPLOAD: "multipart_upload"
|
||||
};
|
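To make the signing flow above concrete, here is a hedged sketch of building the string to sign for one chunk of a multipart upload; the bucket, key, part number, upload ID, signing endpoint, and credentials provider are invented placeholders.

// Minimal stub credentials provider; a real integration would supply keys or rely on a signing server.
var credentialsProvider = { get: function() { return {}; } };

var signer = new qq.s3.RequestSigner({
    signatureSpec: {
        endpoint: "/s3/signature",                  // invented local signing endpoint
        credentialsProvider: credentialsProvider,
        customHeaders: {}
    },
    cors: { expected: false, sendCredentials: false },
    log: function(msg, level) {}
});

var signatureConstructor = signer.constructStringToSign(
        signer.REQUEST_TYPE.MULTIPART_UPLOAD, "example-bucket", "example-key")
    .withPartNum(3)
    .withUploadId("EXAMPLE_UPLOAD_ID");

// getToSign() also stamps an x-amz-date header, then yields roughly:
//   PUT\n\n<content type>\n\n<amz headers, one per line>/example-bucket/example-key?partNumber=3&uploadId=EXAMPLE_UPLOAD_ID
var stringToSign = signatureConstructor.getToSign().stringToSign;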
@ -1,219 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* Upload handler used by the upload to S3 module that assumes the current user agent does not have any support for the
|
||||
* File API, and, therefore, makes use of iframes and forms to submit the files directly to S3 buckets via the associated
|
||||
* AWS API.
|
||||
*
|
||||
* @param options Options passed from the base handler
|
||||
* @param proxy Callbacks & methods used to query for or push out data/changes
|
||||
*/
|
||||
qq.s3.FormUploadHandler = function(options, proxy) {
|
||||
"use strict";
|
||||
|
||||
var handler = this,
|
||||
onUuidChanged = proxy.onUuidChanged,
|
||||
getName = proxy.getName,
|
||||
getUuid = proxy.getUuid,
|
||||
log = proxy.log,
|
||||
onGetKeyName = options.getKeyName,
|
||||
filenameParam = options.filenameParam,
|
||||
paramsStore = options.paramsStore,
|
||||
endpointStore = options.endpointStore,
|
||||
aclStore = options.aclStore,
|
||||
reducedRedundancy = options.objectProperties.reducedRedundancy,
|
||||
serverSideEncryption = options.objectProperties.serverSideEncryption,
|
||||
validation = options.validation,
|
||||
signature = options.signature,
|
||||
successRedirectUrl = options.iframeSupport.localBlankPagePath,
|
||||
credentialsProvider = options.signature.credentialsProvider,
|
||||
getSignatureAjaxRequester = new qq.s3.RequestSigner({
|
||||
signatureSpec: signature,
|
||||
cors: options.cors,
|
||||
log: log
|
||||
});
|
||||
|
||||
|
||||
if (successRedirectUrl === undefined) {
|
||||
throw new Error("successRedirectEndpoint MUST be defined if you intend to use browsers that do not support the File API!");
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to parse the contents of an iframe after receiving a response from the server. If the contents cannot be
|
||||
* read (perhaps due to a security error), it is safe to assume that the upload was not successful, since Amazon should
|
||||
* have redirected to a known endpoint that should provide a parseable response.
|
||||
*
|
||||
* @param id ID of the associated file
|
||||
* @param iframe target of the form submit
|
||||
* @returns {boolean} true if the contents can be read, false otherwise
|
||||
*/
|
||||
function isValidResponse(id, iframe) {
|
||||
var response,
|
||||
endpoint = options.endpointStore.get(id),
|
||||
bucket = qq.s3.util.getBucket(endpoint);
|
||||
|
||||
|
||||
//IE may throw an "access is denied" error when attempting to access contentDocument on the iframe in some cases
|
||||
try {
|
||||
// iframe.contentWindow.document - for IE<7
|
||||
var doc = iframe.contentDocument || iframe.contentWindow.document,
|
||||
innerHtml = doc.body.innerHTML;
|
||||
|
||||
var responseData = qq.s3.util.parseIframeResponse(iframe);
|
||||
if (responseData.bucket === bucket &&
|
||||
responseData.key === qq.s3.util.encodeQueryStringParam(handler.getThirdPartyFileId(id))) {
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
log("Response from AWS included an unexpected bucket or key name.", "error");
|
||||
|
||||
}
|
||||
catch(error) {
|
||||
log("Error when attempting to parse form upload response (" + error.message + ")", "error");
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function generateAwsParams(id) {
|
||||
/*jshint -W040 */
|
||||
var customParams = paramsStore.get(id);
|
||||
|
||||
customParams[filenameParam] = getName(id);
|
||||
|
||||
return qq.s3.util.generateAwsParams({
|
||||
endpoint: endpointStore.get(id),
|
||||
params: customParams,
|
||||
key: handler.getThirdPartyFileId(id),
|
||||
accessKey: credentialsProvider.get().accessKey,
|
||||
sessionToken: credentialsProvider.get().sessionToken,
|
||||
acl: aclStore.get(id),
|
||||
minFileSize: validation.minSizeLimit,
|
||||
maxFileSize: validation.maxSizeLimit,
|
||||
successRedirectUrl: successRedirectUrl,
|
||||
reducedRedundancy: reducedRedundancy,
|
||||
serverSideEncryption: serverSideEncryption,
|
||||
log: log
|
||||
},
|
||||
qq.bind(getSignatureAjaxRequester.getSignature, this, id));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the form that will be submitted to the iframe.
|
||||
*/
|
||||
function createForm(id, iframe) {
|
||||
var promise = new qq.Promise(),
|
||||
method = options.demoMode ? "GET" : "POST",
|
||||
endpoint = options.endpointStore.get(id),
|
||||
fileName = getName(id);
|
||||
|
||||
generateAwsParams(id).then(function(params) {
|
||||
var form = handler._initFormForUpload({
|
||||
method: method,
|
||||
endpoint: endpoint,
|
||||
params: params,
|
||||
paramsInBody: true,
|
||||
targetName: iframe.name
|
||||
});
|
||||
|
||||
promise.success(form);
|
||||
}, function(errorMessage) {
|
||||
promise.failure(errorMessage);
|
||||
handleFinishedUpload(id, iframe, fileName, {error: errorMessage});
|
||||
});
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
function handleUpload(id) {
|
||||
var iframe = handler._createIframe(id),
|
||||
input = handler.getInput(id),
|
||||
promise = new qq.Promise();
|
||||
|
||||
createForm(id, iframe).then(function(form) {
|
||||
form.appendChild(input);
|
||||
|
||||
// Register a callback when the response comes in from S3
|
||||
handler._attachLoadEvent(iframe, function(response) {
|
||||
log("iframe loaded");
|
||||
|
||||
// If the common response handler has determined success or failure immediately
|
||||
if (response) {
|
||||
// If there is something fundamentally wrong with the response (such as the iframe content not being accessible)
|
||||
if (response.success === false) {
|
||||
log("Amazon likely rejected the upload request", "error");
|
||||
promise.failure(response);
|
||||
}
|
||||
}
|
||||
// The generic response (iframe onload) handler was not able to make a determination regarding the success of the request
|
||||
else {
|
||||
response = {};
|
||||
response.success = isValidResponse(id, iframe);
|
||||
|
||||
// If the more specific response handler detected a problem with the response from S3
|
||||
if (response.success === false) {
|
||||
log("A success response was received by Amazon, but it was invalid in some way.", "error");
|
||||
promise.failure(response);
|
||||
}
|
||||
else {
|
||||
qq.extend(response, qq.s3.util.parseIframeResponse(iframe));
|
||||
promise.success(response);
|
||||
}
|
||||
}
|
||||
|
||||
handleFinishedUpload(id, iframe);
|
||||
});
|
||||
|
||||
log("Sending upload request for " + id);
|
||||
form.submit();
|
||||
qq(form).remove();
|
||||
}, promise.failure);
|
||||
|
||||
return promise;
|
||||
}
|
||||
|
||||
function handleFinishedUpload(id, iframe) {
|
||||
handler._detachLoadEvent(id);
|
||||
iframe && qq(iframe).remove();
|
||||
}
|
||||
|
||||
qq.extend(this, new qq.FormUploadHandler({
|
||||
options: {
|
||||
isCors: false,
|
||||
inputName: "file"
|
||||
},
|
||||
|
||||
proxy: {
|
||||
onCancel: options.onCancel,
|
||||
onUuidChanged: onUuidChanged,
|
||||
getName: getName,
|
||||
getUuid: getUuid,
|
||||
log: log
|
||||
}
|
||||
}
|
||||
));
|
||||
|
||||
qq.extend(this, {
|
||||
uploadFile: function(id) {
|
||||
var name = getName(id),
|
||||
promise = new qq.Promise();
|
||||
|
||||
if (handler.getThirdPartyFileId(id)) {
|
||||
handleUpload(id).then(promise.success, promise.failure);
|
||||
}
|
||||
else {
|
||||
// The S3 uploader module will either calculate the key or ask the server for it
|
||||
// and will call us back once it is known.
|
||||
onGetKeyName(id, name).then(function(key) {
|
||||
handler._setThirdPartyFileId(id, key);
|
||||
handleUpload(id).then(promise.success, promise.failure);
|
||||
|
||||
}, function(errorReason) {
|
||||
promise.failure({error: errorReason});
|
||||
});
|
||||
}
|
||||
|
||||
return promise;
|
||||
}
|
||||
});
|
||||
};
|
@ -1,536 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* Upload handler used by the upload to S3 module that depends on File API support, and, therefore, makes use of
|
||||
* `XMLHttpRequest` level 2 to upload `File`s and `Blob`s directly to S3 buckets via the associated AWS API.
|
||||
*
|
||||
* If chunking is supported and enabled, the S3 Multipart Upload REST API is utilized.
|
||||
*
|
||||
* @param spec Options passed from the base handler
|
||||
* @param proxy Callbacks & methods used to query for or push out data/changes
|
||||
*/
|
||||
qq.s3.XhrUploadHandler = function(spec, proxy) {
|
||||
"use strict";
|
||||
|
||||
var getName = proxy.getName,
|
||||
log = proxy.log,
|
||||
expectedStatus = 200,
|
||||
onGetKeyName = spec.getKeyName,
|
||||
filenameParam = spec.filenameParam,
|
||||
paramsStore = spec.paramsStore,
|
||||
endpointStore = spec.endpointStore,
|
||||
aclStore = spec.aclStore,
|
||||
reducedRedundancy = spec.objectProperties.reducedRedundancy,
|
||||
serverSideEncryption = spec.objectProperties.serverSideEncryption,
|
||||
validation = spec.validation,
|
||||
signature = spec.signature,
|
||||
handler = this,
|
||||
credentialsProvider = spec.signature.credentialsProvider,
|
||||
|
||||
chunked = {
|
||||
// Sends a "Complete Multipart Upload" request and then signals completion of the upload
|
||||
// when the response to this request has been parsed.
|
||||
combine: function(id) {
|
||||
var uploadId = handler._getPersistableData(id).uploadId,
|
||||
etagMap = handler._getPersistableData(id).etags,
|
||||
result = new qq.Promise();
|
||||
|
||||
requesters.completeMultipart.send(id, uploadId, etagMap).then(
|
||||
result.success,
|
||||
|
||||
function failure(reason, xhr) {
|
||||
result.failure(upload.done(id, xhr).response, xhr);
|
||||
}
|
||||
);
|
||||
|
||||
return result;
|
||||
},
|
||||
|
||||
// The last step in handling a chunked upload. This is called after each chunk has been sent.
|
||||
// The request may be successful, or not. If it was successful, we must extract the "ETag" element
|
||||
// in the XML response and store that along with the associated part number.
|
||||
// We need these items to "Complete" the multipart upload after all chunks have been successfully sent.
|
||||
done: function(id, xhr, chunkIdx) {
|
||||
var response = upload.response.parse(id, xhr),
|
||||
etag;
|
||||
|
||||
if (response.success) {
|
||||
etag = xhr.getResponseHeader("ETag");
|
||||
|
||||
if (!handler._getPersistableData(id).etags) {
|
||||
handler._getPersistableData(id).etags = [];
|
||||
}
|
||||
handler._getPersistableData(id).etags.push({part: chunkIdx+1, etag: etag});
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Determines headers that must be attached to the chunked (Multipart Upload) request. One of these headers is an
|
||||
* Authorization value, which must be determined by asking the local server to sign the request first. So, this
|
||||
* function returns a promise. Once all headers are determined, the `success` method of the promise is called with
|
||||
* the headers object. If there was some problem determining the headers, we delegate to the caller's `failure`
|
||||
* callback.
|
||||
*
|
||||
* @param id File ID
|
||||
* @param chunkIdx Index of the chunk to PUT
|
||||
* @returns {qq.Promise}
|
||||
*/
|
||||
initHeaders: function(id, chunkIdx) {
|
||||
var headers = {},
|
||||
endpoint = spec.endpointStore.get(id),
|
||||
bucket = qq.s3.util.getBucket(endpoint),
|
||||
key = upload.key.urlSafe(id),
|
||||
promise = new qq.Promise(),
|
||||
signatureConstructor = requesters.restSignature.constructStringToSign
|
||||
(requesters.restSignature.REQUEST_TYPE.MULTIPART_UPLOAD, bucket, key)
|
||||
.withPartNum(chunkIdx + 1)
|
||||
.withUploadId(handler._getPersistableData(id).uploadId);
|
||||
|
||||
// Ask the local server to sign the request. Use this signature to form the Authorization header.
|
||||
requesters.restSignature.getSignature(id + "." + chunkIdx, {signatureConstructor: signatureConstructor}).then(function(response) {
|
||||
headers = signatureConstructor.getHeaders();
|
||||
headers.Authorization = "AWS " + credentialsProvider.get().accessKey + ":" + response.signature;
|
||||
promise.success(headers, signatureConstructor.getEndOfUrl());
|
||||
}, promise.failure);
|
||||
|
||||
return promise;
|
||||
},
|
||||
|
||||
put: function(id, chunkIdx) {
|
||||
var xhr = handler._createXhr(id, chunkIdx),
|
||||
chunkData = handler._getChunkData(id, chunkIdx),
|
||||
domain = spec.endpointStore.get(id),
|
||||
promise = new qq.Promise();
|
||||
|
||||
// Add appropriate headers to the multipart upload request.
|
||||
// Once these have been determined (asynchronously) attach the headers and send the chunk.
|
||||
chunked.initHeaders(id, chunkIdx).then(function(headers, endOfUrl) {
|
||||
var url = domain + "/" + endOfUrl;
|
||||
handler._registerProgressHandler(id, chunkIdx, chunkData.size);
|
||||
upload.track(id, xhr, chunkIdx).then(promise.success, promise.failure);
|
||||
xhr.open("PUT", url, true);
|
||||
|
||||
qq.each(headers, function(name, val) {
|
||||
xhr.setRequestHeader(name, val);
|
||||
});
|
||||
|
||||
xhr.send(chunkData.blob);
|
||||
}, function() {
|
||||
promise.failure({error: "Problem signing the chunk!"}, xhr);
|
||||
});
|
||||
|
||||
return promise;
|
||||
},
|
||||
|
||||
send: function(id, chunkIdx) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
chunked.setup(id).then(
|
||||
// The "Initiate" request succeeded. We are ready to send the first chunk.
|
||||
function() {
|
||||
chunked.put(id, chunkIdx).then(promise.success, promise.failure);
|
||||
},
|
||||
|
||||
// We were unable to initiate the chunked upload process.
|
||||
function(errorMessage, xhr) {
|
||||
promise.failure({error: errorMessage}, xhr);
|
||||
}
|
||||
);
|
||||
|
||||
return promise;
|
||||
},
|
||||
|
||||
/**
|
||||
* Sends an "Initiate Multipart Upload" request to S3 via the REST API, but only if the MPU has not already been
|
||||
* initiated.
|
||||
*
|
||||
* @param id Associated file ID
|
||||
* @returns {qq.Promise} A promise that is fulfilled when the initiate request has been sent and the response has been parsed.
|
||||
*/
|
||||
setup: function(id) {
|
||||
var promise = new qq.Promise(),
|
||||
uploadId = handler._getPersistableData(id).uploadId,
|
||||
uploadIdPromise = new qq.Promise();
|
||||
|
||||
if (!uploadId) {
|
||||
handler._getPersistableData(id).uploadId = uploadIdPromise;
|
||||
requesters.initiateMultipart.send(id).then(
|
||||
function(uploadId) {
|
||||
handler._getPersistableData(id).uploadId = uploadId;
|
||||
uploadIdPromise.success(uploadId);
|
||||
promise.success(uploadId);
|
||||
},
|
||||
function(errorMsg) {
|
||||
handler._getPersistableData(id).uploadId = null;
|
||||
promise.failure(errorMsg);
|
||||
uploadIdPromise.failure(errorMsg);
|
||||
}
|
||||
);
|
||||
}
|
||||
else if (uploadId instanceof qq.Promise) {
|
||||
uploadId.then(function(uploadId) {
|
||||
promise.success(uploadId);
|
||||
});
|
||||
}
|
||||
else {
|
||||
promise.success(uploadId);
|
||||
}
|
||||
|
||||
return promise;
|
||||
}
|
||||
},
|
||||
|
||||
requesters = {
|
||||
abortMultipart: new qq.s3.AbortMultipartAjaxRequester({
|
||||
endpointStore: endpointStore,
|
||||
signatureSpec: signature,
|
||||
cors: spec.cors,
|
||||
log: log,
|
||||
getKey: function(id) {
|
||||
return upload.key.urlSafe(id);
|
||||
}
|
||||
}),
|
||||
|
||||
completeMultipart: new qq.s3.CompleteMultipartAjaxRequester({
|
||||
endpointStore: endpointStore,
|
||||
signatureSpec: signature,
|
||||
cors: spec.cors,
|
||||
log: log,
|
||||
getKey: function(id) {
|
||||
return upload.key.urlSafe(id);
|
||||
}
|
||||
}),
|
||||
|
||||
initiateMultipart: new qq.s3.InitiateMultipartAjaxRequester({
|
||||
filenameParam: filenameParam,
|
||||
endpointStore: endpointStore,
|
||||
paramsStore: paramsStore,
|
||||
signatureSpec: signature,
|
||||
aclStore: aclStore,
|
||||
reducedRedundancy: reducedRedundancy,
|
||||
serverSideEncryption: serverSideEncryption,
|
||||
cors: spec.cors,
|
||||
log: log,
|
||||
getContentType: function(id) {
|
||||
return handler._getMimeType(id);
|
||||
},
|
||||
getKey: function(id) {
|
||||
return upload.key.urlSafe(id);
|
||||
},
|
||||
getName: function(id) {
|
||||
return getName(id);
|
||||
}
|
||||
}),
|
||||
|
||||
policySignature: new qq.s3.RequestSigner({
|
||||
expectingPolicy: true,
|
||||
signatureSpec: signature,
|
||||
cors: spec.cors,
|
||||
log: log
|
||||
}),
|
||||
|
||||
restSignature: new qq.s3.RequestSigner({
|
||||
signatureSpec: signature,
|
||||
cors: spec.cors,
|
||||
log: log
|
||||
})
|
||||
},
|
||||
|
||||
simple = {
|
||||
/**
|
||||
* Used for simple (non-chunked) uploads to determine the parameters to send along with the request. Part of this
|
||||
* process involves asking the local server to sign the request, so this function returns a promise. The promise
|
||||
* is fulfilled when all parameters are determined, or when we determine that all parameters cannot be calculated
|
||||
* due to some error.
|
||||
*
|
||||
* @param id File ID
|
||||
* @returns {qq.Promise}
|
||||
*/
|
||||
initParams: function(id) {
|
||||
/*jshint -W040 */
|
||||
var customParams = paramsStore.get(id);
|
||||
customParams[filenameParam] = getName(id);
|
||||
|
||||
return qq.s3.util.generateAwsParams({
|
||||
endpoint: endpointStore.get(id),
|
||||
params: customParams,
|
||||
type: handler._getMimeType(id),
|
||||
key: handler.getThirdPartyFileId(id),
|
||||
accessKey: credentialsProvider.get().accessKey,
|
||||
sessionToken: credentialsProvider.get().sessionToken,
|
||||
acl: aclStore.get(id),
|
||||
expectedStatus: expectedStatus,
|
||||
minFileSize: validation.minSizeLimit,
|
||||
maxFileSize: validation.maxSizeLimit,
|
||||
reducedRedundancy: reducedRedundancy,
|
||||
serverSideEncryption: serverSideEncryption,
|
||||
log: log
|
||||
},
|
||||
qq.bind(requesters.policySignature.getSignature, this, id));
|
||||
},
|
||||
|
||||
send: function(id) {
|
||||
var promise = new qq.Promise(),
|
||||
xhr = handler._createXhr(id),
|
||||
fileOrBlob = handler.getFile(id);
|
||||
|
||||
handler._registerProgressHandler(id);
|
||||
upload.track(id, xhr).then(promise.success, promise.failure);
|
||||
|
||||
// Delegate to a function that sets up the XHR request and notifies us when it is ready to be sent, along w/ the payload.
|
||||
simple.setup(id, xhr, fileOrBlob).then(function(toSend) {
|
||||
log("Sending upload request for " + id);
|
||||
xhr.send(toSend);
|
||||
}, promise.failure);
|
||||
|
||||
return promise;
|
||||
},
|
||||
|
||||
/**
|
||||
* Starts the upload process by delegating to an async function that determines parameters to be attached to the
|
||||
* request. If all params can be determined, we are called back with the params and the caller of this function is
|
||||
* informed by invoking the `success` method on the promise returned by this function, passing the payload of the
|
||||
* request. If some error occurs here, we delegate to a function that signals a failure for this upload attempt.
|
||||
*
|
||||
* Note that this is only used by the simple (non-chunked) upload process.
|
||||
*
|
||||
* @param id File ID
|
||||
* @param xhr XMLHttpRequest to use for the upload
|
||||
* @param fileOrBlob `File` or `Blob` to send
|
||||
* @returns {qq.Promise}
|
||||
*/
|
||||
setup: function(id, xhr, fileOrBlob) {
|
||||
var formData = new FormData(),
|
||||
endpoint = endpointStore.get(id),
|
||||
url = endpoint,
|
||||
promise = new qq.Promise();
|
||||
|
||||
simple.initParams(id).then(
|
||||
// Success - all params determined
|
||||
function(awsParams) {
|
||||
xhr.open("POST", url, true);
|
||||
|
||||
qq.obj2FormData(awsParams, formData);
|
||||
|
||||
// AWS requires the file field be named "file".
|
||||
formData.append("file", fileOrBlob);
|
||||
|
||||
promise.success(formData);
|
||||
},
|
||||
|
||||
// Failure - we couldn't determine some params (likely the signature)
|
||||
function(errorMessage) {
|
||||
promise.failure({error: errorMessage});
|
||||
}
|
||||
);
|
||||
|
||||
return promise;
|
||||
}
|
||||
},
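// A sketch of the multipart/form-data payload assembled by `setup` above, assuming `awsParams`
// was already produced by `qq.s3.util.generateAwsParams`. The field ordering mirrors the code:
// policy-related fields first, then the blob itself under the name "file".
function buildS3FormData(awsParams, fileOrBlob) {
    var formData = new FormData();

    // Policy, signature, key, acl and any x-amz-meta-* params go in first.
    qq.obj2FormData(awsParams, formData);

    // The file part is named "file" and appended last, as required by the upload above.
    formData.append("file", fileOrBlob);

    return formData;
}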
|
||||
|
||||
upload = {
|
||||
/**
|
||||
* Note that this is called when an upload has reached a termination point,
|
||||
* regardless of success/failure. For example, it is called when we have
|
||||
* encountered an error during the upload or when the file may have uploaded successfully.
|
||||
*
|
||||
* @param id file ID
|
||||
*/
|
||||
done: function(id, xhr) {
|
||||
var response = upload.response.parse(id, xhr),
|
||||
isError = response.success !== true;
|
||||
|
||||
if (isError && upload.response.shouldReset(response.code)) {
|
||||
log("This is an unrecoverable error, we must restart the upload entirely on the next retry attempt.", "error");
|
||||
response.reset = true;
|
||||
}
|
||||
|
||||
return {
|
||||
success: !isError,
|
||||
response: response
|
||||
};
|
||||
},
|
||||
|
||||
key: {
|
||||
promise: function(id) {
|
||||
var promise = new qq.Promise(),
|
||||
key = handler.getThirdPartyFileId(id);
|
||||
|
||||
/* jshint eqnull:true */
|
||||
if (key == null) {
|
||||
key = new qq.Promise();
|
||||
handler._setThirdPartyFileId(id, key);
|
||||
onGetKeyName(id, getName(id)).then(
|
||||
function(key) {
|
||||
handler._setThirdPartyFileId(id, key);
|
||||
promise.success(key);
|
||||
},
|
||||
function(errorReason) {
|
||||
handler._setThirdPartyFileId(id, null);
|
||||
promise.failure(errorReason);
|
||||
}
|
||||
);
|
||||
}
|
||||
else if (qq.isGenericPromise(key)) {
|
||||
promise.then(key.success, key.failure);
|
||||
}
|
||||
else {
|
||||
promise.success(key);
|
||||
}
|
||||
|
||||
return promise;
|
||||
},
|
||||
|
||||
urlSafe: function(id) {
|
||||
return encodeURIComponent(handler.getThirdPartyFileId(id));
|
||||
}
|
||||
},
|
||||
|
||||
response: {
|
||||
parse: function(id, xhr) {
|
||||
var response = {},
|
||||
parsedErrorProps;
|
||||
|
||||
try {
|
||||
log(qq.format("Received response status {} with body: {}", xhr.status, xhr.responseText));
|
||||
|
||||
if (xhr.status === expectedStatus) {
|
||||
response.success = true;
|
||||
}
|
||||
else {
|
||||
parsedErrorProps = upload.response.parseError(xhr.responseText);
|
||||
|
||||
if (parsedErrorProps) {
|
||||
response.error = parsedErrorProps.message;
|
||||
response.code = parsedErrorProps.code;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch(error) {
|
||||
log("Error when attempting to parse xhr response text (" + error.message + ")", "error");
|
||||
}
|
||||
|
||||
return response;
|
||||
},
|
||||
|
||||
/**
|
||||
* This parses an XML response by extracting the "Message" and "Code" elements that accompany AWS error responses.
|
||||
*
|
||||
* @param awsResponseXml XML response from AWS
|
||||
* @returns {object} Object w/ `code` and `message` properties, or undefined if we couldn't find error info in the XML document.
|
||||
*/
|
||||
parseError: function(awsResponseXml) {
|
||||
var parser = new DOMParser(),
|
||||
parsedDoc = parser.parseFromString(awsResponseXml, "application/xml"),
|
||||
errorEls = parsedDoc.getElementsByTagName("Error"),
|
||||
errorDetails = {},
|
||||
codeEls, messageEls;
|
||||
|
||||
if (errorEls.length) {
|
||||
codeEls = parsedDoc.getElementsByTagName("Code");
|
||||
messageEls = parsedDoc.getElementsByTagName("Message");
|
||||
|
||||
if (messageEls.length) {
|
||||
errorDetails.message = messageEls[0].textContent;
|
||||
}
|
||||
|
||||
if (codeEls.length) {
|
||||
errorDetails.code = codeEls[0].textContent;
|
||||
}
|
||||
|
||||
return errorDetails;
|
||||
}
|
||||
},
|
||||
|
||||
// Determine if the upload should be restarted on the next retry attempt
|
||||
// based on the error code returned in the response from AWS.
|
||||
shouldReset: function(errorCode) {
|
||||
/*jshint -W014 */
|
||||
return errorCode === "EntityTooSmall"
|
||||
|| errorCode === "InvalidPart"
|
||||
|| errorCode === "InvalidPartOrder"
|
||||
|| errorCode === "NoSuchUpload";
|
||||
}
|
||||
},
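// A self-contained example of the XML handling performed by `parseError` above, run against a
// hand-written S3 error body. Only the "Code" and "Message" elements are read, mirroring the
// code; the sample payload is illustrative, not captured from a real bucket.
var sampleAwsError =
        "<Error>" +
        "<Code>NoSuchUpload</Code>" +
        "<Message>The specified upload does not exist.</Message>" +
        "</Error>",
    sampleDoc = new DOMParser().parseFromString(sampleAwsError, "application/xml"),
    sampleCode = sampleDoc.getElementsByTagName("Code")[0].textContent,
    sampleMessage = sampleDoc.getElementsByTagName("Message")[0].textContent;

// sampleCode === "NoSuchUpload", one of the codes `shouldReset` above treats as unrecoverable,
// so the entire multipart upload would be restarted on the next retry attempt.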
|
||||
|
||||
start: function(id, opt_chunkIdx) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
upload.key.promise(id).then(function() {
|
||||
/* jshint eqnull:true */
|
||||
if (opt_chunkIdx == null) {
|
||||
simple.send(id).then(promise.success, promise.failure);
|
||||
}
|
||||
else {
|
||||
chunked.send(id, opt_chunkIdx).then(promise.success, promise.failure);
|
||||
}
|
||||
},
|
||||
function(errorReason) {
|
||||
promise.failure({error: errorReason});
|
||||
});
|
||||
|
||||
return promise;
|
||||
},
|
||||
|
||||
track: function(id, xhr, opt_chunkIdx) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
xhr.onreadystatechange = function() {
|
||||
if (xhr.readyState === 4) {
|
||||
var result;
|
||||
|
||||
/* jshint eqnull:true */
|
||||
if (opt_chunkIdx == null) {
|
||||
result = upload.done(id, xhr);
|
||||
promise[result.success ? "success" : "failure"](result.response, xhr);
|
||||
}
|
||||
else {
|
||||
chunked.done(id, xhr, opt_chunkIdx);
|
||||
result = upload.done(id, xhr);
|
||||
promise[result.success ? "success" : "failure"](result.response, xhr);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return promise;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
uploadChunk: upload.start,
|
||||
uploadFile: upload.start
|
||||
});
|
||||
|
||||
qq.extend(this, new qq.XhrUploadHandler({
|
||||
options: qq.extend({namespace: "s3"}, spec),
|
||||
proxy: qq.extend({getEndpoint: spec.endpointStore.get}, proxy)
|
||||
}
|
||||
));
|
||||
|
||||
qq.override(this, function(super_) {
|
||||
return {
|
||||
expunge: function(id) {
|
||||
var uploadId = handler._getPersistableData(id) && handler._getPersistableData(id).uploadId,
|
||||
existedInLocalStorage = handler._maybeDeletePersistedChunkData(id);
|
||||
|
||||
if (uploadId !== undefined && existedInLocalStorage) {
|
||||
requesters.abortMultipart.send(id, uploadId);
|
||||
}
|
||||
|
||||
super_.expunge(id);
|
||||
},
|
||||
|
||||
finalizeChunks: function(id) {
|
||||
return chunked.combine(id);
|
||||
},
|
||||
|
||||
_getLocalStorageId: function(id) {
|
||||
var baseStorageId = super_._getLocalStorageId(id),
|
||||
endpoint = endpointStore.get(id),
|
||||
bucketName = qq.s3.util.getBucket(endpoint);
|
||||
|
||||
return baseStorageId + "-" + bucketName;
|
||||
}
|
||||
};
|
||||
});
|
||||
};
|
@ -1,373 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* This defines FineUploaderBasic mode w/ support for uploading to S3, which provides all the basic
|
||||
* functionality of Fine Uploader Basic as well as code to handle uploads directly to S3.
|
||||
* Some inherited options and API methods have a special meaning in the context of the S3 uploader.
|
||||
*/
|
||||
(function(){
|
||||
"use strict";
|
||||
|
||||
qq.s3.FineUploaderBasic = function(o) {
|
||||
var options = {
|
||||
request: {
|
||||
// public key (required for server-side signing, ignored if `credentials` have been provided)
|
||||
accessKey: null
|
||||
},
|
||||
|
||||
objectProperties: {
|
||||
acl: "private",
|
||||
|
||||
// 'uuid', 'filename', or a function which may be promissory
|
||||
key: "uuid",
|
||||
|
||||
reducedRedundancy: false,
|
||||
|
||||
serverSideEncryption: false
|
||||
},
|
||||
|
||||
credentials: {
|
||||
// Public key (required).
|
||||
accessKey: null,
|
||||
// Private key (required).
|
||||
secretKey: null,
|
||||
// Expiration date for the credentials (required). May be an ISO string or a `Date`.
|
||||
expiration: null,
|
||||
// Temporary credentials session token.
|
||||
// Only required for temporary credentials obtained via AssumeRoleWithWebIdentity.
|
||||
sessionToken: null
|
||||
},
|
||||
|
||||
// optional/ignored if `credentials` is provided
|
||||
signature: {
|
||||
endpoint: null,
|
||||
customHeaders: {}
|
||||
},
|
||||
|
||||
uploadSuccess: {
|
||||
endpoint: null,
|
||||
|
||||
// In addition to the default params sent by Fine Uploader
|
||||
params: {},
|
||||
|
||||
customHeaders: {}
|
||||
},
|
||||
|
||||
// required if non-File-API browsers, such as IE9 and older, are used
|
||||
iframeSupport: {
|
||||
localBlankPagePath: null
|
||||
},
|
||||
|
||||
chunking: {
|
||||
// minimum part size is 5 MiB when uploading to S3
|
||||
partSize: 5242880
|
||||
},
|
||||
|
||||
cors: {
|
||||
allowXdr: true
|
||||
},
|
||||
|
||||
callbacks: {
|
||||
onCredentialsExpired: function() {}
|
||||
}
|
||||
};
|
||||
|
||||
// Replace any default options with user defined ones
|
||||
qq.extend(options, o, true);
|
||||
|
||||
if (!this.setCredentials(options.credentials, true)) {
|
||||
this._currentCredentials.accessKey = options.request.accessKey;
|
||||
}
|
||||
|
||||
this._aclStore = this._createStore(options.objectProperties.acl);
|
||||
|
||||
// Call base module
|
||||
qq.FineUploaderBasic.call(this, options);
|
||||
|
||||
this._uploadSuccessParamsStore = this._createStore(this._options.uploadSuccess.params);
|
||||
|
||||
// This will hold callbacks for failed uploadSuccess requests that will be invoked on retry.
|
||||
// Indexed by file ID.
|
||||
this._failedSuccessRequestCallbacks = {};
|
||||
|
||||
// Holds S3 keys for file representations constructed from a session request.
|
||||
this._cannedKeys = {};
|
||||
};
|
||||
|
||||
// Inherit basic public & private API methods.
|
||||
qq.extend(qq.s3.FineUploaderBasic.prototype, qq.basePublicApi);
|
||||
qq.extend(qq.s3.FineUploaderBasic.prototype, qq.basePrivateApi);
|
||||
qq.extend(qq.s3.FineUploaderBasic.prototype, qq.nonTraditionalBasePublicApi);
|
||||
qq.extend(qq.s3.FineUploaderBasic.prototype, qq.nonTraditionalBasePrivateApi);
|
||||
|
||||
// Define public & private API methods for this module.
|
||||
qq.extend(qq.s3.FineUploaderBasic.prototype, {
|
||||
/**
|
||||
* @param id File ID
|
||||
* @returns {*} Key name associated w/ the file, if one exists
|
||||
*/
|
||||
getKey: function(id) {
|
||||
/* jshint eqnull:true */
|
||||
if (this._cannedKeys[id] == null) {
|
||||
return this._handler.getThirdPartyFileId(id);
|
||||
}
|
||||
|
||||
return this._cannedKeys[id];
|
||||
},
|
||||
|
||||
/**
|
||||
* Override the parent's reset function to cleanup various S3-related items.
|
||||
*/
|
||||
reset: function() {
|
||||
qq.FineUploaderBasic.prototype.reset.call(this);
|
||||
this._failedSuccessRequestCallbacks = [];
|
||||
},
|
||||
|
||||
setUploadSuccessParams: function(params, id) {
|
||||
this._uploadSuccessParamsStore.set(params, id);
|
||||
},
|
||||
|
||||
setCredentials: function(credentials, ignoreEmpty) {
|
||||
if (credentials && credentials.secretKey) {
|
||||
if (!credentials.accessKey) {
|
||||
throw new qq.Error("Invalid credentials: no accessKey");
|
||||
}
|
||||
else if (!credentials.expiration) {
|
||||
throw new qq.Error("Invalid credentials: no expiration");
|
||||
}
|
||||
else {
|
||||
this._currentCredentials = qq.extend({}, credentials);
|
||||
|
||||
// Ensure expiration is a `Date`. If initially a string, assume it is in ISO format.
|
||||
if (qq.isString(credentials.expiration)) {
|
||||
this._currentCredentials.expiration = new Date(credentials.expiration);
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
else if (!ignoreEmpty) {
|
||||
throw new qq.Error("Invalid credentials parameter!");
|
||||
}
|
||||
else {
|
||||
this._currentCredentials = {};
|
||||
}
|
||||
},
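// A usage sketch for `setCredentials` above, assuming `s3Uploader` is an existing
// qq.s3.FineUploaderBasic instance and `sts` holds freshly issued temporary credentials
// (both are hypothetical). The property names and the ISO-string handling of `expiration`
// come from the code above.
function applyTemporaryCredentials(s3Uploader, sts) {
    s3Uploader.setCredentials({
        accessKey: sts.accessKey,
        secretKey: sts.secretKey,
        sessionToken: sts.sessionToken,
        expiration: sts.expiration   // ISO 8601 string or Date; strings are converted to a Date
    });
}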
|
||||
|
||||
setAcl: function(acl, id) {
|
||||
this._aclStore.set(acl, id);
|
||||
},
|
||||
|
||||
/**
|
||||
* Ensures the parent's upload handler creator passes any additional S3-specific options to the handler as well
|
||||
* as information required to instantiate the specific handler based on the current browser's capabilities.
|
||||
*
|
||||
* @returns {qq.UploadHandlerController}
|
||||
* @private
|
||||
*/
|
||||
_createUploadHandler: function() {
|
||||
var self = this,
|
||||
additionalOptions = {
|
||||
objectProperties: this._options.objectProperties,
|
||||
aclStore: this._aclStore,
|
||||
signature: this._options.signature,
|
||||
iframeSupport: this._options.iframeSupport,
|
||||
getKeyName: qq.bind(this._determineKeyName, this),
|
||||
// pass size limit validation values to include in the request so AWS enforces this server-side
|
||||
validation: {
|
||||
minSizeLimit: this._options.validation.minSizeLimit,
|
||||
maxSizeLimit: this._options.validation.sizeLimit
|
||||
}
|
||||
};
|
||||
|
||||
// We assume HTTP if it is missing from the start of the endpoint string.
|
||||
qq.override(this._endpointStore, function(super_) {
|
||||
return {
|
||||
get: function(id) {
|
||||
var endpoint = super_.get(id);
|
||||
|
||||
if (endpoint.indexOf("http") < 0) {
|
||||
return "http://" + endpoint;
|
||||
}
|
||||
|
||||
return endpoint;
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
// Param names should be lower case to avoid signature mismatches
|
||||
qq.override(this._paramsStore, function(super_) {
|
||||
return {
|
||||
get: function(id) {
|
||||
var oldParams = super_.get(id),
|
||||
modifiedParams = {};
|
||||
|
||||
qq.each(oldParams, function(name, val) {
|
||||
modifiedParams[name.toLowerCase()] = qq.isFunction(val) ? val() : val;
|
||||
});
|
||||
|
||||
return modifiedParams;
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
additionalOptions.signature.credentialsProvider = {
|
||||
get: function() {
|
||||
return self._currentCredentials;
|
||||
},
|
||||
|
||||
onExpired: function() {
|
||||
var updateCredentials = new qq.Promise(),
|
||||
callbackRetVal = self._options.callbacks.onCredentialsExpired();
|
||||
|
||||
if (qq.isGenericPromise(callbackRetVal)) {
|
||||
callbackRetVal.then(function(credentials) {
|
||||
try {
|
||||
self.setCredentials(credentials);
|
||||
updateCredentials.success();
|
||||
}
|
||||
catch (error) {
|
||||
self.log("Invalid credentials returned from onCredentialsExpired callback! (" + error.message + ")", "error");
|
||||
updateCredentials.failure("onCredentialsExpired did not return valid credentials.");
|
||||
}
|
||||
}, function(errorMsg) {
|
||||
self.log("onCredentialsExpired callback indicated failure! (" + errorMsg + ")", "error");
|
||||
updateCredentials.failure("onCredentialsExpired callback failed.");
|
||||
});
|
||||
}
|
||||
else {
|
||||
self.log("onCredentialsExpired callback did not return a promise!", "error");
|
||||
updateCredentials.failure("Unexpected return value for onCredentialsExpired.");
|
||||
}
|
||||
|
||||
return updateCredentials;
|
||||
}
|
||||
};
|
||||
|
||||
return qq.FineUploaderBasic.prototype._createUploadHandler.call(this, additionalOptions, "s3");
|
||||
},
|
||||
|
||||
/**
|
||||
* Determine the file's key name and passes it to the caller via a promissory callback. This also may
|
||||
* delegate to an integrator-defined function that determines the file's key name on demand,
|
||||
* which also may be promissory.
|
||||
*
|
||||
* @param id ID of the file
|
||||
* @param filename Name of the file
|
||||
* @returns {qq.Promise} A promise that will be fulfilled when the key name has been determined (and will be passed to the caller via the success callback).
|
||||
* @private
|
||||
*/
|
||||
_determineKeyName: function(id, filename) {
|
||||
/*jshint -W015*/
|
||||
var promise = new qq.Promise(),
|
||||
keynameLogic = this._options.objectProperties.key,
|
||||
extension = qq.getExtension(filename),
|
||||
onGetKeynameFailure = promise.failure,
|
||||
onGetKeynameSuccess = function(keyname, extension) {
|
||||
var keynameToUse = keyname;
|
||||
|
||||
if (extension !== undefined) {
|
||||
keynameToUse += "." + extension;
|
||||
}
|
||||
|
||||
promise.success(keynameToUse);
|
||||
};
|
||||
|
||||
switch(keynameLogic) {
|
||||
case "uuid":
|
||||
onGetKeynameSuccess(this.getUuid(id), extension);
|
||||
break;
|
||||
case "filename":
|
||||
onGetKeynameSuccess(filename);
|
||||
break;
|
||||
default:
|
||||
if (qq.isFunction(keynameLogic)) {
|
||||
this._handleKeynameFunction(keynameLogic, id, onGetKeynameSuccess, onGetKeynameFailure);
|
||||
}
|
||||
else {
|
||||
this.log(keynameLogic + " is not a valid value for the s3.keyname option!", "error");
|
||||
onGetKeynameFailure();
|
||||
}
|
||||
}
|
||||
|
||||
return promise;
|
||||
},
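// A sketch of an integrator-supplied, promissory key-name function for the `objectProperties.key`
// option handled above. The "/s3/keyname" route is a hypothetical backend endpoint; the contract
// (return a qq.Promise and fulfill it with the key name, or fail it) is the one enforced by
// `_handleKeynameFunction`, which invokes the function with the uploader instance as `this`.
function promissoryKeyName(fileId) {
    /* jshint validthis:true */
    var promise = new qq.Promise(),
        xhr = new XMLHttpRequest(),
        name = this.getName(fileId);

    xhr.open("GET", "/s3/keyname?name=" + encodeURIComponent(name), true);
    xhr.onload = function() {
        if (xhr.status === 200) {
            promise.success(xhr.responseText);
        }
        else {
            promise.failure("Key name request failed with status " + xhr.status);
        }
    };
    xhr.onerror = function() {
        promise.failure("Key name request could not be sent");
    };
    xhr.send();

    return promise;
}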
|
||||
|
||||
/**
|
||||
* Called by the internal onUpload handler if the integrator has supplied a function to determine
|
||||
* the file's key name. The integrator's function may be promissory. We also need to fulfill
|
||||
* the promise contract associated with the caller as well.
|
||||
*
|
||||
* @param keynameFunc Integrator-supplied function that must be executed to determine the key name. May be promissory.
|
||||
* @param id ID of the associated file
|
||||
* @param successCallback Invoke this if key name retrieval is successful, passing in the key name.
|
||||
* @param failureCallback Invoke this if key name retrieval was unsuccessful.
|
||||
* @private
|
||||
*/
|
||||
_handleKeynameFunction: function(keynameFunc, id, successCallback, failureCallback) {
|
||||
var self = this,
|
||||
onSuccess = function(keyname) {
|
||||
successCallback(keyname);
|
||||
},
|
||||
onFailure = function(reason) {
|
||||
self.log(qq.format("Failed to retrieve key name for {}. Reason: {}", id, reason || "null"), "error");
|
||||
failureCallback(reason);
|
||||
},
|
||||
keyname = keynameFunc.call(this, id);
|
||||
|
||||
|
||||
if (qq.isGenericPromise(keyname)) {
|
||||
keyname.then(onSuccess, onFailure);
|
||||
}
|
||||
/*jshint -W116*/
|
||||
else if (keyname == null) {
|
||||
onFailure();
|
||||
}
|
||||
else {
|
||||
onSuccess(keyname);
|
||||
}
|
||||
},
|
||||
|
||||
_getEndpointSpecificParams: function(id, response, maybeXhr) {
|
||||
var params = {
|
||||
key: this.getKey(id),
|
||||
uuid: this.getUuid(id),
|
||||
name: this.getName(id),
|
||||
bucket: qq.s3.util.getBucket(this._endpointStore.get(id))
|
||||
};
|
||||
|
||||
if (maybeXhr && maybeXhr.getResponseHeader("ETag")) {
|
||||
params.etag = maybeXhr.getResponseHeader("ETag");
|
||||
}
|
||||
else if (response.etag) {
|
||||
params.etag = response.etag;
|
||||
}
|
||||
|
||||
return params;
|
||||
},
|
||||
|
||||
// Hooks into the base internal `_onSubmitDelete` to add key and bucket params to the delete file request.
|
||||
_onSubmitDelete: function(id, onSuccessCallback) {
|
||||
var additionalMandatedParams = {
|
||||
key: this.getKey(id),
|
||||
bucket: qq.s3.util.getBucket(this._endpointStore.get(id))
|
||||
};
|
||||
|
||||
return qq.FineUploaderBasic.prototype._onSubmitDelete.call(this, id, onSuccessCallback, additionalMandatedParams);
|
||||
},
|
||||
|
||||
_addCannedFile: function(sessionData) {
|
||||
var id;
|
||||
|
||||
/* jshint eqnull:true */
|
||||
if (sessionData.s3Key == null) {
|
||||
throw new qq.Error("Did not find s3Key property in server session response. This is required!");
|
||||
}
|
||||
else {
|
||||
id = qq.FineUploaderBasic.prototype._addCannedFile.apply(this, arguments);
|
||||
this._cannedKeys[id] = sessionData.s3Key;
|
||||
}
|
||||
|
||||
return id;
|
||||
}
|
||||
});
|
||||
}());
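// A minimal instantiation sketch for the S3 uploader defined above. The option names mirror the
// defaults in the constructor; the bucket URL, routes and limits below are placeholders rather
// than values taken from this repository.
var s3Uploader = new qq.s3.FineUploaderBasic({
    request: {
        // Bucket endpoint - any of the URL forms recognized by qq.s3.util.getBucket will do.
        endpoint: "http://example-bucket.s3.amazonaws.com",  // placeholder
        accessKey: "AKIA_EXAMPLE_KEY"                        // placeholder public key
    },
    signature: {
        endpoint: "/s3/signature"                            // placeholder signing route on the local server
    },
    uploadSuccess: {
        endpoint: "/s3/success"                              // placeholder post-upload notification route
    },
    objectProperties: {
        acl: "private",
        key: "uuid"          // or "filename", or a promissory function such as the sketch above
    },
    chunking: {
        partSize: 5242880    // S3's 5 MiB minimum part size, as in the defaults above
    },
    validation: {
        sizeLimit: 104857600 // placeholder 100 MiB cap, also enforced by AWS via the policy
    }
});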
|
@ -1,37 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* This defines FineUploader mode w/ support for uploading to S3, which provides all the basic
|
||||
* functionality of Fine Uploader as well as code to handle uploads directly to S3.
|
||||
* This module inherits all logic from FineUploader mode and FineUploaderBasicS3 mode and adds some UI-related logic
|
||||
* specific to the upload-to-S3 workflow. Some inherited options and API methods have a special meaning
|
||||
* in the context of the S3 uploader.
|
||||
*/
|
||||
(function(){
|
||||
"use strict";
|
||||
|
||||
qq.s3.FineUploader = function(o) {
|
||||
var options = {
|
||||
failedUploadTextDisplay: {
|
||||
mode: "custom"
|
||||
}
|
||||
};
|
||||
|
||||
// Replace any default options with user defined ones
|
||||
qq.extend(options, o, true);
|
||||
|
||||
// Inherit instance data from FineUploader, which should in turn inherit from s3.FineUploaderBasic.
|
||||
qq.FineUploader.call(this, options, "s3");
|
||||
|
||||
if (!qq.supportedFeatures.ajaxUploading && options.iframeSupport.localBlankPagePath === undefined) {
|
||||
this._options.element.innerHTML = "<div>You MUST set the <code>localBlankPagePath</code> property " +
|
||||
"of the <code>iframeSupport</code> option since this browser does not support the File API!</div>";
|
||||
}
|
||||
};
|
||||
|
||||
// Inherit the API methods from FineUploaderBasicS3
|
||||
qq.extend(qq.s3.FineUploader.prototype, qq.s3.FineUploaderBasic.prototype);
|
||||
|
||||
// Inherit public and private API methods related to UI
|
||||
qq.extend(qq.s3.FineUploader.prototype, qq.uiPublicApi);
|
||||
qq.extend(qq.s3.FineUploader.prototype, qq.uiPrivateApi);
|
||||
}());
|
@ -1,361 +0,0 @@
|
||||
/*globals qq */
|
||||
qq.s3 = qq.s3 || {};
|
||||
|
||||
qq.s3.util = qq.s3.util || (function() {
|
||||
"use strict";
|
||||
|
||||
return {
|
||||
AWS_PARAM_PREFIX: "x-amz-meta-",
|
||||
|
||||
SESSION_TOKEN_PARAM_NAME: "x-amz-security-token",
|
||||
|
||||
REDUCED_REDUNDANCY_PARAM_NAME: "x-amz-storage-class",
|
||||
REDUCED_REDUNDANCY_PARAM_VALUE: "REDUCED_REDUNDANCY",
|
||||
|
||||
SERVER_SIDE_ENCRYPTION_PARAM_NAME: "x-amz-server-side-encryption",
|
||||
SERVER_SIDE_ENCRYPTION_PARAM_VALUE: "AES256",
|
||||
|
||||
/**
|
||||
* This allows for the region to be specified in the bucket's endpoint URL, or not.
|
||||
*
|
||||
* Examples of some valid endpoints are:
|
||||
* http://foo.s3.amazonaws.com
|
||||
* https://foo.s3.amazonaws.com
|
||||
* http://foo.s3-ap-northeast-1.amazonaws.com
|
||||
* foo.s3.amazonaws.com
|
||||
* http://foo.bar.com
|
||||
* http://s3.amazonaws.com/foo.bar.com
|
||||
* ...etc
|
||||
*
|
||||
* @param endpoint The bucket's URL.
|
||||
* @returns {String || undefined} The bucket name, or undefined if the URL cannot be parsed.
|
||||
*/
|
||||
getBucket: function(endpoint) {
|
||||
var patterns = [
|
||||
//bucket in domain
|
||||
/^(?:https?:\/\/)?([a-z0-9.\-_]+)\.s3(?:-[a-z0-9\-]+)?\.amazonaws\.com/i,
|
||||
//bucket in path
|
||||
/^(?:https?:\/\/)?s3(?:-[a-z0-9\-]+)?\.amazonaws\.com\/([a-z0-9.\-_]+)/i,
|
||||
//custom domain
|
||||
/^(?:https?:\/\/)?([a-z0-9.\-_]+)/i
|
||||
],
|
||||
bucket;
|
||||
|
||||
qq.each(patterns, function(idx, pattern) {
|
||||
var match = pattern.exec(endpoint);
|
||||
|
||||
if (match) {
|
||||
bucket = match[1];
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
return bucket;
|
||||
},
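// A few concrete calls against `getBucket` above, one per endpoint form listed in its doc
// comment. The bucket and domain names are placeholders.
var bucketFromDomain = qq.s3.util.getBucket("http://foo.s3.amazonaws.com"),                          // "foo"
    bucketFromRegionalDomain = qq.s3.util.getBucket("https://foo.s3-ap-northeast-1.amazonaws.com"),  // "foo"
    bucketFromPath = qq.s3.util.getBucket("http://s3.amazonaws.com/foo.bar.com"),                    // "foo.bar.com"
    bucketFromCustomDomain = qq.s3.util.getBucket("http://files.example.com");                       // "files.example.com"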
|
||||
|
||||
/**
|
||||
* Create a policy document to be signed and sent along with the S3 upload request.
|
||||
*
|
||||
* @param spec Object with properties used to construct the policy document.
|
||||
* @returns {Object} Policy doc.
|
||||
*/
|
||||
getPolicy: function(spec) {
|
||||
var policy = {},
|
||||
conditions = [],
|
||||
bucket = qq.s3.util.getBucket(spec.endpoint),
|
||||
key = spec.key,
|
||||
acl = spec.acl,
|
||||
type = spec.type,
|
||||
expirationDate = new Date(),
|
||||
expectedStatus = spec.expectedStatus,
|
||||
sessionToken = spec.sessionToken,
|
||||
params = spec.params,
|
||||
successRedirectUrl = qq.s3.util.getSuccessRedirectAbsoluteUrl(spec.successRedirectUrl),
|
||||
minFileSize = spec.minFileSize,
|
||||
maxFileSize = spec.maxFileSize,
|
||||
reducedRedundancy = spec.reducedRedundancy,
|
||||
serverSideEncryption = spec.serverSideEncryption;
|
||||
|
||||
policy.expiration = qq.s3.util.getPolicyExpirationDate(expirationDate);
|
||||
|
||||
conditions.push({acl: acl});
|
||||
conditions.push({bucket: bucket});
|
||||
|
||||
if (type) {
|
||||
conditions.push({"Content-Type": type});
|
||||
}
|
||||
|
||||
if (expectedStatus) {
|
||||
conditions.push({success_action_status: expectedStatus.toString()});
|
||||
}
|
||||
|
||||
if (successRedirectUrl) {
|
||||
conditions.push({success_action_redirect: successRedirectUrl});
|
||||
}
|
||||
|
||||
if (reducedRedundancy) {
|
||||
conditions.push({});
|
||||
conditions[conditions.length - 1][qq.s3.util.REDUCED_REDUNDANCY_PARAM_NAME] = qq.s3.util.REDUCED_REDUNDANCY_PARAM_VALUE;
|
||||
}
|
||||
|
||||
if (sessionToken) {
|
||||
conditions.push({});
|
||||
conditions[conditions.length - 1][qq.s3.util.SESSION_TOKEN_PARAM_NAME] = sessionToken;
|
||||
}
|
||||
|
||||
if (serverSideEncryption) {
|
||||
conditions.push({});
|
||||
conditions[conditions.length - 1][qq.s3.util.SERVER_SIDE_ENCRYPTION_PARAM_NAME] = qq.s3.util.SERVER_SIDE_ENCRYPTION_PARAM_VALUE;
|
||||
}
|
||||
|
||||
conditions.push({key: key});
|
||||
|
||||
// user metadata
|
||||
qq.each(params, function(name, val) {
|
||||
var awsParamName = qq.s3.util.AWS_PARAM_PREFIX + name,
|
||||
param = {};
|
||||
|
||||
param[awsParamName] = encodeURIComponent(val);
|
||||
conditions.push(param);
|
||||
});
|
||||
|
||||
policy.conditions = conditions;
|
||||
|
||||
qq.s3.util.enforceSizeLimits(policy, minFileSize, maxFileSize);
|
||||
|
||||
return policy;
|
||||
},
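// A representative policy document of the shape produced by `getPolicy` above, assuming a
// private ACL, an expected 200 status, one custom param and a size limit. The bucket, key,
// content type and dates are placeholders.
var samplePolicy = {
    expiration: "2014-01-01T00:05:00.000Z",          // ~5 minutes ahead, per getPolicyExpirationDate
    conditions: [
        {acl: "private"},
        {bucket: "example-bucket"},
        {"Content-Type": "image/jpeg"},
        {success_action_status: "200"},
        {key: "4a58a386-77a6-4b42-b1a3-1d3f1c0e5a5f.jpg"},
        {"x-amz-meta-category": "photos"},           // custom params get the x-amz-meta- prefix and URI-encoded values
        ["content-length-range", "0", "104857600"]   // appended by enforceSizeLimits
    ]
};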
|
||||
|
||||
/**
|
||||
* Update a previously constructed policy document with updated credentials. Currently, this only requires we
|
||||
* update the session token. This is only relevant if requests are being signed client-side.
|
||||
*
|
||||
* @param policy Live policy document
|
||||
* @param newSessionToken Updated session token.
|
||||
*/
|
||||
refreshPolicyCredentials: function(policy, newSessionToken) {
|
||||
var sessionTokenFound = false;
|
||||
|
||||
qq.each(policy.conditions, function(oldCondIdx, oldCondObj) {
|
||||
qq.each(oldCondObj, function(oldCondName, oldCondVal) {
|
||||
if (oldCondName === qq.s3.util.SESSION_TOKEN_PARAM_NAME) {
|
||||
oldCondObj[oldCondName] = newSessionToken;
|
||||
sessionTokenFound = true;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
if (!sessionTokenFound) {
|
||||
policy.conditions.push({});
|
||||
policy.conditions[policy.conditions.length - 1][qq.s3.util.SESSION_TOKEN_PARAM_NAME] = newSessionToken;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Generates all parameters to be passed along with the S3 upload request. This includes invoking a callback
|
||||
* that is expected to asynchronously retrieve a signature for the policy document. Note that the server
|
||||
* signing the request should reject a "tainted" policy document that includes unexpected values, since it is
|
||||
* still possible for a malicious user to tamper with these values during policy document generation,
|
||||
* before it is sent to the server for signing.
|
||||
*
|
||||
* @param spec Object with properties: `params`, `type`, `key`, `accessKey`, `acl`, `expectedStatus`, `successRedirectUrl`,
|
||||
* `reducedRedundancy`, `serverSideEncryption`, and `log()`, along with any options associated with `qq.s3.util.getPolicy()`.
|
||||
* @returns {qq.Promise} Promise that will be fulfilled once all parameters have been determined.
|
||||
*/
|
||||
generateAwsParams: function(spec, signPolicyCallback) {
|
||||
var awsParams = {},
|
||||
customParams = spec.params,
|
||||
promise = new qq.Promise(),
|
||||
policyJson = qq.s3.util.getPolicy(spec),
|
||||
sessionToken = spec.sessionToken,
|
||||
type = spec.type,
|
||||
key = spec.key,
|
||||
accessKey = spec.accessKey,
|
||||
acl = spec.acl,
|
||||
expectedStatus = spec.expectedStatus,
|
||||
successRedirectUrl = qq.s3.util.getSuccessRedirectAbsoluteUrl(spec.successRedirectUrl),
|
||||
reducedRedundancy = spec.reducedRedundancy,
|
||||
serverSideEncryption = spec.serverSideEncryption,
|
||||
log = spec.log;
|
||||
|
||||
awsParams.key = key;
|
||||
awsParams.AWSAccessKeyId = accessKey;
|
||||
|
||||
if (type) {
|
||||
awsParams["Content-Type"] = type;
|
||||
}
|
||||
|
||||
if (expectedStatus) {
|
||||
awsParams.success_action_status = expectedStatus;
|
||||
}
|
||||
|
||||
if (successRedirectUrl) {
|
||||
awsParams.success_action_redirect = successRedirectUrl;
|
||||
}
|
||||
|
||||
if (reducedRedundancy) {
|
||||
awsParams[qq.s3.util.REDUCED_REDUNDANCY_PARAM_NAME] = qq.s3.util.REDUCED_REDUNDANCY_PARAM_VALUE;
|
||||
}
|
||||
|
||||
if (serverSideEncryption) {
|
||||
awsParams[qq.s3.util.SERVER_SIDE_ENCRYPTION_PARAM_NAME] = qq.s3.util.SERVER_SIDE_ENCRYPTION_PARAM_VALUE;
|
||||
}
|
||||
|
||||
if (sessionToken) {
|
||||
awsParams[qq.s3.util.SESSION_TOKEN_PARAM_NAME] = sessionToken;
|
||||
}
|
||||
|
||||
awsParams.acl = acl;
|
||||
|
||||
// Custom (user-supplied) params must be prefixed with the value of `qq.s3.util.AWS_PARAM_PREFIX`.
|
||||
// Custom param values will be URI encoded as well.
|
||||
qq.each(customParams, function(name, val) {
|
||||
var awsParamName = qq.s3.util.AWS_PARAM_PREFIX + name;
|
||||
awsParams[awsParamName] = encodeURIComponent(val);
|
||||
});
|
||||
|
||||
// Invoke a promissory callback that should provide us with a base64-encoded policy doc and an
|
||||
// HMAC signature for the policy doc.
|
||||
signPolicyCallback(policyJson).then(
|
||||
function(policyAndSignature, updatedAccessKey, updatedSessionToken) {
|
||||
awsParams.policy = policyAndSignature.policy;
|
||||
awsParams.signature = policyAndSignature.signature;
|
||||
|
||||
if (updatedAccessKey) {
|
||||
awsParams.AWSAccessKeyId = updatedAccessKey;
|
||||
}
|
||||
if (updatedSessionToken) {
|
||||
awsParams[qq.s3.util.SESSION_TOKEN_PARAM_NAME] = updatedSessionToken;
|
||||
}
|
||||
|
||||
promise.success(awsParams);
|
||||
},
|
||||
function(errorMessage) {
|
||||
errorMessage = errorMessage || "Can't continue further with request to S3 as we did not receive " +
|
||||
"a valid signature and policy from the server.";
|
||||
|
||||
log("Policy signing failed. " + errorMessage, "error");
|
||||
promise.failure(errorMessage);
|
||||
}
|
||||
);
|
||||
|
||||
return promise;
|
||||
},
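// A sketch of a `signPolicyCallback` suitable for `generateAwsParams` above. In the handler this
// role is filled by `qq.s3.RequestSigner` bound to the configured signature endpoint; the
// "/s3/signature" route and the response shape here are assumptions. The fulfilled value must
// expose `policy` (the base64-encoded policy document) and `signature` (its HMAC), as consumed
// by the success handler above.
function signPolicyOnServer(policyJson) {
    var promise = new qq.Promise(),
        xhr = new XMLHttpRequest();

    xhr.open("POST", "/s3/signature", true);
    xhr.setRequestHeader("Content-Type", "application/json");
    xhr.onload = function() {
        if (xhr.status === 200) {
            // Assumed response shape: {"policy": "...", "signature": "..."}
            promise.success(qq.parseJson(xhr.responseText));
        }
        else {
            promise.failure("Signature request failed with status " + xhr.status);
        }
    };
    xhr.onerror = function() {
        promise.failure("Signature request could not be sent");
    };
    xhr.send(JSON.stringify(policyJson));

    return promise;
}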
|
||||
|
||||
/**
|
||||
* Add a condition to an existing S3 upload request policy document used to ensure AWS enforces any size
|
||||
* restrictions placed on files server-side. This is important to do, in case users mess with the client-side
|
||||
* checks already in place.
|
||||
*
|
||||
* @param policy Policy document as an `Object`, with a `conditions` property already attached
|
||||
* @param minSize Minimum acceptable size, in bytes
|
||||
* @param maxSize Maximum acceptable size, in bytes (0 = unlimited)
|
||||
*/
|
||||
enforceSizeLimits: function(policy, minSize, maxSize) {
|
||||
var adjustedMinSize = minSize < 0 ? 0 : minSize,
|
||||
// Adjust a maxSize of 0 to the largest possible integer, since we must specify a high and a low in the request
|
||||
adjustedMaxSize = maxSize <= 0 ? 9007199254740992 : maxSize;
|
||||
|
||||
if (minSize > 0 || maxSize > 0) {
|
||||
policy.conditions.push(["content-length-range", adjustedMinSize.toString(), adjustedMaxSize.toString()]);
|
||||
}
|
||||
},
|
||||
|
||||
getPolicyExpirationDate: function(date) {
|
||||
/*jshint -W014 */
|
||||
// Is this going to be a problem if we encounter this moments before 2 AM just before daylight savings time ends?
|
||||
date.setMinutes(date.getMinutes() + 5);
|
||||
|
||||
if (Date.prototype.toISOString) {
|
||||
return date.toISOString();
|
||||
}
|
||||
else {
|
||||
var pad = function(number) {
|
||||
var r = String(number);
|
||||
|
||||
if ( r.length === 1 ) {
|
||||
r = "0" + r;
|
||||
}
|
||||
|
||||
return r;
|
||||
};
|
||||
|
||||
return date.getUTCFullYear()
|
||||
+ "-" + pad( date.getUTCMonth() + 1 )
|
||||
+ "-" + pad( date.getUTCDate() )
|
||||
+ "T" + pad( date.getUTCHours() )
|
||||
+ ":" + pad( date.getUTCMinutes() )
|
||||
+ ":" + pad( date.getUTCSeconds() )
|
||||
+ "." + String( (date.getUTCMilliseconds()/1000).toFixed(3) ).slice( 2, 5 )
|
||||
+ "Z";
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Looks at a response from S3 contained in an iframe and parses the query string in an attempt to identify
|
||||
* the associated resource.
|
||||
*
|
||||
* @param iframe Iframe containing response
|
||||
* @returns {{bucket: *, key: *, etag: *}}
|
||||
*/
|
||||
parseIframeResponse: function(iframe) {
|
||||
var doc = iframe.contentDocument || iframe.contentWindow.document,
|
||||
queryString = doc.location.search,
|
||||
match = /bucket=(.+)&key=(.+)&etag=(.+)/.exec(queryString);
|
||||
|
||||
if (match) {
|
||||
return {
|
||||
bucket: match[1],
|
||||
key: match[2],
|
||||
etag: match[3].replace(/%22/g, "")
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* @param successRedirectUrl Relative or absolute location of success redirect page
|
||||
* @returns {*|string} undefined if the parameter is undefined, otherwise the absolute location of the success redirect page
|
||||
*/
|
||||
getSuccessRedirectAbsoluteUrl: function(successRedirectUrl) {
|
||||
if (successRedirectUrl) {
|
||||
var targetAnchorContainer = document.createElement("div"),
|
||||
targetAnchor;
|
||||
|
||||
if (qq.ie7()) {
|
||||
// Note that we must make use of `innerHTML` for IE7 only instead of simply creating an anchor via
|
||||
// `document.createElement('a')` and setting the `href` attribute. The latter approach does not allow us to
|
||||
// obtain an absolute URL in IE7 if the `endpoint` is a relative URL.
|
||||
targetAnchorContainer.innerHTML = "<a href='" + successRedirectUrl + "'></a>";
|
||||
targetAnchor = targetAnchorContainer.firstChild;
|
||||
return targetAnchor.href;
|
||||
}
|
||||
else {
|
||||
// IE8 and IE9 do not seem to derive an absolute URL from a relative URL using the `innerHTML`
|
||||
// approach above, so we'll just create an anchor this way and set its `href` attribute.
|
||||
// Due to yet another quirk in IE8 and IE9, we have to set the `href` equal to itself
|
||||
// in order to ensure relative URLs will be properly parsed.
|
||||
targetAnchor = document.createElement("a");
|
||||
targetAnchor.href = successRedirectUrl;
|
||||
targetAnchor.href = targetAnchor.href;
|
||||
return targetAnchor.href;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// AWS employs a strict interpretation of [RFC 3986](http://tools.ietf.org/html/rfc3986#page-12).
|
||||
// So, we must ensure all reserved characters listed in the spec are percent-encoded,
|
||||
// and spaces are replaced with "+".
|
||||
encodeQueryStringParam: function(param) {
|
||||
var percentEncoded = encodeURIComponent(param);
|
||||
|
||||
// %-encode characters not handled by `encodeURIComponent` (to follow RFC 3986)
|
||||
percentEncoded = percentEncoded.replace(/[!'()]/g, escape);
|
||||
|
||||
// %-encode characters not handled by `escape` (to follow RFC 3986)
|
||||
percentEncoded = percentEncoded.replace(/\*/g, "%2A");
|
||||
|
||||
// replace percent-encoded spaces with a "+"
|
||||
return percentEncoded.replace(/%20/g, "+");
|
||||
}
|
||||
};
|
||||
}());
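// Sample output of `encodeQueryStringParam` above, illustrating the stricter RFC 3986 handling
// of characters that `encodeURIComponent` leaves untouched. The file name is a placeholder.
var encodedName = qq.s3.util.encodeQueryStringParam("my file (1)*.jpg");
// encodedName === "my+file+%281%29%2A.jpg" - spaces become "+", parentheses and "*" are percent-encoded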
|
@ -1,72 +0,0 @@
|
||||
/*globals qq, XMLHttpRequest*/
|
||||
/**
|
||||
* Thin module used to send GET requests to the server, expecting information about session
|
||||
* data used to initialize an uploader instance.
|
||||
*
|
||||
* @param spec Various options used to influence the associated request.
|
||||
* @constructor
|
||||
*/
|
||||
qq.SessionAjaxRequester = function(spec) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
options = {
|
||||
endpoint: null,
|
||||
customHeaders: {},
|
||||
params: {},
|
||||
cors: {
|
||||
expected: false,
|
||||
sendCredentials: false
|
||||
},
|
||||
onComplete: function(response, success, xhrOrXdr) {},
|
||||
log: function(str, level) {}
|
||||
};
|
||||
|
||||
qq.extend(options, spec);
|
||||
|
||||
function onComplete(id, xhrOrXdr, isError) {
|
||||
var response = null;
|
||||
|
||||
/* jshint eqnull:true */
|
||||
if (xhrOrXdr.responseText != null) {
|
||||
try {
|
||||
response = qq.parseJson(xhrOrXdr.responseText);
|
||||
}
|
||||
catch(err) {
|
||||
options.log("Problem parsing session response: " + err.message, "error");
|
||||
isError = true;
|
||||
}
|
||||
}
|
||||
|
||||
options.onComplete(response, !isError, xhrOrXdr);
|
||||
}
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
acceptHeader: "application/json",
|
||||
validMethods: ["GET"],
|
||||
method: "GET",
|
||||
endpointStore: {
|
||||
get: function() {
|
||||
return options.endpoint;
|
||||
}
|
||||
},
|
||||
customHeaders: options.customHeaders,
|
||||
log: options.log,
|
||||
onComplete: onComplete,
|
||||
cors: options.cors
|
||||
}));
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
queryServer: function() {
|
||||
var params = qq.extend({}, options.params);
|
||||
|
||||
options.log("Session query request.");
|
||||
|
||||
requester.initTransport("sessionRefresh")
|
||||
.withParams(params)
|
||||
.withCacheBuster()
|
||||
.send();
|
||||
}
|
||||
});
|
||||
};
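// A usage sketch for the session requester above. The endpoint and params are placeholders;
// the option names and the `queryServer` call come from the module itself.
var sessionRequester = new qq.SessionAjaxRequester({
    endpoint: "/uploads/session",   // placeholder GET route that returns the initial file list
    params: {user: "demo"},         // placeholder extra query-string params
    cors: {expected: false, sendCredentials: false},
    log: function(str, level) { console.log(level || "info", str); },
    onComplete: function(response, success, xhrOrXdr) {
        // `response` is the parsed JSON body, or null if the body could not be parsed.
        console.log("Session request finished. Success: " + success, response);
    }
});

sessionRequester.queryServer();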
|
@ -1,81 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Module used to control populating the initial list of files.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
qq.Session = function(spec) {
|
||||
"use strict";
|
||||
|
||||
var options = {
|
||||
endpoint: null,
|
||||
params: {},
|
||||
customHeaders: {},
|
||||
cors: {},
|
||||
addFileRecord: function(sessionData) {},
|
||||
log: function(message, level) {}
|
||||
};
|
||||
|
||||
qq.extend(options, spec, true);
|
||||
|
||||
|
||||
function isJsonResponseValid(response) {
|
||||
if (qq.isArray(response)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
options.log("Session response is not an array.", "error");
|
||||
}
|
||||
|
||||
function handleFileItems(fileItems, success, xhrOrXdr, promise) {
|
||||
var someItemsIgnored = false;
|
||||
|
||||
success = success && isJsonResponseValid(fileItems);
|
||||
|
||||
if (success) {
|
||||
qq.each(fileItems, function(idx, fileItem) {
|
||||
/* jshint eqnull:true */
|
||||
if (fileItem.uuid == null) {
|
||||
someItemsIgnored = true;
|
||||
options.log(qq.format("Session response item {} did not include a valid UUID - ignoring.", idx), "error");
|
||||
}
|
||||
else if (fileItem.name == null) {
|
||||
someItemsIgnored = true;
|
||||
options.log(qq.format("Session response item {} did not include a valid name - ignoring.", idx), "error");
|
||||
}
|
||||
else {
|
||||
try {
|
||||
options.addFileRecord(fileItem);
|
||||
return true;
|
||||
}
|
||||
catch(err) {
|
||||
someItemsIgnored = true;
|
||||
options.log(err.message, "error");
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
});
|
||||
}
|
||||
|
||||
promise[success && !someItemsIgnored ? "success" : "failure"](fileItems, xhrOrXdr);
|
||||
}
|
||||
|
||||
// Initiate a call to the server that will be used to populate the initial file list.
|
||||
// Returns a `qq.Promise`.
|
||||
this.refresh = function() {
|
||||
/*jshint indent:false */
|
||||
var refreshEffort = new qq.Promise(),
|
||||
refreshCompleteCallback = function(response, success, xhrOrXdr) {
|
||||
handleFileItems(response, success, xhrOrXdr, refreshEffort);
|
||||
},
|
||||
requesterOptions = qq.extend({}, options),
|
||||
requester = new qq.SessionAjaxRequester(
|
||||
qq.extend(requesterOptions, {onComplete: refreshCompleteCallback})
|
||||
);
|
||||
|
||||
requester.queryServer();
|
||||
|
||||
return refreshEffort;
|
||||
};
|
||||
};
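// A representative JSON body a session endpoint could return for the module above. Each item
// needs at least `uuid` and `name` (items missing either are skipped, as logged above); the S3
// uploader additionally requires `s3Key` for each canned file. All values are placeholders.
var sampleSessionResponse = [
    {
        uuid: "4a58a386-77a6-4b42-b1a3-1d3f1c0e5a5f",      // required
        name: "photo.jpg",                                  // required
        s3Key: "4a58a386-77a6-4b42-b1a3-1d3f1c0e5a5f.jpg"   // required by qq.s3.FineUploaderBasic._addCannedFile
    }
];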
|
@ -1,825 +0,0 @@
|
||||
/* globals qq */
|
||||
/* jshint -W065 */
|
||||
/**
|
||||
* Module responsible for rendering all Fine Uploader UI templates. This module also asserts at least
|
||||
* a limited amount of control over the template elements after they are added to the DOM.
|
||||
* Wherever possible, this module asserts total control over template elements present in the DOM.
|
||||
*
|
||||
* @param spec Specification object used to control various templating behaviors
|
||||
* @constructor
|
||||
*/
|
||||
qq.Templating = function(spec) {
|
||||
"use strict";
|
||||
|
||||
var FILE_ID_ATTR = "qq-file-id",
|
||||
FILE_CLASS_PREFIX = "qq-file-id-",
|
||||
THUMBNAIL_MAX_SIZE_ATTR = "qq-max-size",
|
||||
THUMBNAIL_SERVER_SCALE_ATTR = "qq-server-scale",
|
||||
// This variable is duplicated in the DnD module since it can function as a standalone as well
|
||||
HIDE_DROPZONE_ATTR = "qq-hide-dropzone",
|
||||
isCancelDisabled = false,
|
||||
thumbnailMaxSize = -1,
|
||||
options = {
|
||||
log: null,
|
||||
templateIdOrEl: "qq-template",
|
||||
containerEl: null,
|
||||
fileContainerEl: null,
|
||||
button: null,
|
||||
imageGenerator: null,
|
||||
classes: {
|
||||
hide: "qq-hide",
|
||||
editable: "qq-editable"
|
||||
},
|
||||
placeholders: {
|
||||
waitUntilUpdate: false,
|
||||
thumbnailNotAvailable: null,
|
||||
waitingForThumbnail: null
|
||||
},
|
||||
text: {
|
||||
paused: "Paused"
|
||||
}
|
||||
},
|
||||
selectorClasses = {
|
||||
button: "qq-upload-button-selector",
|
||||
drop: "qq-upload-drop-area-selector",
|
||||
list: "qq-upload-list-selector",
|
||||
progressBarContainer: "qq-progress-bar-container-selector",
|
||||
progressBar: "qq-progress-bar-selector",
|
||||
totalProgressBarContainer: "qq-total-progress-bar-container-selector",
|
||||
totalProgressBar: "qq-total-progress-bar-selector",
|
||||
file: "qq-upload-file-selector",
|
||||
spinner: "qq-upload-spinner-selector",
|
||||
size: "qq-upload-size-selector",
|
||||
cancel: "qq-upload-cancel-selector",
|
||||
pause: "qq-upload-pause-selector",
|
||||
continueButton: "qq-upload-continue-selector",
|
||||
deleteButton: "qq-upload-delete-selector",
|
||||
retry: "qq-upload-retry-selector",
|
||||
statusText: "qq-upload-status-text-selector",
|
||||
editFilenameInput: "qq-edit-filename-selector",
|
||||
editNameIcon: "qq-edit-filename-icon-selector",
|
||||
dropProcessing: "qq-drop-processing-selector",
|
||||
dropProcessingSpinner: "qq-drop-processing-spinner-selector",
|
||||
thumbnail: "qq-thumbnail-selector"
|
||||
},
|
||||
previewGeneration = {},
|
||||
cachedThumbnailNotAvailableImg = new qq.Promise(),
|
||||
cachedWaitingForThumbnailImg = new qq.Promise(),
|
||||
log,
|
||||
isEditElementsExist,
|
||||
isRetryElementExist,
|
||||
templateHtml,
|
||||
container,
|
||||
fileList,
|
||||
showThumbnails,
|
||||
serverScale;
|
||||
|
||||
/**
|
||||
* Grabs the HTML from the script tag holding the template markup. This function will also adjust
|
||||
* some internally-tracked state variables based on the contents of the template.
|
||||
* The template is filtered so that irrelevant elements (such as the drop zone if DnD is not supported)
|
||||
* are omitted from the DOM. Useful errors will be thrown if the template cannot be parsed.
|
||||
*
|
||||
* @returns {{template: *, fileTemplate: *}} HTML for the top-level file items templates
|
||||
*/
|
||||
function parseAndGetTemplate() {
|
||||
var scriptEl,
|
||||
scriptHtml,
|
||||
fileListNode,
|
||||
tempTemplateEl,
|
||||
fileListHtml,
|
||||
defaultButton,
|
||||
dropArea,
|
||||
thumbnail,
|
||||
dropProcessing;
|
||||
|
||||
log("Parsing template");
|
||||
|
||||
/*jshint -W116*/
|
||||
if (options.templateIdOrEl == null) {
|
||||
throw new Error("You MUST specify either a template element or ID!");
|
||||
}
|
||||
|
||||
// Grab the contents of the script tag holding the template.
|
||||
if (qq.isString(options.templateIdOrEl)) {
|
||||
scriptEl = document.getElementById(options.templateIdOrEl);
|
||||
|
||||
if (scriptEl === null) {
|
||||
throw new Error(qq.format("Cannot find template script at ID '{}'!", options.templateIdOrEl));
|
||||
}
|
||||
|
||||
scriptHtml = scriptEl.innerHTML;
|
||||
}
|
||||
else {
|
||||
if (options.templateIdOrEl.innerHTML === undefined) {
|
||||
throw new Error("You have specified an invalid value for the template option! " +
|
||||
"It must be an ID or an Element.");
|
||||
}
|
||||
|
||||
scriptHtml = options.templateIdOrEl.innerHTML;
|
||||
}
|
||||
|
||||
scriptHtml = qq.trimStr(scriptHtml);
|
||||
tempTemplateEl = document.createElement("div");
|
||||
tempTemplateEl.appendChild(qq.toElement(scriptHtml));
|
||||
|
||||
// Don't include the default template button in the DOM
|
||||
// if an alternate button container has been specified.
|
||||
if (options.button) {
|
||||
defaultButton = qq(tempTemplateEl).getByClass(selectorClasses.button)[0];
|
||||
if (defaultButton) {
|
||||
qq(defaultButton).remove();
|
||||
}
|
||||
}
|
||||
|
||||
// Omit the drop processing element from the DOM if DnD is not supported by the UA,
|
||||
// or the drag and drop module is not found.
|
||||
// NOTE: We are consciously not removing the drop zone if the UA doesn't support DnD
|
||||
// to support layouts where the drop zone is also a container for visible elements,
|
||||
// such as the file list.
|
||||
if (!qq.DragAndDrop || !qq.supportedFeatures.fileDrop) {
|
||||
dropProcessing = qq(tempTemplateEl).getByClass(selectorClasses.dropProcessing)[0];
|
||||
if (dropProcessing) {
|
||||
qq(dropProcessing).remove();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
dropArea = qq(tempTemplateEl).getByClass(selectorClasses.drop)[0];
|
||||
|
||||
// If DnD is not available then remove
|
||||
// it from the DOM as well.
|
||||
if (dropArea && !qq.DragAndDrop) {
|
||||
log("DnD module unavailable.", "info");
|
||||
qq(dropArea).remove();
|
||||
}
|
||||
|
||||
// If there is a drop area defined in the template, and the current UA doesn't support DnD,
|
||||
// and the drop area is marked as "hide before enter", ensure it is hidden as the DnD module
|
||||
// will not do this (since we will not be loading the DnD module)
|
||||
if (dropArea && !qq.supportedFeatures.fileDrop &&
|
||||
qq(dropArea).hasAttribute(HIDE_DROPZONE_ATTR)) {
|
||||
|
||||
qq(dropArea).css({
|
||||
display: "none"
|
||||
});
|
||||
}
|
||||
|
||||
// Ensure the `showThumbnails` flag is only set if the thumbnail element
|
||||
// is present in the template AND the current UA is capable of generating client-side previews.
|
||||
thumbnail = qq(tempTemplateEl).getByClass(selectorClasses.thumbnail)[0];
|
||||
if (!showThumbnails) {
|
||||
thumbnail && qq(thumbnail).remove();
|
||||
}
|
||||
else if (thumbnail) {
|
||||
thumbnailMaxSize = parseInt(thumbnail.getAttribute(THUMBNAIL_MAX_SIZE_ATTR));
|
||||
// Only enforce max size if the attr value is non-zero
|
||||
thumbnailMaxSize = thumbnailMaxSize > 0 ? thumbnailMaxSize : null;
|
||||
|
||||
serverScale = qq(thumbnail).hasAttribute(THUMBNAIL_SERVER_SCALE_ATTR);
|
||||
}
|
||||
showThumbnails = showThumbnails && thumbnail;
|
||||
|
||||
isEditElementsExist = qq(tempTemplateEl).getByClass(selectorClasses.editFilenameInput).length > 0;
|
||||
isRetryElementExist = qq(tempTemplateEl).getByClass(selectorClasses.retry).length > 0;
|
||||
|
||||
fileListNode = qq(tempTemplateEl).getByClass(selectorClasses.list)[0];
|
||||
/*jshint -W116*/
|
||||
if (fileListNode == null) {
|
||||
throw new Error("Could not find the file list container in the template!");
|
||||
}
|
||||
|
||||
fileListHtml = fileListNode.innerHTML;
|
||||
fileListNode.innerHTML = "";
|
||||
|
||||
log("Template parsing complete");
|
||||
|
||||
return {
|
||||
template: qq.trimStr(tempTemplateEl.innerHTML),
|
||||
fileTemplate: qq.trimStr(fileListHtml)
|
||||
};
|
||||
}
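// A minimal file-list template that `parseAndGetTemplate` above can consume, expressed as a
// string for brevity. Only a subset of the selector classes declared earlier is used, and the
// surrounding markup is illustrative; in a page this would normally live inside a non-executing
// script tag whose id matches the `templateIdOrEl` option (default "qq-template").
var minimalTemplateMarkup =
    "<div>" +
        "<div class='qq-upload-button-selector'><div>Upload a file</div></div>" +
        "<ul class='qq-upload-list-selector'>" +
            "<li>" +
                "<span class='qq-upload-spinner-selector'></span>" +
                "<span class='qq-upload-file-selector'></span>" +
                "<span class='qq-upload-size-selector'></span>" +
                "<a class='qq-upload-cancel-selector' href='#'>Cancel</a>" +
            "</li>" +
        "</ul>" +
    "</div>";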
|
||||
|
||||
function getFile(id) {
|
||||
return qq(fileList).getByClass(FILE_CLASS_PREFIX + id)[0];
|
||||
}
|
||||
|
||||
function getTemplateEl(context, cssClass) {
|
||||
return context && qq(context).getByClass(cssClass)[0];
|
||||
}
|
||||
|
||||
function prependFile(el, index) {
|
||||
var parentEl = fileList,
|
||||
beforeEl = parentEl.firstChild;
|
||||
|
||||
if (index > 0) {
|
||||
beforeEl = qq(parentEl).children()[index].nextSibling;
|
||||
|
||||
}
|
||||
|
||||
parentEl.insertBefore(el, beforeEl);
|
||||
}
|
||||
|
||||
function getCancel(id) {
|
||||
return getTemplateEl(getFile(id), selectorClasses.cancel);
|
||||
}
|
||||
|
||||
function getPause(id) {
|
||||
return getTemplateEl(getFile(id), selectorClasses.pause);
|
||||
}
|
||||
|
||||
function getContinue(id) {
|
||||
return getTemplateEl(getFile(id), selectorClasses.continueButton);
|
||||
}
|
||||
|
||||
function getProgress(id) {
|
||||
/* jshint eqnull:true */
|
||||
// Total progress bar
|
||||
if (id == null) {
|
||||
return getTemplateEl(container, selectorClasses.totalProgressBarContainer) ||
|
||||
getTemplateEl(container, selectorClasses.totalProgressBar);
|
||||
}
|
||||
|
||||
// Per-file progress bar
|
||||
return getTemplateEl(getFile(id), selectorClasses.progressBarContainer) ||
|
||||
getTemplateEl(getFile(id), selectorClasses.progressBar);
|
||||
}
|
||||
|
||||
function getSpinner(id) {
|
||||
return getTemplateEl(getFile(id), selectorClasses.spinner);
|
||||
}
|
||||
|
||||
function getEditIcon(id) {
|
||||
return getTemplateEl(getFile(id), selectorClasses.editNameIcon);
|
||||
}
|
||||
|
||||
function getSize(id) {
|
||||
return getTemplateEl(getFile(id), selectorClasses.size);
|
||||
}
|
||||
|
||||
function getDelete(id) {
|
||||
return getTemplateEl(getFile(id), selectorClasses.deleteButton);
|
||||
}
|
||||
|
||||
function getRetry(id) {
|
||||
return getTemplateEl(getFile(id), selectorClasses.retry);
|
||||
}
|
||||
|
||||
function getFilename(id) {
|
||||
return getTemplateEl(getFile(id), selectorClasses.file);
|
||||
}
|
||||
|
||||
function getDropProcessing() {
|
||||
return getTemplateEl(container, selectorClasses.dropProcessing);
|
||||
}
|
||||
|
||||
function getThumbnail(id) {
|
||||
return showThumbnails && getTemplateEl(getFile(id), selectorClasses.thumbnail);
|
||||
}
|
||||
|
||||
function hide(el) {
|
||||
el && qq(el).addClass(options.classes.hide);
|
||||
}
|
||||
|
||||
function show(el) {
|
||||
el && qq(el).removeClass(options.classes.hide);
|
||||
}
|
||||
|
||||
function setProgressBarWidth(id, percent) {
|
||||
var bar = getProgress(id),
|
||||
/* jshint eqnull:true */
|
||||
progressBarSelector = id == null ? selectorClasses.totalProgressBar : selectorClasses.progressBar;
|
||||
|
||||
if (bar && !qq(bar).hasClass(progressBarSelector)) {
|
||||
bar = qq(bar).getByClass(progressBarSelector)[0];
|
||||
}
|
||||
|
||||
bar && qq(bar).css({width: percent + "%"});
|
||||
}
|
||||
|
||||
// During initialization of the templating module we should cache any
|
||||
// placeholder images so we can quickly swap them into the file list on demand.
|
||||
// Any placeholder images that cannot be loaded/found are simply ignored.
|
||||
function cacheThumbnailPlaceholders() {
|
||||
var notAvailableUrl = options.placeholders.thumbnailNotAvailable,
|
||||
waitingUrl = options.placeholders.waitingForThumbnail,
|
||||
spec = {
|
||||
maxSize: thumbnailMaxSize,
|
||||
scale: serverScale
|
||||
};
|
||||
|
||||
if (showThumbnails) {
|
||||
if (notAvailableUrl) {
|
||||
options.imageGenerator.generate(notAvailableUrl, new Image(), spec).then(
|
||||
function(updatedImg) {
|
||||
cachedThumbnailNotAvailableImg.success(updatedImg);
|
||||
},
|
||||
function() {
|
||||
cachedThumbnailNotAvailableImg.failure();
|
||||
log("Problem loading 'not available' placeholder image at " + notAvailableUrl, "error");
|
||||
}
|
||||
);
|
||||
}
|
||||
else {
|
||||
cachedThumbnailNotAvailableImg.failure();
|
||||
}
|
||||
|
||||
if (waitingUrl) {
|
||||
options.imageGenerator.generate(waitingUrl, new Image(), spec).then(
|
||||
function(updatedImg) {
|
||||
cachedWaitingForThumbnailImg.success(updatedImg);
|
||||
},
|
||||
function() {
|
||||
cachedWaitingForThumbnailImg.failure();
|
||||
log("Problem loading 'waiting for thumbnail' placeholder image at " + waitingUrl, "error");
|
||||
}
|
||||
);
|
||||
}
|
||||
else {
|
||||
cachedWaitingForThumbnailImg.failure();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Displays a "waiting for thumbnail" type placeholder image
|
||||
// iff we were able to load it during initialization of the templating module.
|
||||
function displayWaitingImg(thumbnail) {
|
||||
var waitingImgPlacement = new qq.Promise();
|
||||
|
||||
cachedWaitingForThumbnailImg.then(function(img) {
|
||||
maybeScalePlaceholderViaCss(img, thumbnail);
|
||||
/* jshint eqnull:true */
|
||||
if (!thumbnail.src) {
|
||||
thumbnail.src = img.src;
|
||||
thumbnail.onload = function() {
|
||||
show(thumbnail);
|
||||
waitingImgPlacement.success();
|
||||
};
|
||||
}
|
||||
else {
|
||||
waitingImgPlacement.success();
|
||||
}
|
||||
}, function() {
|
||||
// In some browsers (such as IE9 and older), an img without a src attribute
|
||||
// is displayed as a "broken" image, so we should just hide the img tag
|
||||
// if we aren't going to display the "waiting" placeholder.
|
||||
hide(thumbnail);
|
||||
waitingImgPlacement.success();
|
||||
});
|
||||
|
||||
return waitingImgPlacement;
|
||||
}
|
||||
|
||||
// Displays a "thumbnail not available" type placeholder image
|
||||
// iff we were able to load this placeholder during initialization
|
||||
// of the templating module or after preview generation has failed.
|
||||
function maybeSetDisplayNotAvailableImg(id, thumbnail) {
|
||||
var previewing = previewGeneration[id] || new qq.Promise().failure(),
|
||||
notAvailableImgPlacement = new qq.Promise();
|
||||
|
||||
cachedThumbnailNotAvailableImg.then(function(img) {
|
||||
previewing.then(
|
||||
function() {
|
||||
notAvailableImgPlacement.success();
|
||||
},
|
||||
function() {
|
||||
maybeScalePlaceholderViaCss(img, thumbnail);
|
||||
thumbnail.onload = function() {
|
||||
notAvailableImgPlacement.success();
|
||||
};
|
||||
thumbnail.src = img.src;
|
||||
show(thumbnail);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
return notAvailableImgPlacement;
|
||||
}
|
||||
|
||||
// Ensures a placeholder image does not exceed any max size specified
|
||||
// via `style` attribute properties iff <canvas> was not used to scale
|
||||
// the placeholder AND the target <img> doesn't already have these `style` attribute properties set.
|
||||
function maybeScalePlaceholderViaCss(placeholder, thumbnail) {
|
||||
var maxWidth = placeholder.style.maxWidth,
|
||||
maxHeight = placeholder.style.maxHeight;
|
||||
|
||||
if (maxHeight && maxWidth && !thumbnail.style.maxWidth && !thumbnail.style.maxHeight) {
|
||||
qq(thumbnail).css({
|
||||
maxWidth: maxWidth,
|
||||
maxHeight: maxHeight
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function useCachedPreview(targetThumbnailId, cachedThumbnailId) {
|
||||
var targetThumbnail = getThumbnail(targetThumbnailId),
|
||||
cachedThumbnail = getThumbnail(cachedThumbnailId);
|
||||
|
||||
log(qq.format("ID {} is the same file as ID {}. Will use generated thumbnail from ID {} instead.", targetThumbnailId, cachedThumbnailId, cachedThumbnailId));
|
||||
|
||||
// Generation of the related thumbnail may still be in progress, so, wait until it is done.
|
||||
previewGeneration[cachedThumbnailId].then(function() {
|
||||
previewGeneration[targetThumbnailId].success();
|
||||
log(qq.format("Now using previously generated thumbnail created for ID {} on ID {}.", cachedThumbnailId, targetThumbnailId));
|
||||
targetThumbnail.src = cachedThumbnail.src;
|
||||
show(targetThumbnail);
|
||||
},
|
||||
function() {
|
||||
previewGeneration[targetThumbnailId].failure();
|
||||
if (!options.placeholders.waitUntilUpdate) {
|
||||
maybeSetDisplayNotAvailableImg(targetThumbnailId, targetThumbnail);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function generateNewPreview(id, blob, spec) {
|
||||
var thumbnail = getThumbnail(id);
|
||||
|
||||
log("Generating new thumbnail for " + id);
|
||||
blob.qqThumbnailId = id;
|
||||
|
||||
return options.imageGenerator.generate(blob, thumbnail, spec).then(
|
||||
function() {
|
||||
show(thumbnail);
|
||||
previewGeneration[id].success();
|
||||
},
|
||||
function() {
|
||||
previewGeneration[id].failure();
|
||||
|
||||
// Display the "not available" placeholder img only if we are
|
||||
// not expecting a thumbnail at a later point, such as in a server response.
|
||||
if (!options.placeholders.waitUntilUpdate) {
|
||||
maybeSetDisplayNotAvailableImg(id, thumbnail);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
qq.extend(options, spec);
|
||||
log = options.log;
|
||||
|
||||
container = options.containerEl;
|
||||
showThumbnails = options.imageGenerator !== undefined;
|
||||
templateHtml = parseAndGetTemplate();
|
||||
|
||||
cacheThumbnailPlaceholders();
|
||||
|
||||
qq.extend(this, {
|
||||
render: function() {
|
||||
log("Rendering template in DOM.");
|
||||
|
||||
container.innerHTML = templateHtml.template;
|
||||
hide(getDropProcessing());
|
||||
this.hideTotalProgress();
|
||||
fileList = options.fileContainerEl || getTemplateEl(container, selectorClasses.list);
|
||||
|
||||
log("Template rendering complete");
|
||||
},
|
||||
|
||||
renderFailure: function(message) {
|
||||
var cantRenderEl = qq.toElement(message);
|
||||
container.innerHTML = "";
|
||||
container.appendChild(cantRenderEl);
|
||||
},
|
||||
|
||||
reset: function() {
|
||||
this.render();
|
||||
},
|
||||
|
||||
clearFiles: function() {
|
||||
fileList.innerHTML = "";
|
||||
},
|
||||
|
||||
disableCancel: function() {
|
||||
isCancelDisabled = true;
|
||||
},
|
||||
|
||||
addFile: function(id, name, prependInfo) {
|
||||
var fileEl = qq.toElement(templateHtml.fileTemplate),
|
||||
fileNameEl = getTemplateEl(fileEl, selectorClasses.file);
|
||||
|
||||
qq(fileEl).addClass(FILE_CLASS_PREFIX + id);
|
||||
fileNameEl && qq(fileNameEl).setText(name);
|
||||
fileEl.setAttribute(FILE_ID_ATTR, id);
|
||||
|
||||
if (prependInfo) {
|
||||
prependFile(fileEl, prependInfo.index);
|
||||
}
|
||||
else {
|
||||
fileList.appendChild(fileEl);
|
||||
}
|
||||
|
||||
hide(getProgress(id));
|
||||
hide(getSize(id));
|
||||
hide(getDelete(id));
|
||||
hide(getRetry(id));
|
||||
hide(getPause(id));
|
||||
hide(getContinue(id));
|
||||
|
||||
if (isCancelDisabled) {
|
||||
this.hideCancel(id);
|
||||
}
|
||||
},
|
||||
|
||||
removeFile: function(id) {
|
||||
qq(getFile(id)).remove();
|
||||
},
|
||||
|
||||
getFileId: function(el) {
|
||||
var currentNode = el;
|
||||
|
||||
if (currentNode) {
|
||||
/*jshint -W116*/
|
||||
while (currentNode.getAttribute(FILE_ID_ATTR) == null) {
|
||||
currentNode = currentNode.parentNode;
|
||||
}
|
||||
|
||||
return parseInt(currentNode.getAttribute(FILE_ID_ATTR), 10);
|
||||
}
|
||||
},
|
||||
|
||||
getFileList: function() {
|
||||
return fileList;
|
||||
},
|
||||
|
||||
markFilenameEditable: function(id) {
|
||||
var filename = getFilename(id);
|
||||
|
||||
filename && qq(filename).addClass(options.classes.editable);
|
||||
},
|
||||
|
||||
updateFilename: function(id, name) {
|
||||
var filename = getFilename(id);
|
||||
|
||||
filename && qq(filename).setText(name);
|
||||
},
|
||||
|
||||
hideFilename: function(id) {
|
||||
hide(getFilename(id));
|
||||
},
|
||||
|
||||
showFilename: function(id) {
|
||||
show(getFilename(id));
|
||||
},
|
||||
|
||||
isFileName: function(el) {
|
||||
return qq(el).hasClass(selectorClasses.file);
|
||||
},
|
||||
|
||||
getButton: function() {
|
||||
return options.button || getTemplateEl(container, selectorClasses.button);
|
||||
},
|
||||
|
||||
hideDropProcessing: function() {
|
||||
hide(getDropProcessing());
|
||||
},
|
||||
|
||||
showDropProcessing: function() {
|
||||
show(getDropProcessing());
|
||||
},
|
||||
|
||||
getDropZone: function() {
|
||||
return getTemplateEl(container, selectorClasses.drop);
|
||||
},
|
||||
|
||||
isEditFilenamePossible: function() {
|
||||
return isEditElementsExist;
|
||||
},
|
||||
|
||||
hideRetry: function(id) {
|
||||
hide(getRetry(id));
|
||||
},
|
||||
|
||||
isRetryPossible: function() {
|
||||
return isRetryElementExist;
|
||||
},
|
||||
|
||||
showRetry: function(id) {
|
||||
show(getRetry(id));
|
||||
},
|
||||
|
||||
getFileContainer: function(id) {
|
||||
return getFile(id);
|
||||
},
|
||||
|
||||
showEditIcon: function(id) {
|
||||
var icon = getEditIcon(id);
|
||||
|
||||
icon && qq(icon).addClass(options.classes.editable);
|
||||
},
|
||||
|
||||
hideEditIcon: function(id) {
|
||||
var icon = getEditIcon(id);
|
||||
|
||||
icon && qq(icon).removeClass(options.classes.editable);
|
||||
},
|
||||
|
||||
isEditIcon: function(el) {
|
||||
return qq(el).hasClass(selectorClasses.editNameIcon);
|
||||
},
|
||||
|
||||
getEditInput: function(id) {
|
||||
return getTemplateEl(getFile(id), selectorClasses.editFilenameInput);
|
||||
},
|
||||
|
||||
isEditInput: function(el) {
|
||||
return qq(el).hasClass(selectorClasses.editFilenameInput);
|
||||
},
|
||||
|
||||
updateProgress: function(id, loaded, total) {
|
||||
var bar = getProgress(id),
|
||||
percent;
|
||||
|
||||
if (bar) {
|
||||
percent = Math.round(loaded / total * 100);
|
||||
|
||||
if (percent === 100) {
|
||||
hide(bar);
|
||||
}
|
||||
else {
|
||||
show(bar);
|
||||
}
|
||||
|
||||
setProgressBarWidth(id, percent);
|
||||
}
|
||||
},
|
||||
|
||||
updateTotalProgress: function(loaded, total) {
|
||||
this.updateProgress(null, loaded, total);
|
||||
},
|
||||
|
||||
hideProgress: function(id) {
|
||||
var bar = getProgress(id);
|
||||
|
||||
bar && hide(bar);
|
||||
},
|
||||
|
||||
hideTotalProgress: function() {
|
||||
this.hideProgress();
|
||||
},
|
||||
|
||||
resetProgress: function(id) {
|
||||
setProgressBarWidth(id, 0);
|
||||
},
|
||||
|
||||
resetTotalProgress: function() {
|
||||
this.resetProgress();
|
||||
},
|
||||
|
||||
showCancel: function(id) {
|
||||
if (!isCancelDisabled) {
|
||||
var cancel = getCancel(id);
|
||||
|
||||
cancel && qq(cancel).removeClass(options.classes.hide);
|
||||
}
|
||||
},
|
||||
|
||||
hideCancel: function(id) {
|
||||
hide(getCancel(id));
|
||||
},
|
||||
|
||||
isCancel: function(el) {
|
||||
return qq(el).hasClass(selectorClasses.cancel);
|
||||
},
|
||||
|
||||
allowPause: function(id) {
|
||||
show(getPause(id));
|
||||
hide(getContinue(id));
|
||||
},
|
||||
|
||||
uploadPaused: function(id) {
|
||||
this.setStatusText(id, options.text.paused);
|
||||
this.allowContinueButton(id);
|
||||
hide(getSpinner(id));
|
||||
},
|
||||
|
||||
hidePause: function(id) {
|
||||
hide(getPause(id));
|
||||
},
|
||||
|
||||
isPause: function(el) {
|
||||
return qq(el).hasClass(selectorClasses.pause);
|
||||
},
|
||||
|
||||
isContinueButton: function(el) {
|
||||
return qq(el).hasClass(selectorClasses.continueButton);
|
||||
},
|
||||
|
||||
allowContinueButton: function(id) {
|
||||
show(getContinue(id));
|
||||
hide(getPause(id));
|
||||
},
|
||||
|
||||
uploadContinued: function(id) {
|
||||
this.setStatusText(id, "");
|
||||
this.allowPause(id);
|
||||
show(getSpinner(id));
|
||||
},
|
||||
|
||||
showDeleteButton: function(id) {
|
||||
show(getDelete(id));
|
||||
},
|
||||
|
||||
hideDeleteButton: function(id) {
|
||||
hide(getDelete(id));
|
||||
},
|
||||
|
||||
isDeleteButton: function(el) {
|
||||
return qq(el).hasClass(selectorClasses.deleteButton);
|
||||
},
|
||||
|
||||
isRetry: function(el) {
|
||||
return qq(el).hasClass(selectorClasses.retry);
|
||||
},
|
||||
|
||||
updateSize: function(id, text) {
|
||||
var size = getSize(id);
|
||||
|
||||
if (size) {
|
||||
show(size);
|
||||
qq(size).setText(text);
|
||||
}
|
||||
},
|
||||
|
||||
setStatusText: function(id, text) {
|
||||
var textEl = getTemplateEl(getFile(id), selectorClasses.statusText);
|
||||
|
||||
if (textEl) {
|
||||
/*jshint -W116*/
|
||||
if (text == null) {
|
||||
qq(textEl).clearText();
|
||||
}
|
||||
else {
|
||||
qq(textEl).setText(text);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
hideSpinner: function(id) {
|
||||
hide(getSpinner(id));
|
||||
},
|
||||
|
||||
showSpinner: function(id) {
|
||||
show(getSpinner(id));
|
||||
},
|
||||
|
||||
generatePreview: function(id, opt_fileOrBlob) {
|
||||
var relatedThumbnailId = opt_fileOrBlob && opt_fileOrBlob.qqThumbnailId,
|
||||
thumbnail = getThumbnail(id),
|
||||
spec = {
|
||||
maxSize: thumbnailMaxSize,
|
||||
scale: true,
|
||||
orient: true
|
||||
};
|
||||
|
||||
if (qq.supportedFeatures.imagePreviews) {
|
||||
if (thumbnail) {
|
||||
displayWaitingImg(thumbnail).done(function() {
|
||||
previewGeneration[id] = new qq.Promise();
|
||||
|
||||
/* jshint eqnull: true */
|
||||
// If we've already generated an <img> for this file, use the one that exists and
|
||||
// don't waste resources generating a new one.
|
||||
if (relatedThumbnailId != null) {
|
||||
useCachedPreview(id, relatedThumbnailId);
|
||||
}
|
||||
else {
|
||||
generateNewPreview(id, opt_fileOrBlob, spec);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
else if (thumbnail) {
|
||||
displayWaitingImg(thumbnail);
|
||||
}
|
||||
},
|
||||
|
||||
updateThumbnail: function(id, thumbnailUrl, showWaitingImg) {
|
||||
var thumbnail = getThumbnail(id),
|
||||
spec = {
|
||||
maxSize: thumbnailMaxSize,
|
||||
scale: serverScale
|
||||
};
|
||||
|
||||
if (thumbnail) {
|
||||
if (thumbnailUrl) {
|
||||
if (showWaitingImg) {
|
||||
displayWaitingImg(thumbnail);
|
||||
}
|
||||
|
||||
return options.imageGenerator.generate(thumbnailUrl, thumbnail, spec).then(
|
||||
function() {
|
||||
show(thumbnail);
|
||||
},
|
||||
function() {
|
||||
maybeSetDisplayNotAvailableImg(id, thumbnail);
|
||||
}
|
||||
);
|
||||
}
|
||||
else {
|
||||
maybeSetDisplayNotAvailableImg(id, thumbnail);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
};
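// Usage sketch for the templating module above; not part of the library.
// The constructor name (qq.Templating) is an assumption, since the opening of
// this module sits outside this hunk, and the option names mirror the reads
// made above (containerEl, log, classes, placeholders). Leaving out
// imageGenerator simply keeps thumbnail handling disabled (showThumbnails
// stays false).
var templatingSketch = new qq.Templating({
    containerEl: document.getElementById("uploader"),
    log: function(msg, level) { console.log(level || "info", msg); }
});

templatingSketch.render();                      // inject the parsed template into containerEl
templatingSketch.addFile(0, "photo.jpg");       // append a row for file ID 0
templatingSketch.updateProgress(0, 512, 1024);  // per-file bar at 50%
templatingSketch.setStatusText(0, "Uploading...");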
|
@ -1,183 +0,0 @@
|
||||
//Based on MinifyJpeg
|
||||
//http://elicon.blog57.fc2.com/blog-entry-206.html
|
||||
|
||||
var ExifRestorer = (function()
|
||||
{
|
||||
|
||||
var ExifRestorer = {};
|
||||
|
||||
ExifRestorer.KEY_STR = "ABCDEFGHIJKLMNOP" +
|
||||
"QRSTUVWXYZabcdef" +
|
||||
"ghijklmnopqrstuv" +
|
||||
"wxyz0123456789+/" +
|
||||
"=";
|
||||
|
||||
ExifRestorer.encode64 = function(input)
|
||||
{
|
||||
var output = "",
|
||||
chr1, chr2, chr3 = "",
|
||||
enc1, enc2, enc3, enc4 = "",
|
||||
i = 0;
|
||||
|
||||
do {
|
||||
chr1 = input[i++];
|
||||
chr2 = input[i++];
|
||||
chr3 = input[i++];
|
||||
|
||||
enc1 = chr1 >> 2;
|
||||
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
|
||||
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
|
||||
enc4 = chr3 & 63;
|
||||
|
||||
if (isNaN(chr2)) {
|
||||
enc3 = enc4 = 64;
|
||||
} else if (isNaN(chr3)) {
|
||||
enc4 = 64;
|
||||
}
|
||||
|
||||
output = output +
|
||||
this.KEY_STR.charAt(enc1) +
|
||||
this.KEY_STR.charAt(enc2) +
|
||||
this.KEY_STR.charAt(enc3) +
|
||||
this.KEY_STR.charAt(enc4);
|
||||
chr1 = chr2 = chr3 = "";
|
||||
enc1 = enc2 = enc3 = enc4 = "";
|
||||
} while (i < input.length);
|
||||
|
||||
return output;
|
||||
};
|
||||
|
||||
ExifRestorer.restore = function(origFileBase64, resizedFileBase64)
|
||||
{
|
||||
var expectedBase64Header = "data:image/jpeg;base64,";
|
||||
|
||||
if (!origFileBase64.match(expectedBase64Header))
|
||||
{
|
||||
return resizedFileBase64;
|
||||
}
|
||||
|
||||
var rawImage = this.decode64(origFileBase64.replace(expectedBase64Header, ""));
|
||||
var segments = this.slice2Segments(rawImage);
|
||||
|
||||
var image = this.exifManipulation(resizedFileBase64, segments);
|
||||
|
||||
return expectedBase64Header + this.encode64(image);
|
||||
|
||||
};
|
||||
|
||||
|
||||
ExifRestorer.exifManipulation = function(resizedFileBase64, segments)
|
||||
{
|
||||
var exifArray = this.getExifArray(segments),
|
||||
newImageArray = this.insertExif(resizedFileBase64, exifArray),
|
||||
aBuffer = new Uint8Array(newImageArray);
|
||||
|
||||
return aBuffer;
|
||||
};
|
||||
|
||||
|
||||
ExifRestorer.getExifArray = function(segments)
|
||||
{
|
||||
var seg;
|
||||
for (var x = 0; x < segments.length; x++)
|
||||
{
|
||||
seg = segments[x];
|
||||
if (seg[0] == 255 && seg[1] == 225) //(ff e1)
|
||||
{
|
||||
return seg;
|
||||
}
|
||||
}
|
||||
return [];
|
||||
};
|
||||
|
||||
|
||||
ExifRestorer.insertExif = function(resizedFileBase64, exifArray)
|
||||
{
|
||||
var imageData = resizedFileBase64.replace("data:image/jpeg;base64,", ""),
|
||||
buf = this.decode64(imageData),
|
||||
separatePoint = buf.indexOf(255, 3),
|
||||
mae = buf.slice(0, separatePoint),
|
||||
ato = buf.slice(separatePoint),
|
||||
array = mae;
|
||||
|
||||
array = array.concat(exifArray);
|
||||
array = array.concat(ato);
|
||||
return array;
|
||||
};
|
||||
|
||||
|
||||
|
||||
ExifRestorer.slice2Segments = function(rawImageArray)
|
||||
{
|
||||
var head = 0,
|
||||
segments = [];
|
||||
|
||||
while (1)
|
||||
{
|
||||
if (rawImageArray[head] == 255 && rawImageArray[head + 1] == 218) {break;}
|
||||
if (rawImageArray[head] == 255 && rawImageArray[head + 1] == 216)
|
||||
{
|
||||
head += 2;
|
||||
}
|
||||
else
|
||||
{
|
||||
var length = rawImageArray[head + 2] * 256 + rawImageArray[head + 3],
|
||||
endPoint = head + length + 2,
|
||||
seg = rawImageArray.slice(head, endPoint);
|
||||
segments.push(seg);
|
||||
head = endPoint;
|
||||
}
|
||||
if (head > rawImageArray.length){break;}
|
||||
}
|
||||
|
||||
return segments;
|
||||
};
|
||||
|
||||
|
||||
|
||||
ExifRestorer.decode64 = function(input)
|
||||
{
|
||||
var output = "",
|
||||
chr1, chr2, chr3 = "",
|
||||
enc1, enc2, enc3, enc4 = "",
|
||||
i = 0,
|
||||
buf = [];
|
||||
|
||||
// remove all characters that are not A-Z, a-z, 0-9, +, /, or =
|
||||
var base64test = /[^A-Za-z0-9\+\/\=]/g;
|
||||
if (base64test.exec(input)) {
|
||||
throw new Error("There were invalid base64 characters in the input text. " +
|
||||
"Valid base64 characters are A-Z, a-z, 0-9, '+', '/',and '='");
|
||||
}
|
||||
input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");
|
||||
|
||||
do {
|
||||
enc1 = this.KEY_STR.indexOf(input.charAt(i++));
|
||||
enc2 = this.KEY_STR.indexOf(input.charAt(i++));
|
||||
enc3 = this.KEY_STR.indexOf(input.charAt(i++));
|
||||
enc4 = this.KEY_STR.indexOf(input.charAt(i++));
|
||||
|
||||
chr1 = (enc1 << 2) | (enc2 >> 4);
|
||||
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
|
||||
chr3 = ((enc3 & 3) << 6) | enc4;
|
||||
|
||||
buf.push(chr1);
|
||||
|
||||
if (enc3 != 64) {
|
||||
buf.push(chr2);
|
||||
}
|
||||
if (enc4 != 64) {
|
||||
buf.push(chr3);
|
||||
}
|
||||
|
||||
chr1 = chr2 = chr3 = "";
|
||||
enc1 = enc2 = enc3 = enc4 = "";
|
||||
|
||||
} while (i < input.length);
|
||||
|
||||
return buf;
|
||||
};
|
||||
|
||||
|
||||
return ExifRestorer;
|
||||
})();
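// Usage sketch; not part of the original file. ExifRestorer.restore() takes the
// original JPEG as a base64 data URL plus the re-encoded JPEG produced by
// canvas.toDataURL() (which drops EXIF), and returns a data URL with the
// original EXIF APP1 segment spliced back in. The wrapper below only
// illustrates that contract.
function restoreExifFromCanvas(originalDataUrl, canvas) {
    // toDataURL() re-encodes the bitmap and loses orientation/GPS metadata
    var resizedDataUrl = canvas.toDataURL("image/jpeg", 0.8);

    // Non-JPEG inputs fall straight through (restore() checks the data URL header)
    return ExifRestorer.restore(originalDataUrl, resizedDataUrl);
}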
|
@ -1,712 +0,0 @@
|
||||
/*
|
||||
CryptoJS v3.1.2
|
||||
code.google.com/p/crypto-js
|
||||
(c) 2009-2013 by Jeff Mott. All rights reserved.
|
||||
code.google.com/p/crypto-js/wiki/License
|
||||
*/
|
||||
/**
|
||||
* CryptoJS core components.
|
||||
*/
|
||||
var CryptoJS = CryptoJS || (function (Math, undefined) {
|
||||
/**
|
||||
* CryptoJS namespace.
|
||||
*/
|
||||
var C = {};
|
||||
|
||||
/**
|
||||
* Library namespace.
|
||||
*/
|
||||
var C_lib = C.lib = {};
|
||||
|
||||
/**
|
||||
* Base object for prototypal inheritance.
|
||||
*/
|
||||
var Base = C_lib.Base = (function () {
|
||||
function F() {}
|
||||
|
||||
return {
|
||||
/**
|
||||
* Creates a new object that inherits from this object.
|
||||
*
|
||||
* @param {Object} overrides Properties to copy into the new object.
|
||||
*
|
||||
* @return {Object} The new object.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var MyType = CryptoJS.lib.Base.extend({
|
||||
* field: 'value',
|
||||
*
|
||||
* method: function () {
|
||||
* }
|
||||
* });
|
||||
*/
|
||||
extend: function (overrides) {
|
||||
// Spawn
|
||||
F.prototype = this;
|
||||
var subtype = new F();
|
||||
|
||||
// Augment
|
||||
if (overrides) {
|
||||
subtype.mixIn(overrides);
|
||||
}
|
||||
|
||||
// Create default initializer
|
||||
if (!subtype.hasOwnProperty('init')) {
|
||||
subtype.init = function () {
|
||||
subtype.$super.init.apply(this, arguments);
|
||||
};
|
||||
}
|
||||
|
||||
// Initializer's prototype is the subtype object
|
||||
subtype.init.prototype = subtype;
|
||||
|
||||
// Reference supertype
|
||||
subtype.$super = this;
|
||||
|
||||
return subtype;
|
||||
},
|
||||
|
||||
/**
|
||||
* Extends this object and runs the init method.
|
||||
* Arguments to create() will be passed to init().
|
||||
*
|
||||
* @return {Object} The new object.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var instance = MyType.create();
|
||||
*/
|
||||
create: function () {
|
||||
var instance = this.extend();
|
||||
instance.init.apply(instance, arguments);
|
||||
|
||||
return instance;
|
||||
},
|
||||
|
||||
/**
|
||||
* Initializes a newly created object.
|
||||
* Override this method to add some logic when your objects are created.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var MyType = CryptoJS.lib.Base.extend({
|
||||
* init: function () {
|
||||
* // ...
|
||||
* }
|
||||
* });
|
||||
*/
|
||||
init: function () {
|
||||
},
|
||||
|
||||
/**
|
||||
* Copies properties into this object.
|
||||
*
|
||||
* @param {Object} properties The properties to mix in.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* MyType.mixIn({
|
||||
* field: 'value'
|
||||
* });
|
||||
*/
|
||||
mixIn: function (properties) {
|
||||
for (var propertyName in properties) {
|
||||
if (properties.hasOwnProperty(propertyName)) {
|
||||
this[propertyName] = properties[propertyName];
|
||||
}
|
||||
}
|
||||
|
||||
// IE won't copy toString using the loop above
|
||||
if (properties.hasOwnProperty('toString')) {
|
||||
this.toString = properties.toString;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Creates a copy of this object.
|
||||
*
|
||||
* @return {Object} The clone.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var clone = instance.clone();
|
||||
*/
|
||||
clone: function () {
|
||||
return this.init.prototype.extend(this);
|
||||
}
|
||||
};
|
||||
}());
|
||||
|
||||
/**
|
||||
* An array of 32-bit words.
|
||||
*
|
||||
* @property {Array} words The array of 32-bit words.
|
||||
* @property {number} sigBytes The number of significant bytes in this word array.
|
||||
*/
|
||||
var WordArray = C_lib.WordArray = Base.extend({
|
||||
/**
|
||||
* Initializes a newly created word array.
|
||||
*
|
||||
* @param {Array} words (Optional) An array of 32-bit words.
|
||||
* @param {number} sigBytes (Optional) The number of significant bytes in the words.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var wordArray = CryptoJS.lib.WordArray.create();
|
||||
* var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607]);
|
||||
* var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607], 6);
|
||||
*/
|
||||
init: function (words, sigBytes) {
|
||||
words = this.words = words || [];
|
||||
|
||||
if (sigBytes != undefined) {
|
||||
this.sigBytes = sigBytes;
|
||||
} else {
|
||||
this.sigBytes = words.length * 4;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Converts this word array to a string.
|
||||
*
|
||||
* @param {Encoder} encoder (Optional) The encoding strategy to use. Default: CryptoJS.enc.Hex
|
||||
*
|
||||
* @return {string} The stringified word array.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var string = wordArray + '';
|
||||
* var string = wordArray.toString();
|
||||
* var string = wordArray.toString(CryptoJS.enc.Utf8);
|
||||
*/
|
||||
toString: function (encoder) {
|
||||
return (encoder || Hex).stringify(this);
|
||||
},
|
||||
|
||||
/**
|
||||
* Concatenates a word array to this word array.
|
||||
*
|
||||
* @param {WordArray} wordArray The word array to append.
|
||||
*
|
||||
* @return {WordArray} This word array.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* wordArray1.concat(wordArray2);
|
||||
*/
|
||||
concat: function (wordArray) {
|
||||
// Shortcuts
|
||||
var thisWords = this.words;
|
||||
var thatWords = wordArray.words;
|
||||
var thisSigBytes = this.sigBytes;
|
||||
var thatSigBytes = wordArray.sigBytes;
|
||||
|
||||
// Clamp excess bits
|
||||
this.clamp();
|
||||
|
||||
// Concat
|
||||
if (thisSigBytes % 4) {
|
||||
// Copy one byte at a time
|
||||
for (var i = 0; i < thatSigBytes; i++) {
|
||||
var thatByte = (thatWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
|
||||
thisWords[(thisSigBytes + i) >>> 2] |= thatByte << (24 - ((thisSigBytes + i) % 4) * 8);
|
||||
}
|
||||
} else if (thatWords.length > 0xffff) {
|
||||
// Copy one word at a time
|
||||
for (var i = 0; i < thatSigBytes; i += 4) {
|
||||
thisWords[(thisSigBytes + i) >>> 2] = thatWords[i >>> 2];
|
||||
}
|
||||
} else {
|
||||
// Copy all words at once
|
||||
thisWords.push.apply(thisWords, thatWords);
|
||||
}
|
||||
this.sigBytes += thatSigBytes;
|
||||
|
||||
// Chainable
|
||||
return this;
|
||||
},
|
||||
|
||||
/**
|
||||
* Removes insignificant bits.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* wordArray.clamp();
|
||||
*/
|
||||
clamp: function () {
|
||||
// Shortcuts
|
||||
var words = this.words;
|
||||
var sigBytes = this.sigBytes;
|
||||
|
||||
// Clamp
|
||||
words[sigBytes >>> 2] &= 0xffffffff << (32 - (sigBytes % 4) * 8);
|
||||
words.length = Math.ceil(sigBytes / 4);
|
||||
},
|
||||
|
||||
/**
|
||||
* Creates a copy of this word array.
|
||||
*
|
||||
* @return {WordArray} The clone.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var clone = wordArray.clone();
|
||||
*/
|
||||
clone: function () {
|
||||
var clone = Base.clone.call(this);
|
||||
clone.words = this.words.slice(0);
|
||||
|
||||
return clone;
|
||||
},
|
||||
|
||||
/**
|
||||
* Creates a word array filled with random bytes.
|
||||
*
|
||||
* @param {number} nBytes The number of random bytes to generate.
|
||||
*
|
||||
* @return {WordArray} The random word array.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var wordArray = CryptoJS.lib.WordArray.random(16);
|
||||
*/
|
||||
random: function (nBytes) {
|
||||
var words = [];
|
||||
for (var i = 0; i < nBytes; i += 4) {
|
||||
words.push((Math.random() * 0x100000000) | 0);
|
||||
}
|
||||
|
||||
return new WordArray.init(words, nBytes);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Encoder namespace.
|
||||
*/
|
||||
var C_enc = C.enc = {};
|
||||
|
||||
/**
|
||||
* Hex encoding strategy.
|
||||
*/
|
||||
var Hex = C_enc.Hex = {
|
||||
/**
|
||||
* Converts a word array to a hex string.
|
||||
*
|
||||
* @param {WordArray} wordArray The word array.
|
||||
*
|
||||
* @return {string} The hex string.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var hexString = CryptoJS.enc.Hex.stringify(wordArray);
|
||||
*/
|
||||
stringify: function (wordArray) {
|
||||
// Shortcuts
|
||||
var words = wordArray.words;
|
||||
var sigBytes = wordArray.sigBytes;
|
||||
|
||||
// Convert
|
||||
var hexChars = [];
|
||||
for (var i = 0; i < sigBytes; i++) {
|
||||
var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
|
||||
hexChars.push((bite >>> 4).toString(16));
|
||||
hexChars.push((bite & 0x0f).toString(16));
|
||||
}
|
||||
|
||||
return hexChars.join('');
|
||||
},
|
||||
|
||||
/**
|
||||
* Converts a hex string to a word array.
|
||||
*
|
||||
* @param {string} hexStr The hex string.
|
||||
*
|
||||
* @return {WordArray} The word array.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var wordArray = CryptoJS.enc.Hex.parse(hexString);
|
||||
*/
|
||||
parse: function (hexStr) {
|
||||
// Shortcut
|
||||
var hexStrLength = hexStr.length;
|
||||
|
||||
// Convert
|
||||
var words = [];
|
||||
for (var i = 0; i < hexStrLength; i += 2) {
|
||||
words[i >>> 3] |= parseInt(hexStr.substr(i, 2), 16) << (24 - (i % 8) * 4);
|
||||
}
|
||||
|
||||
return new WordArray.init(words, hexStrLength / 2);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Latin1 encoding strategy.
|
||||
*/
|
||||
var Latin1 = C_enc.Latin1 = {
|
||||
/**
|
||||
* Converts a word array to a Latin1 string.
|
||||
*
|
||||
* @param {WordArray} wordArray The word array.
|
||||
*
|
||||
* @return {string} The Latin1 string.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var latin1String = CryptoJS.enc.Latin1.stringify(wordArray);
|
||||
*/
|
||||
stringify: function (wordArray) {
|
||||
// Shortcuts
|
||||
var words = wordArray.words;
|
||||
var sigBytes = wordArray.sigBytes;
|
||||
|
||||
// Convert
|
||||
var latin1Chars = [];
|
||||
for (var i = 0; i < sigBytes; i++) {
|
||||
var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
|
||||
latin1Chars.push(String.fromCharCode(bite));
|
||||
}
|
||||
|
||||
return latin1Chars.join('');
|
||||
},
|
||||
|
||||
/**
|
||||
* Converts a Latin1 string to a word array.
|
||||
*
|
||||
* @param {string} latin1Str The Latin1 string.
|
||||
*
|
||||
* @return {WordArray} The word array.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var wordArray = CryptoJS.enc.Latin1.parse(latin1String);
|
||||
*/
|
||||
parse: function (latin1Str) {
|
||||
// Shortcut
|
||||
var latin1StrLength = latin1Str.length;
|
||||
|
||||
// Convert
|
||||
var words = [];
|
||||
for (var i = 0; i < latin1StrLength; i++) {
|
||||
words[i >>> 2] |= (latin1Str.charCodeAt(i) & 0xff) << (24 - (i % 4) * 8);
|
||||
}
|
||||
|
||||
return new WordArray.init(words, latin1StrLength);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* UTF-8 encoding strategy.
|
||||
*/
|
||||
var Utf8 = C_enc.Utf8 = {
|
||||
/**
|
||||
* Converts a word array to a UTF-8 string.
|
||||
*
|
||||
* @param {WordArray} wordArray The word array.
|
||||
*
|
||||
* @return {string} The UTF-8 string.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var utf8String = CryptoJS.enc.Utf8.stringify(wordArray);
|
||||
*/
|
||||
stringify: function (wordArray) {
|
||||
try {
|
||||
return decodeURIComponent(escape(Latin1.stringify(wordArray)));
|
||||
} catch (e) {
|
||||
throw new Error('Malformed UTF-8 data');
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Converts a UTF-8 string to a word array.
|
||||
*
|
||||
* @param {string} utf8Str The UTF-8 string.
|
||||
*
|
||||
* @return {WordArray} The word array.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var wordArray = CryptoJS.enc.Utf8.parse(utf8String);
|
||||
*/
|
||||
parse: function (utf8Str) {
|
||||
return Latin1.parse(unescape(encodeURIComponent(utf8Str)));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract buffered block algorithm template.
|
||||
*
|
||||
* The property blockSize must be implemented in a concrete subtype.
|
||||
*
|
||||
* @property {number} _minBufferSize The number of blocks that should be kept unprocessed in the buffer. Default: 0
|
||||
*/
|
||||
var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm = Base.extend({
|
||||
/**
|
||||
* Resets this block algorithm's data buffer to its initial state.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* bufferedBlockAlgorithm.reset();
|
||||
*/
|
||||
reset: function () {
|
||||
// Initial values
|
||||
this._data = new WordArray.init();
|
||||
this._nDataBytes = 0;
|
||||
},
|
||||
|
||||
/**
|
||||
* Adds new data to this block algorithm's buffer.
|
||||
*
|
||||
* @param {WordArray|string} data The data to append. Strings are converted to a WordArray using UTF-8.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* bufferedBlockAlgorithm._append('data');
|
||||
* bufferedBlockAlgorithm._append(wordArray);
|
||||
*/
|
||||
_append: function (data) {
|
||||
// Convert string to WordArray, else assume WordArray already
|
||||
if (typeof data == 'string') {
|
||||
data = Utf8.parse(data);
|
||||
}
|
||||
|
||||
// Append
|
||||
this._data.concat(data);
|
||||
this._nDataBytes += data.sigBytes;
|
||||
},
|
||||
|
||||
/**
|
||||
* Processes available data blocks.
|
||||
*
|
||||
* This method invokes _doProcessBlock(offset), which must be implemented by a concrete subtype.
|
||||
*
|
||||
* @param {boolean} doFlush Whether all blocks and partial blocks should be processed.
|
||||
*
|
||||
* @return {WordArray} The processed data.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var processedData = bufferedBlockAlgorithm._process();
|
||||
* var processedData = bufferedBlockAlgorithm._process(!!'flush');
|
||||
*/
|
||||
_process: function (doFlush) {
|
||||
// Shortcuts
|
||||
var data = this._data;
|
||||
var dataWords = data.words;
|
||||
var dataSigBytes = data.sigBytes;
|
||||
var blockSize = this.blockSize;
|
||||
var blockSizeBytes = blockSize * 4;
|
||||
|
||||
// Count blocks ready
|
||||
var nBlocksReady = dataSigBytes / blockSizeBytes;
|
||||
if (doFlush) {
|
||||
// Round up to include partial blocks
|
||||
nBlocksReady = Math.ceil(nBlocksReady);
|
||||
} else {
|
||||
// Round down to include only full blocks,
|
||||
// less the number of blocks that must remain in the buffer
|
||||
nBlocksReady = Math.max((nBlocksReady | 0) - this._minBufferSize, 0);
|
||||
}
|
||||
|
||||
// Count words ready
|
||||
var nWordsReady = nBlocksReady * blockSize;
|
||||
|
||||
// Count bytes ready
|
||||
var nBytesReady = Math.min(nWordsReady * 4, dataSigBytes);
|
||||
|
||||
// Process blocks
|
||||
if (nWordsReady) {
|
||||
for (var offset = 0; offset < nWordsReady; offset += blockSize) {
|
||||
// Perform concrete-algorithm logic
|
||||
this._doProcessBlock(dataWords, offset);
|
||||
}
|
||||
|
||||
// Remove processed words
|
||||
var processedWords = dataWords.splice(0, nWordsReady);
|
||||
data.sigBytes -= nBytesReady;
|
||||
}
|
||||
|
||||
// Return processed words
|
||||
return new WordArray.init(processedWords, nBytesReady);
|
||||
},
|
||||
|
||||
/**
|
||||
* Creates a copy of this object.
|
||||
*
|
||||
* @return {Object} The clone.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var clone = bufferedBlockAlgorithm.clone();
|
||||
*/
|
||||
clone: function () {
|
||||
var clone = Base.clone.call(this);
|
||||
clone._data = this._data.clone();
|
||||
|
||||
return clone;
|
||||
},
|
||||
|
||||
_minBufferSize: 0
|
||||
});
|
||||
|
||||
/**
|
||||
* Abstract hasher template.
|
||||
*
|
||||
* @property {number} blockSize The number of 32-bit words this hasher operates on. Default: 16 (512 bits)
|
||||
*/
|
||||
var Hasher = C_lib.Hasher = BufferedBlockAlgorithm.extend({
|
||||
/**
|
||||
* Configuration options.
|
||||
*/
|
||||
cfg: Base.extend(),
|
||||
|
||||
/**
|
||||
* Initializes a newly created hasher.
|
||||
*
|
||||
* @param {Object} cfg (Optional) The configuration options to use for this hash computation.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var hasher = CryptoJS.algo.SHA256.create();
|
||||
*/
|
||||
init: function (cfg) {
|
||||
// Apply config defaults
|
||||
this.cfg = this.cfg.extend(cfg);
|
||||
|
||||
// Set initial values
|
||||
this.reset();
|
||||
},
|
||||
|
||||
/**
|
||||
* Resets this hasher to its initial state.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* hasher.reset();
|
||||
*/
|
||||
reset: function () {
|
||||
// Reset data buffer
|
||||
BufferedBlockAlgorithm.reset.call(this);
|
||||
|
||||
// Perform concrete-hasher logic
|
||||
this._doReset();
|
||||
},
|
||||
|
||||
/**
|
||||
* Updates this hasher with a message.
|
||||
*
|
||||
* @param {WordArray|string} messageUpdate The message to append.
|
||||
*
|
||||
* @return {Hasher} This hasher.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* hasher.update('message');
|
||||
* hasher.update(wordArray);
|
||||
*/
|
||||
update: function (messageUpdate) {
|
||||
// Append
|
||||
this._append(messageUpdate);
|
||||
|
||||
// Update the hash
|
||||
this._process();
|
||||
|
||||
// Chainable
|
||||
return this;
|
||||
},
|
||||
|
||||
/**
|
||||
* Finalizes the hash computation.
|
||||
* Note that the finalize operation is effectively a destructive, read-once operation.
|
||||
*
|
||||
* @param {WordArray|string} messageUpdate (Optional) A final message update.
|
||||
*
|
||||
* @return {WordArray} The hash.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var hash = hasher.finalize();
|
||||
* var hash = hasher.finalize('message');
|
||||
* var hash = hasher.finalize(wordArray);
|
||||
*/
|
||||
finalize: function (messageUpdate) {
|
||||
// Final message update
|
||||
if (messageUpdate) {
|
||||
this._append(messageUpdate);
|
||||
}
|
||||
|
||||
// Perform concrete-hasher logic
|
||||
var hash = this._doFinalize();
|
||||
|
||||
return hash;
|
||||
},
|
||||
|
||||
blockSize: 512/32,
|
||||
|
||||
/**
|
||||
* Creates a shortcut function to a hasher's object interface.
|
||||
*
|
||||
* @param {Hasher} hasher The hasher to create a helper for.
|
||||
*
|
||||
* @return {Function} The shortcut function.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var SHA256 = CryptoJS.lib.Hasher._createHelper(CryptoJS.algo.SHA256);
|
||||
*/
|
||||
_createHelper: function (hasher) {
|
||||
return function (message, cfg) {
|
||||
return new hasher.init(cfg).finalize(message);
|
||||
};
|
||||
},
|
||||
|
||||
/**
|
||||
* Creates a shortcut function to the HMAC's object interface.
|
||||
*
|
||||
* @param {Hasher} hasher The hasher to use in this HMAC helper.
|
||||
*
|
||||
* @return {Function} The shortcut function.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var HmacSHA256 = CryptoJS.lib.Hasher._createHmacHelper(CryptoJS.algo.SHA256);
|
||||
*/
|
||||
_createHmacHelper: function (hasher) {
|
||||
return function (message, key) {
|
||||
return new C_algo.HMAC.init(hasher, key).finalize(message);
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Algorithm namespace.
|
||||
*/
|
||||
var C_algo = C.algo = {};
|
||||
|
||||
return C;
|
||||
}(Math));
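// Round-trip sketch for the encoders defined above; not part of CryptoJS. It
// only uses APIs documented in the JSDoc blocks (Utf8/Hex parse and stringify,
// WordArray.concat).
var coreSketch = CryptoJS.enc.Utf8.parse("Man");      // 3 significant bytes in one 32-bit word
console.log(CryptoJS.enc.Hex.stringify(coreSketch));  // "4d616e"
console.log(CryptoJS.enc.Utf8.stringify(coreSketch)); // "Man"

coreSketch.concat(CryptoJS.enc.Utf8.parse("!"));      // concat copes with the unaligned tail
console.log(coreSketch.sigBytes);                     // 4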
|
@ -1,109 +0,0 @@
|
||||
/*
|
||||
CryptoJS v3.1.2
|
||||
code.google.com/p/crypto-js
|
||||
(c) 2009-2013 by Jeff Mott. All rights reserved.
|
||||
code.google.com/p/crypto-js/wiki/License
|
||||
*/
|
||||
(function () {
|
||||
// Shortcuts
|
||||
var C = CryptoJS;
|
||||
var C_lib = C.lib;
|
||||
var WordArray = C_lib.WordArray;
|
||||
var C_enc = C.enc;
|
||||
|
||||
/**
|
||||
* Base64 encoding strategy.
|
||||
*/
|
||||
var Base64 = C_enc.Base64 = {
|
||||
/**
|
||||
* Converts a word array to a Base64 string.
|
||||
*
|
||||
* @param {WordArray} wordArray The word array.
|
||||
*
|
||||
* @return {string} The Base64 string.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var base64String = CryptoJS.enc.Base64.stringify(wordArray);
|
||||
*/
|
||||
stringify: function (wordArray) {
|
||||
// Shortcuts
|
||||
var words = wordArray.words;
|
||||
var sigBytes = wordArray.sigBytes;
|
||||
var map = this._map;
|
||||
|
||||
// Clamp excess bits
|
||||
wordArray.clamp();
|
||||
|
||||
// Convert
|
||||
var base64Chars = [];
|
||||
for (var i = 0; i < sigBytes; i += 3) {
|
||||
var byte1 = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
|
||||
var byte2 = (words[(i + 1) >>> 2] >>> (24 - ((i + 1) % 4) * 8)) & 0xff;
|
||||
var byte3 = (words[(i + 2) >>> 2] >>> (24 - ((i + 2) % 4) * 8)) & 0xff;
|
||||
|
||||
var triplet = (byte1 << 16) | (byte2 << 8) | byte3;
|
||||
|
||||
for (var j = 0; (j < 4) && (i + j * 0.75 < sigBytes); j++) {
|
||||
base64Chars.push(map.charAt((triplet >>> (6 * (3 - j))) & 0x3f));
|
||||
}
|
||||
}
|
||||
|
||||
// Add padding
|
||||
var paddingChar = map.charAt(64);
|
||||
if (paddingChar) {
|
||||
while (base64Chars.length % 4) {
|
||||
base64Chars.push(paddingChar);
|
||||
}
|
||||
}
|
||||
|
||||
return base64Chars.join('');
|
||||
},
|
||||
|
||||
/**
|
||||
* Converts a Base64 string to a word array.
|
||||
*
|
||||
* @param {string} base64Str The Base64 string.
|
||||
*
|
||||
* @return {WordArray} The word array.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var wordArray = CryptoJS.enc.Base64.parse(base64String);
|
||||
*/
|
||||
parse: function (base64Str) {
|
||||
// Shortcuts
|
||||
var base64StrLength = base64Str.length;
|
||||
var map = this._map;
|
||||
|
||||
// Ignore padding
|
||||
var paddingChar = map.charAt(64);
|
||||
if (paddingChar) {
|
||||
var paddingIndex = base64Str.indexOf(paddingChar);
|
||||
if (paddingIndex != -1) {
|
||||
base64StrLength = paddingIndex;
|
||||
}
|
||||
}
|
||||
|
||||
// Convert
|
||||
var words = [];
|
||||
var nBytes = 0;
|
||||
for (var i = 0; i < base64StrLength; i++) {
|
||||
if (i % 4) {
|
||||
var bits1 = map.indexOf(base64Str.charAt(i - 1)) << ((i % 4) * 2);
|
||||
var bits2 = map.indexOf(base64Str.charAt(i)) >>> (6 - (i % 4) * 2);
|
||||
words[nBytes >>> 2] |= (bits1 | bits2) << (24 - (nBytes % 4) * 8);
|
||||
nBytes++;
|
||||
}
|
||||
}
|
||||
|
||||
return WordArray.create(words, nBytes);
|
||||
},
|
||||
|
||||
_map: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='
|
||||
};
|
||||
}());
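// Base64 round-trip sketch; not part of CryptoJS. It pairs the stringify() and
// parse() strategies above with the Utf8 encoder from the core module.
var b64Sketch = CryptoJS.enc.Base64.stringify(CryptoJS.enc.Utf8.parse("Man"));
console.log(b64Sketch);                                                          // "TWFu"
console.log(CryptoJS.enc.Utf8.stringify(CryptoJS.enc.Base64.parse(b64Sketch)));  // "Man"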
|
@ -1,131 +0,0 @@
|
||||
/*
|
||||
CryptoJS v3.1.2
|
||||
code.google.com/p/crypto-js
|
||||
(c) 2009-2013 by Jeff Mott. All rights reserved.
|
||||
code.google.com/p/crypto-js/wiki/License
|
||||
*/
|
||||
(function () {
|
||||
// Shortcuts
|
||||
var C = CryptoJS;
|
||||
var C_lib = C.lib;
|
||||
var Base = C_lib.Base;
|
||||
var C_enc = C.enc;
|
||||
var Utf8 = C_enc.Utf8;
|
||||
var C_algo = C.algo;
|
||||
|
||||
/**
|
||||
* HMAC algorithm.
|
||||
*/
|
||||
var HMAC = C_algo.HMAC = Base.extend({
|
||||
/**
|
||||
* Initializes a newly created HMAC.
|
||||
*
|
||||
* @param {Hasher} hasher The hash algorithm to use.
|
||||
* @param {WordArray|string} key The secret key.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var hmacHasher = CryptoJS.algo.HMAC.create(CryptoJS.algo.SHA256, key);
|
||||
*/
|
||||
init: function (hasher, key) {
|
||||
// Init hasher
|
||||
hasher = this._hasher = new hasher.init();
|
||||
|
||||
// Convert string to WordArray, else assume WordArray already
|
||||
if (typeof key == 'string') {
|
||||
key = Utf8.parse(key);
|
||||
}
|
||||
|
||||
// Shortcuts
|
||||
var hasherBlockSize = hasher.blockSize;
|
||||
var hasherBlockSizeBytes = hasherBlockSize * 4;
|
||||
|
||||
// Allow arbitrary length keys
|
||||
if (key.sigBytes > hasherBlockSizeBytes) {
|
||||
key = hasher.finalize(key);
|
||||
}
|
||||
|
||||
// Clamp excess bits
|
||||
key.clamp();
|
||||
|
||||
// Clone key for inner and outer pads
|
||||
var oKey = this._oKey = key.clone();
|
||||
var iKey = this._iKey = key.clone();
|
||||
|
||||
// Shortcuts
|
||||
var oKeyWords = oKey.words;
|
||||
var iKeyWords = iKey.words;
|
||||
|
||||
// XOR keys with pad constants
|
||||
for (var i = 0; i < hasherBlockSize; i++) {
|
||||
oKeyWords[i] ^= 0x5c5c5c5c;
|
||||
iKeyWords[i] ^= 0x36363636;
|
||||
}
|
||||
oKey.sigBytes = iKey.sigBytes = hasherBlockSizeBytes;
|
||||
|
||||
// Set initial values
|
||||
this.reset();
|
||||
},
|
||||
|
||||
/**
|
||||
* Resets this HMAC to its initial state.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* hmacHasher.reset();
|
||||
*/
|
||||
reset: function () {
|
||||
// Shortcut
|
||||
var hasher = this._hasher;
|
||||
|
||||
// Reset
|
||||
hasher.reset();
|
||||
hasher.update(this._iKey);
|
||||
},
|
||||
|
||||
/**
|
||||
* Updates this HMAC with a message.
|
||||
*
|
||||
* @param {WordArray|string} messageUpdate The message to append.
|
||||
*
|
||||
* @return {HMAC} This HMAC instance.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* hmacHasher.update('message');
|
||||
* hmacHasher.update(wordArray);
|
||||
*/
|
||||
update: function (messageUpdate) {
|
||||
this._hasher.update(messageUpdate);
|
||||
|
||||
// Chainable
|
||||
return this;
|
||||
},
|
||||
|
||||
/**
|
||||
* Finalizes the HMAC computation.
|
||||
* Note that the finalize operation is effectively a destructive, read-once operation.
|
||||
*
|
||||
* @param {WordArray|string} messageUpdate (Optional) A final message update.
|
||||
*
|
||||
* @return {WordArray} The HMAC.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var hmac = hmacHasher.finalize();
|
||||
* var hmac = hmacHasher.finalize('message');
|
||||
* var hmac = hmacHasher.finalize(wordArray);
|
||||
*/
|
||||
finalize: function (messageUpdate) {
|
||||
// Shortcut
|
||||
var hasher = this._hasher;
|
||||
|
||||
// Compute HMAC
|
||||
var innerHash = hasher.finalize(messageUpdate);
|
||||
hasher.reset();
|
||||
var hmac = hasher.finalize(this._oKey.clone().concat(innerHash));
|
||||
|
||||
return hmac;
|
||||
}
|
||||
});
|
||||
}());
|
@ -1,136 +0,0 @@
|
||||
/*
|
||||
CryptoJS v3.1.2
|
||||
code.google.com/p/crypto-js
|
||||
(c) 2009-2013 by Jeff Mott. All rights reserved.
|
||||
code.google.com/p/crypto-js/wiki/License
|
||||
*/
|
||||
(function () {
|
||||
// Shortcuts
|
||||
var C = CryptoJS;
|
||||
var C_lib = C.lib;
|
||||
var WordArray = C_lib.WordArray;
|
||||
var Hasher = C_lib.Hasher;
|
||||
var C_algo = C.algo;
|
||||
|
||||
// Reusable object
|
||||
var W = [];
|
||||
|
||||
/**
|
||||
* SHA-1 hash algorithm.
|
||||
*/
|
||||
var SHA1 = C_algo.SHA1 = Hasher.extend({
|
||||
_doReset: function () {
|
||||
this._hash = new WordArray.init([
|
||||
0x67452301, 0xefcdab89,
|
||||
0x98badcfe, 0x10325476,
|
||||
0xc3d2e1f0
|
||||
]);
|
||||
},
|
||||
|
||||
_doProcessBlock: function (M, offset) {
|
||||
// Shortcut
|
||||
var H = this._hash.words;
|
||||
|
||||
// Working variables
|
||||
var a = H[0];
|
||||
var b = H[1];
|
||||
var c = H[2];
|
||||
var d = H[3];
|
||||
var e = H[4];
|
||||
|
||||
// Computation
|
||||
for (var i = 0; i < 80; i++) {
|
||||
if (i < 16) {
|
||||
W[i] = M[offset + i] | 0;
|
||||
} else {
|
||||
var n = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
|
||||
W[i] = (n << 1) | (n >>> 31);
|
||||
}
|
||||
|
||||
var t = ((a << 5) | (a >>> 27)) + e + W[i];
|
||||
if (i < 20) {
|
||||
t += ((b & c) | (~b & d)) + 0x5a827999;
|
||||
} else if (i < 40) {
|
||||
t += (b ^ c ^ d) + 0x6ed9eba1;
|
||||
} else if (i < 60) {
|
||||
t += ((b & c) | (b & d) | (c & d)) - 0x70e44324;
|
||||
} else /* if (i < 80) */ {
|
||||
t += (b ^ c ^ d) - 0x359d3e2a;
|
||||
}
|
||||
|
||||
e = d;
|
||||
d = c;
|
||||
c = (b << 30) | (b >>> 2);
|
||||
b = a;
|
||||
a = t;
|
||||
}
|
||||
|
||||
// Intermediate hash value
|
||||
H[0] = (H[0] + a) | 0;
|
||||
H[1] = (H[1] + b) | 0;
|
||||
H[2] = (H[2] + c) | 0;
|
||||
H[3] = (H[3] + d) | 0;
|
||||
H[4] = (H[4] + e) | 0;
|
||||
},
|
||||
|
||||
_doFinalize: function () {
|
||||
// Shortcuts
|
||||
var data = this._data;
|
||||
var dataWords = data.words;
|
||||
|
||||
var nBitsTotal = this._nDataBytes * 8;
|
||||
var nBitsLeft = data.sigBytes * 8;
|
||||
|
||||
// Add padding
|
||||
dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32);
|
||||
dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000);
|
||||
dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal;
|
||||
data.sigBytes = dataWords.length * 4;
|
||||
|
||||
// Hash final blocks
|
||||
this._process();
|
||||
|
||||
// Return final computed hash
|
||||
return this._hash;
|
||||
},
|
||||
|
||||
clone: function () {
|
||||
var clone = Hasher.clone.call(this);
|
||||
clone._hash = this._hash.clone();
|
||||
|
||||
return clone;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Shortcut function to the hasher's object interface.
|
||||
*
|
||||
* @param {WordArray|string} message The message to hash.
|
||||
*
|
||||
* @return {WordArray} The hash.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var hash = CryptoJS.SHA1('message');
|
||||
* var hash = CryptoJS.SHA1(wordArray);
|
||||
*/
|
||||
C.SHA1 = Hasher._createHelper(SHA1);
|
||||
|
||||
/**
|
||||
* Shortcut function to the HMAC's object interface.
|
||||
*
|
||||
* @param {WordArray|string} message The message to hash.
|
||||
* @param {WordArray|string} key The secret key.
|
||||
*
|
||||
* @return {WordArray} The HMAC.
|
||||
*
|
||||
* @static
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var hmac = CryptoJS.HmacSHA1(message, key);
|
||||
*/
|
||||
C.HmacSHA1 = Hasher._createHmacHelper(SHA1);
|
||||
}());
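// Sketch of the two shortcut helpers registered above; not part of CryptoJS.
// The expected digests are the standard SHA-1 (FIPS 180) and HMAC-SHA1
// (RFC 2202) test vectors.
console.log(CryptoJS.SHA1("abc").toString());
// "a9993e364706816aba3e25717850c26c9cd0d89d"
console.log(CryptoJS.HmacSHA1("what do ya want for nothing?", "Jefe").toString());
// "effcdf6ae5eb2fa2d27416d5f184df9c259a7c79"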
|
@ -1,126 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Keeps a running tally of total upload progress for a batch of files.
|
||||
*
|
||||
* @param callback Invoked when total progress changes, passing calculated total loaded & total size values.
|
||||
* @param getSize Function that returns the size of a file given its ID
|
||||
* @constructor
|
||||
*/
|
||||
qq.TotalProgress = function(callback, getSize) {
|
||||
"use strict";
|
||||
|
||||
var perFileProgress = {},
|
||||
totalLoaded = 0,
|
||||
totalSize = 0,
|
||||
|
||||
lastLoadedSent = -1,
|
||||
lastTotalSent = -1,
|
||||
callbackProxy = function(loaded, total) {
|
||||
if (loaded !== lastLoadedSent || total !== lastTotalSent) {
|
||||
callback(loaded, total);
|
||||
}
|
||||
|
||||
lastLoadedSent = loaded;
|
||||
lastTotalSent = total;
|
||||
},
|
||||
|
||||
/**
|
||||
* @param failed Array of file IDs that have failed
|
||||
* @param retryable Array of file IDs that are retryable
|
||||
* @returns true if none of the failed files are eligible for retry
|
||||
*/
|
||||
noRetryableFiles = function(failed, retryable) {
|
||||
var none = true;
|
||||
|
||||
qq.each(failed, function(idx, failedId) {
|
||||
if (qq.indexOf(retryable, failedId) >= 0) {
|
||||
none = false;
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
return none;
|
||||
},
|
||||
|
||||
onCancel = function(id) {
|
||||
updateTotalProgress(id, -1, -1);
|
||||
delete perFileProgress[id];
|
||||
},
|
||||
|
||||
onAllComplete = function(successful, failed, retryable) {
|
||||
if (failed.length === 0 || noRetryableFiles(failed, retryable)) {
|
||||
callbackProxy(totalSize, totalSize);
|
||||
this.reset();
|
||||
}
|
||||
},
|
||||
|
||||
onNew = function(id) {
|
||||
var size = getSize(id);
|
||||
|
||||
// We might not know the size yet, such as for blob proxies
|
||||
if (size > 0) {
|
||||
updateTotalProgress(id, 0, size);
|
||||
perFileProgress[id] = {loaded: 0, total: size};
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Invokes the callback with the current total progress of all files in the batch. Called whenever it may
|
||||
* be appropriate to re-calculate and disseminate this data.
|
||||
*
|
||||
* @param id ID of a file that has changed in some important way
|
||||
* @param newLoaded New loaded value for this file. -1 if this value should no longer be part of calculations
|
||||
* @param newTotal New total size of the file. -1 if this value should no longer be part of calculations
|
||||
*/
|
||||
updateTotalProgress = function(id, newLoaded, newTotal) {
|
||||
var oldLoaded = perFileProgress[id] ? perFileProgress[id].loaded : 0,
|
||||
oldTotal = perFileProgress[id] ? perFileProgress[id].total : 0;
|
||||
|
||||
if (newLoaded === -1 && newTotal === -1) {
|
||||
totalLoaded -= oldLoaded;
|
||||
totalSize -= oldTotal;
|
||||
}
|
||||
else {
|
||||
if (newLoaded) {
|
||||
totalLoaded += newLoaded - oldLoaded;
|
||||
}
|
||||
if (newTotal) {
|
||||
totalSize += newTotal - oldTotal;
|
||||
}
|
||||
}
|
||||
|
||||
callbackProxy(totalLoaded, totalSize);
|
||||
};
|
||||
|
||||
qq.extend(this, {
|
||||
// Called when a batch of files has completed uploading.
|
||||
onAllComplete: onAllComplete,
|
||||
|
||||
// Called when the status of a file has changed.
|
||||
onStatusChange: function(id, oldStatus, newStatus) {
|
||||
if (newStatus === qq.status.CANCELED || newStatus === qq.status.REJECTED) {
|
||||
onCancel(id);
|
||||
}
|
||||
else if (newStatus === qq.status.SUBMITTING) {
|
||||
onNew(id);
|
||||
}
|
||||
},
|
||||
|
||||
// Called whenever the upload progress of an individual file has changed.
|
||||
onIndividualProgress: function(id, loaded, total) {
|
||||
updateTotalProgress(id, loaded, total);
|
||||
perFileProgress[id] = {loaded: loaded, total: total};
|
||||
},
|
||||
|
||||
// Called whenever the total size of a file has changed, such as when the size of a generated blob is known.
|
||||
onNewSize: function(id) {
|
||||
onNew(id);
|
||||
},
|
||||
|
||||
reset: function() {
|
||||
perFileProgress = {};
|
||||
totalLoaded = 0;
|
||||
totalSize = 0;
|
||||
}
|
||||
});
|
||||
};
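// Wiring sketch; not part of the library. The uploader core normally feeds a
// qq.TotalProgress instance from its status/progress events; the sizes object
// below is a stand-in for its size lookup.
var sketchSizes = {0: 1000, 1: 3000};
var totalProgressSketch = new qq.TotalProgress(
    function(totalLoaded, totalSize) {
        console.log("total progress: " + totalLoaded + "/" + totalSize);
    },
    function(id) { return sketchSizes[id]; }
);

totalProgressSketch.onStatusChange(0, null, qq.status.SUBMITTING);  // registers file 0 (1000 bytes)
totalProgressSketch.onStatusChange(1, null, qq.status.SUBMITTING);  // registers file 1 (3000 bytes)
totalProgressSketch.onIndividualProgress(0, 500, 1000);             // logs "total progress: 500/4000"
totalProgressSketch.onIndividualProgress(1, 3000, 3000);            // logs "total progress: 3500/4000"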
|
@ -1,76 +0,0 @@
|
||||
/*globals qq*/
|
||||
/**
|
||||
* Ajax requester used to send a POST to a traditional endpoint once all chunks for a specific file have uploaded
|
||||
* successfully.
|
||||
*
|
||||
* @param o Options from the caller - will override the defaults.
|
||||
* @constructor
|
||||
*/
|
||||
qq.traditional.AllChunksDoneAjaxRequester = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
method = "POST",
|
||||
options = {
|
||||
cors: {
|
||||
allowXdr: false,
|
||||
expected: false,
|
||||
sendCredentials: false
|
||||
},
|
||||
endpoint: null,
|
||||
log: function(str, level) {}
|
||||
},
|
||||
promises = {},
|
||||
endpointHandler = {
|
||||
get: function(id) {
|
||||
return options.endpoint;
|
||||
}
|
||||
};
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
acceptHeader: "application/json",
|
||||
validMethods: [method],
|
||||
method: method,
|
||||
successfulResponseCodes: (function() {
|
||||
var codes = {};
|
||||
codes[method] = [200, 201, 202];
|
||||
return codes;
|
||||
}()),
|
||||
endpointStore: endpointHandler,
|
||||
allowXRequestedWithAndCacheControl: false,
|
||||
cors: options.cors,
|
||||
log: options.log,
|
||||
onComplete: function(id, xhr, isError) {
|
||||
var promise = promises[id];
|
||||
|
||||
delete promises[id];
|
||||
|
||||
if (isError) {
|
||||
promise.failure(xhr);
|
||||
}
|
||||
else {
|
||||
promise.success(xhr);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
complete: function(id, xhr, params, headers) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
options.log("Submitting All Chunks Done request for " + id);
|
||||
|
||||
promises[id] = promise;
|
||||
|
||||
requester.initTransport(id)
|
||||
.withParams(params)
|
||||
.withHeaders(headers)
|
||||
.send(xhr);
|
||||
|
||||
return promise;
|
||||
}
|
||||
});
|
||||
};
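// Usage sketch; not part of the library. The chunked XHR handler defined later
// in this changeset owns one of these requesters and calls complete() once the
// final chunk succeeds. The endpoint, file id, params and xhr below are
// placeholder assumptions for illustration.
var allChunksDoneSketch = new qq.traditional.AllChunksDoneAjaxRequester({
    endpoint: "/uploads/done",
    log: function(msg, level) { console.log(level || "info", msg); }
});

allChunksDoneSketch.complete(0, new XMLHttpRequest(), {qquuid: "abc-123"}, {})
    .then(
        function(xhr) { console.log("chunks-done POST accepted: " + xhr.status); },
        function(xhr) { console.log("chunks-done POST rejected: " + xhr.status); }
    );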
|
@ -1,138 +0,0 @@
|
||||
/*globals qq*/
|
||||
/**
|
||||
* Upload handler that assumes the current user agent does not have any support for the
|
||||
* File API, and, therefore, makes use of iframes and forms to submit the files directly to
|
||||
* a generic server.
|
||||
*
|
||||
* @param options Options passed from the base handler
|
||||
* @param proxy Callbacks & methods used to query for or push out data/changes
|
||||
*/
|
||||
qq.traditional = qq.traditional || {};
|
||||
qq.traditional.FormUploadHandler = function(options, proxy) {
|
||||
"use strict";
|
||||
|
||||
var handler = this,
|
||||
getName = proxy.getName,
|
||||
getUuid = proxy.getUuid,
|
||||
log = proxy.log;
|
||||
|
||||
/**
|
||||
* @param innerHtmlOrMessage JSON message
|
||||
* @returns {*} The parsed response, or an empty object if the response could not be parsed
|
||||
*/
|
||||
function parseJsonResponse(innerHtmlOrMessage) {
|
||||
var response = {};
|
||||
|
||||
try {
|
||||
response = qq.parseJson(innerHtmlOrMessage);
|
||||
}
|
||||
catch(error) {
|
||||
log("Error when attempting to parse iframe upload response (" + error.message + ")", "error");
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns json object received by iframe from server.
|
||||
*/
|
||||
function getIframeContentJson(id, iframe) {
|
||||
/*jshint evil: true*/
|
||||
|
||||
var response;
|
||||
|
||||
//IE may throw an "access is denied" error when attempting to access contentDocument on the iframe in some cases
|
||||
try {
|
||||
// iframe.contentWindow.document - for IE<7
|
||||
var doc = iframe.contentDocument || iframe.contentWindow.document,
|
||||
innerHtml = doc.body.innerHTML;
|
||||
|
||||
log("converting iframe's innerHTML to JSON");
|
||||
log("innerHTML = " + innerHtml);
|
||||
//plain text response may be wrapped in <pre> tag
|
||||
if (innerHtml && innerHtml.match(/^<pre/i)) {
|
||||
innerHtml = doc.body.firstChild.firstChild.nodeValue;
|
||||
}
|
||||
|
||||
response = parseJsonResponse(innerHtml);
|
||||
}
|
||||
catch(error) {
|
||||
log("Error when attempting to parse form upload response (" + error.message + ")", "error");
|
||||
response = {success: false};
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates form, that will be submitted to iframe
|
||||
*/
|
||||
function createForm(id, iframe) {
|
||||
var params = options.paramsStore.get(id),
|
||||
method = options.demoMode ? "GET" : "POST",
|
||||
endpoint = options.endpointStore.get(id),
|
||||
name = getName(id);
|
||||
|
||||
params[options.uuidName] = getUuid(id);
|
||||
params[options.filenameParam] = name;
|
||||
|
||||
return handler._initFormForUpload({
|
||||
method: method,
|
||||
endpoint: endpoint,
|
||||
params: params,
|
||||
paramsInBody: options.paramsInBody,
|
||||
targetName: iframe.name
|
||||
});
|
||||
}
|
||||
|
||||
this.uploadFile = function(id) {
|
||||
var input = handler.getInput(id),
|
||||
iframe = handler._createIframe(id),
|
||||
promise = new qq.Promise(),
|
||||
form;
|
||||
|
||||
form = createForm(id, iframe);
|
||||
form.appendChild(input);
|
||||
|
||||
handler._attachLoadEvent(iframe, function(responseFromMessage){
|
||||
log("iframe loaded");
|
||||
|
||||
var response = responseFromMessage ? responseFromMessage : getIframeContentJson(id, iframe);
|
||||
|
||||
handler._detachLoadEvent(id);
|
||||
|
||||
//we can't remove an iframe if the iframe doesn't belong to the same domain
|
||||
if (!options.cors.expected) {
|
||||
qq(iframe).remove();
|
||||
}
|
||||
|
||||
if (response.success) {
|
||||
promise.success(response);
|
||||
}
|
||||
else {
|
||||
promise.failure(response);
|
||||
}
|
||||
});
|
||||
|
||||
log("Sending upload request for " + id);
|
||||
form.submit();
|
||||
qq(form).remove();
|
||||
|
||||
return promise;
|
||||
};
|
||||
|
||||
qq.extend(this, new qq.FormUploadHandler({
|
||||
options: {
|
||||
isCors: options.cors.expected,
|
||||
inputName: options.inputName
|
||||
},
|
||||
|
||||
proxy: {
|
||||
onCancel: options.onCancel,
|
||||
getName: getName,
|
||||
getUuid: getUuid,
|
||||
log: log
|
||||
}
|
||||
}
|
||||
));
|
||||
};
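// Server-side sketch; an assumption for illustration, since nothing in this
// changeset defines the endpoint. For the iframe/form path above, the endpoint
// must echo JSON in the response body so getIframeContentJson() can read it
// out of the iframe document; a text/plain content type is the usual choice so
// older browsers do not prompt to download the JSON (some still wrap it in
// <pre>, which the handler already unwraps). Express is used only as an example.
var express = require("express");
var uploadApp = express();

uploadApp.post("/upload", function(req, res) {
    // ...persist the uploaded file here...
    res.type("text/plain").send(JSON.stringify({success: true}));
});

uploadApp.listen(3000);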
|
@ -1,246 +0,0 @@
|
||||
/*globals qq*/
|
||||
/**
|
||||
* Upload handler used to upload to traditional endpoints. It depends on File API support, and, therefore,
|
||||
* makes use of `XMLHttpRequest` level 2 to upload `File`s and `Blob`s to a generic server.
|
||||
*
|
||||
* @param spec Options passed from the base handler
|
||||
* @param proxy Callbacks & methods used to query for or push out data/changes
|
||||
*/
|
||||
qq.traditional = qq.traditional || {};
|
||||
qq.traditional.XhrUploadHandler = function(spec, proxy) {
|
||||
"use strict";
|
||||
|
||||
var handler = this,
|
||||
getName = proxy.getName,
|
||||
getSize = proxy.getSize,
|
||||
getUuid = proxy.getUuid,
|
||||
log = proxy.log,
|
||||
multipart = spec.forceMultipart || spec.paramsInBody,
|
||||
|
||||
addChunkingSpecificParams = function(id, params, chunkData) {
|
||||
var size = getSize(id),
|
||||
name = getName(id);
|
||||
|
||||
params[spec.chunking.paramNames.partIndex] = chunkData.part;
|
||||
params[spec.chunking.paramNames.partByteOffset] = chunkData.start;
|
||||
params[spec.chunking.paramNames.chunkSize] = chunkData.size;
|
||||
params[spec.chunking.paramNames.totalParts] = chunkData.count;
|
||||
params[spec.totalFileSizeName] = size;
|
||||
|
||||
/**
|
||||
* When a Blob is sent in a multipart request, the filename value in the content-disposition header is either "blob"
|
||||
* or an empty string. So, we will need to include the actual file name as a param in this case.
|
||||
*/
|
||||
if (multipart) {
|
||||
params[spec.filenameParam] = name;
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
allChunksDoneRequester = new qq.traditional.AllChunksDoneAjaxRequester({
|
||||
cors: spec.cors,
|
||||
endpoint: spec.chunking.success.endpoint,
|
||||
log: log
|
||||
}),
|
||||
|
||||
createReadyStateChangedHandler = function(id, xhr) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
xhr.onreadystatechange = function() {
|
||||
if (xhr.readyState === 4) {
|
||||
var result = onUploadOrChunkComplete(id, xhr);
|
||||
|
||||
if (result.success) {
|
||||
promise.success(result.response, xhr);
|
||||
}
|
||||
else {
|
||||
promise.failure(result.response, xhr);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return promise;
|
||||
},
|
||||
|
||||
getChunksCompleteParams = function(id) {
|
||||
var params = spec.paramsStore.get(id),
|
||||
name = getName(id),
|
||||
size = getSize(id);
|
||||
|
||||
params[spec.uuidName] = getUuid(id);
|
||||
params[spec.filenameParam] = name;
|
||||
params[spec.totalFileSizeName] = size;
|
||||
params[spec.chunking.paramNames.totalParts] = handler._getTotalChunks(id);
|
||||
|
||||
return params;
|
||||
},
|
||||
|
||||
isErrorUploadResponse = function(xhr, response) {
|
||||
return xhr.status !== 200 || !response.success || response.reset;
|
||||
},
|
||||
|
||||
onUploadOrChunkComplete = function(id, xhr) {
|
||||
var response;
|
||||
|
||||
log("xhr - server response received for " + id);
|
||||
log("responseText = " + xhr.responseText);
|
||||
|
||||
response = parseResponse(true, xhr);
|
||||
|
||||
return {
|
||||
success: !isErrorUploadResponse(xhr, response),
|
||||
response: response
|
||||
};
|
||||
},
|
||||
|
||||
// If this is an upload response, we require a JSON payload, otherwise, it is optional.
|
||||
parseResponse = function(upload, xhr) {
|
||||
var response = {};
|
||||
|
||||
try {
|
||||
log(qq.format("Received response status {} with body: {}", xhr.status, xhr.responseText));
|
||||
response = qq.parseJson(xhr.responseText);
|
||||
}
|
||||
catch(error) {
|
||||
upload && log("Error when attempting to parse xhr response text (" + error.message + ")", "error");
|
||||
}
|
||||
|
||||
return response;
|
||||
},
|
||||
|
||||
sendChunksCompleteRequest = function(id) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
allChunksDoneRequester.complete(
|
||||
id,
|
||||
handler._createXhr(id),
|
||||
getChunksCompleteParams(id),
|
||||
spec.customHeaders.get(id)
|
||||
)
|
||||
.then(function(xhr) {
|
||||
promise.success(parseResponse(false, xhr), xhr);
|
||||
}, function(xhr) {
|
||||
promise.failure(parseResponse(false, xhr), xhr);
|
||||
});
|
||||
|
||||
return promise;
|
||||
},
|
||||
|
||||
setParamsAndGetEntityToSend = function(params, xhr, fileOrBlob, id) {
|
||||
var formData = new FormData(),
|
||||
method = spec.demoMode ? "GET" : "POST",
|
||||
endpoint = spec.endpointStore.get(id),
|
||||
name = getName(id),
|
||||
size = getSize(id);
|
||||
|
||||
params[spec.uuidName] = getUuid(id);
|
||||
params[spec.filenameParam] = name;
|
||||
|
||||
if (multipart) {
|
||||
params[spec.totalFileSizeName] = size;
|
||||
}
|
||||
|
||||
//build query string
|
||||
if (!spec.paramsInBody) {
|
||||
if (!multipart) {
|
||||
params[spec.inputName] = name;
|
||||
}
|
||||
endpoint = qq.obj2url(params, endpoint);
|
||||
}
|
||||
|
||||
xhr.open(method, endpoint, true);
|
||||
|
||||
if (spec.cors.expected && spec.cors.sendCredentials) {
|
||||
xhr.withCredentials = true;
|
||||
}
|
||||
|
||||
if (multipart) {
|
||||
if (spec.paramsInBody) {
|
||||
qq.obj2FormData(params, formData);
|
||||
}
|
||||
|
||||
formData.append(spec.inputName, fileOrBlob);
|
||||
return formData;
|
||||
}
|
||||
|
||||
return fileOrBlob;
|
||||
},
|
||||
|
||||
setUploadHeaders = function(id, xhr) {
|
||||
var extraHeaders = spec.customHeaders.get(id),
|
||||
fileOrBlob = handler.getFile(id);
|
||||
|
||||
xhr.setRequestHeader("Accept", "application/json");
|
||||
xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest");
|
||||
xhr.setRequestHeader("Cache-Control", "no-cache");
|
||||
|
||||
if (!multipart) {
|
||||
xhr.setRequestHeader("Content-Type", "application/octet-stream");
|
||||
//NOTE: returning the mime type in the XHR works on Chrome 16.0.9 and Firefox 11.0a2
|
||||
xhr.setRequestHeader("X-Mime-Type", fileOrBlob.type);
|
||||
}
|
||||
|
||||
qq.each(extraHeaders, function(name, val) {
|
||||
xhr.setRequestHeader(name, val);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
uploadChunk: function(id, chunkIdx, resuming) {
|
||||
var chunkData = handler._getChunkData(id, chunkIdx),
|
||||
xhr = handler._createXhr(id, chunkIdx),
|
||||
size = getSize(id),
|
||||
promise, toSend, params;
|
||||
|
||||
promise = createReadyStateChangedHandler(id, xhr);
|
||||
handler._registerProgressHandler(id, chunkIdx, chunkData.size);
|
||||
params = spec.paramsStore.get(id);
|
||||
addChunkingSpecificParams(id, params, chunkData);
|
||||
|
||||
if (resuming) {
|
||||
params[spec.resume.paramNames.resuming] = true;
|
||||
}
|
||||
|
||||
toSend = setParamsAndGetEntityToSend(params, xhr, chunkData.blob, id);
|
||||
setUploadHeaders(id, xhr);
|
||||
xhr.send(toSend);
|
||||
|
||||
return promise;
|
||||
},
|
||||
|
||||
uploadFile: function(id) {
|
||||
var fileOrBlob = handler.getFile(id),
|
||||
promise, xhr, params, toSend;
|
||||
|
||||
xhr = handler._createXhr(id);
|
||||
handler._registerProgressHandler(id);
|
||||
promise = createReadyStateChangedHandler(id, xhr);
|
||||
params = spec.paramsStore.get(id);
|
||||
toSend = setParamsAndGetEntityToSend(params, xhr, fileOrBlob, id);
|
||||
setUploadHeaders(id, xhr);
|
||||
xhr.send(toSend);
|
||||
|
||||
return promise;
|
||||
}
|
||||
});
|
||||
|
||||
qq.extend(this, new qq.XhrUploadHandler({
|
||||
options: qq.extend({namespace: "traditional"}, spec),
|
||||
proxy: qq.extend({getEndpoint: spec.endpointStore.get}, proxy)
|
||||
}
|
||||
));
|
||||
|
||||
qq.override(this, function(super_) {
|
||||
return {
|
||||
finalizeChunks: function(id) {
|
||||
if (spec.chunking.success.endpoint) {
|
||||
return sendChunksCompleteRequest(id);
|
||||
}
|
||||
else {
|
||||
return super_.finalizeChunks(id, qq.bind(parseResponse, this, true));
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
};
|
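// Illustrative sketch (not part of the original library) of the XMLHttpRequest level 2 / FormData
// technique used by the handler above: request params travel in the multipart body alongside the
// file, extra headers are set, and the JSON response is parsed when the request completes. The
// "/upload" endpoint and the "qqfile" field name are assumptions for this example only.
function xhrUploadSketch(file, params, onDone) {
    var xhr = new XMLHttpRequest(),
        formData = new FormData();

    Object.keys(params || {}).forEach(function(key) {
        formData.append(key, params[key]);        // params ride in the multipart body
    });
    formData.append("qqfile", file);              // assumed field name for the file itself

    xhr.open("POST", "/upload", true);
    xhr.setRequestHeader("Accept", "application/json");
    xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest");

    xhr.onreadystatechange = function() {
        if (xhr.readyState === 4) {
            var response;

            try {
                response = JSON.parse(xhr.responseText);
            }
            catch (e) {
                response = {};
            }

            onDone(xhr.status === 200 && response.success !== false, response);
        }
    };

    xhr.send(formData);
}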
@ -1,46 +0,0 @@
|
||||
/* global qq */
|
||||
qq.FileButtonsClickHandler = function(s) {
|
||||
"use strict";
|
||||
|
||||
var inheritedInternalApi = {},
|
||||
spec = {
|
||||
templating: null,
|
||||
log: function(message, lvl) {},
|
||||
onDeleteFile: function(fileId) {},
|
||||
onCancel: function(fileId) {},
|
||||
onRetry: function(fileId) {},
|
||||
onPause: function(fileId) {},
|
||||
onContinue: function(fileId) {},
|
||||
onGetName: function(fileId) {}
|
||||
},
|
||||
buttonHandlers = {
|
||||
cancel: function(id) { spec.onCancel(id); },
|
||||
retry: function(id) { spec.onRetry(id); },
|
||||
deleteButton: function(id) { spec.onDeleteFile(id); },
|
||||
pause: function(id) { spec.onPause(id); },
|
||||
continueButton: function(id) { spec.onContinue(id); }
|
||||
};
|
||||
|
||||
function examineEvent(target, event) {
|
||||
qq.each(buttonHandlers, function(buttonType, handler) {
|
||||
var firstLetterCapButtonType = buttonType.charAt(0).toUpperCase() + buttonType.slice(1),
|
||||
fileId;
|
||||
|
||||
if (spec.templating["is" + firstLetterCapButtonType](target)) {
|
||||
fileId = spec.templating.getFileId(target);
|
||||
qq.preventDefault(event);
|
||||
spec.log(qq.format("Detected valid file button click event on file '{}', ID: {}.", spec.onGetName(fileId), fileId));
|
||||
handler(fileId);
|
||||
return false;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
qq.extend(spec, s);
|
||||
|
||||
spec.eventType = "click";
|
||||
spec.onHandled = examineEvent;
|
||||
spec.attachTo = spec.templating.getFileList();
|
||||
|
||||
qq.extend(this, new qq.UiEventHandler(spec, inheritedInternalApi));
|
||||
};
|
@ -1,40 +0,0 @@
|
||||
/*globals qq */
|
||||
// Child of FilenameEditHandler. Used to detect click events on filename display elements.
|
||||
qq.FilenameClickHandler = function(s) {
|
||||
"use strict";
|
||||
|
||||
var inheritedInternalApi = {},
|
||||
spec = {
|
||||
templating: null,
|
||||
log: function(message, lvl) {},
|
||||
classes: {
|
||||
file: "qq-upload-file",
|
||||
editNameIcon: "qq-edit-filename-icon"
|
||||
},
|
||||
onGetUploadStatus: function(fileId) {},
|
||||
onGetName: function(fileId) {}
|
||||
};
|
||||
|
||||
qq.extend(spec, s);
|
||||
|
||||
// This will be called by the parent handler when a `click` event is received on the list element.
|
||||
function examineEvent(target, event) {
|
||||
if (spec.templating.isFileName(target) || spec.templating.isEditIcon(target)) {
|
||||
var fileId = spec.templating.getFileId(target),
|
||||
status = spec.onGetUploadStatus(fileId);
|
||||
|
||||
// We only allow users to change filenames of files that have been submitted but not yet uploaded.
|
||||
if (status === qq.status.SUBMITTED) {
|
||||
spec.log(qq.format("Detected valid filename click event on file '{}', ID: {}.", spec.onGetName(fileId), fileId));
|
||||
qq.preventDefault(event);
|
||||
|
||||
inheritedInternalApi.handleFilenameEdit(fileId, target, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
spec.eventType = "click";
|
||||
spec.onHandled = examineEvent;
|
||||
|
||||
qq.extend(this, new qq.FilenameEditHandler(spec, inheritedInternalApi));
|
||||
};
|
@ -1,92 +0,0 @@
|
||||
/*globals qq */
|
||||
// Handles edit-related events on a file item (FineUploader mode). This is meant to be a parent handler.
|
||||
// Children will delegate to this handler when specific edit-related actions are detected.
|
||||
qq.FilenameEditHandler = function(s, inheritedInternalApi) {
|
||||
"use strict";
|
||||
|
||||
var spec = {
|
||||
templating: null,
|
||||
log: function(message, lvl) {},
|
||||
onGetUploadStatus: function(fileId) {},
|
||||
onGetName: function(fileId) {},
|
||||
onSetName: function(fileId, newName) {},
|
||||
onEditingStatusChange: function(fileId, isEditing) {}
|
||||
};
|
||||
|
||||
|
||||
function getFilenameSansExtension(fileId) {
|
||||
var filenameSansExt = spec.onGetName(fileId),
|
||||
extIdx = filenameSansExt.lastIndexOf(".");
|
||||
|
||||
if (extIdx > 0) {
|
||||
filenameSansExt = filenameSansExt.substr(0, extIdx);
|
||||
}
|
||||
|
||||
return filenameSansExt;
|
||||
}
|
||||
|
||||
function getOriginalExtension(fileId) {
|
||||
var origName = spec.onGetName(fileId);
|
||||
return qq.getExtension(origName);
|
||||
}
|
||||
|
||||
// Callback iff the name has been changed
|
||||
function handleNameUpdate(newFilenameInputEl, fileId) {
|
||||
var newName = newFilenameInputEl.value,
|
||||
origExtension;
|
||||
|
||||
if (newName !== undefined && qq.trimStr(newName).length > 0) {
|
||||
origExtension = getOriginalExtension(fileId);
|
||||
|
||||
if (origExtension !== undefined) {
|
||||
newName = newName + "." + origExtension;
|
||||
}
|
||||
|
||||
spec.onSetName(fileId, newName);
|
||||
}
|
||||
|
||||
spec.onEditingStatusChange(fileId, false);
|
||||
}
|
||||
|
||||
// The name has been updated if the filename edit input loses focus.
|
||||
function registerInputBlurHandler(inputEl, fileId) {
|
||||
inheritedInternalApi.getDisposeSupport().attach(inputEl, "blur", function() {
|
||||
handleNameUpdate(inputEl, fileId);
|
||||
});
|
||||
}
|
||||
|
||||
// The name has been updated if the user presses enter.
|
||||
function registerInputEnterKeyHandler(inputEl, fileId) {
|
||||
inheritedInternalApi.getDisposeSupport().attach(inputEl, "keyup", function(event) {
|
||||
|
||||
var code = event.keyCode || event.which;
|
||||
|
||||
if (code === 13) {
|
||||
handleNameUpdate(inputEl, fileId);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
qq.extend(spec, s);
|
||||
|
||||
spec.attachTo = spec.templating.getFileList();
|
||||
|
||||
qq.extend(this, new qq.UiEventHandler(spec, inheritedInternalApi));
|
||||
|
||||
qq.extend(inheritedInternalApi, {
|
||||
handleFilenameEdit: function(id, target, focusInput) {
|
||||
var newFilenameInputEl = spec.templating.getEditInput(id);
|
||||
|
||||
spec.onEditingStatusChange(id, true);
|
||||
|
||||
newFilenameInputEl.value = getFilenameSansExtension(id);
|
||||
|
||||
if (focusInput) {
|
||||
newFilenameInputEl.focus();
|
||||
}
|
||||
|
||||
registerInputBlurHandler(newFilenameInputEl, id);
|
||||
registerInputEnterKeyHandler(newFilenameInputEl, id);
|
||||
}
|
||||
});
|
||||
};
|
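// Illustrative sketch (not part of the original library) of the inline filename-edit commit logic
// described above: the new name is taken from the edit input when it loses focus or when Enter is
// pressed. The onCommit callback is an assumption introduced for this example.
function attachFilenameEditSketch(inputEl, onCommit) {
    function commit() {
        var newName = inputEl.value.replace(/^\s+|\s+$/g, "");  // trim, as qq.trimStr does above

        if (newName.length) {
            onCommit(newName);
        }
    }

    inputEl.addEventListener("blur", commit, false);
    inputEl.addEventListener("keyup", function(event) {
        if ((event.keyCode || event.which) === 13) {             // Enter commits the edited name
            commit();
        }
    }, false);
}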
@ -1,56 +0,0 @@
|
||||
/*globals qq */
|
||||
// Base handler for UI (FineUploader mode) events.
|
||||
// Some more specific handlers inherit from this one.
|
||||
qq.UiEventHandler = function(s, protectedApi) {
|
||||
"use strict";
|
||||
|
||||
var disposer = new qq.DisposeSupport(),
|
||||
spec = {
|
||||
eventType: "click",
|
||||
attachTo: null,
|
||||
onHandled: function(target, event) {}
|
||||
};
|
||||
|
||||
|
||||
// This makes up the "public" API methods that will be accessible
|
||||
// to instances constructing a base or child handler
|
||||
qq.extend(this, {
|
||||
addHandler: function(element) {
|
||||
addHandler(element);
|
||||
},
|
||||
|
||||
dispose: function() {
|
||||
disposer.dispose();
|
||||
}
|
||||
});
|
||||
|
||||
function addHandler(element) {
|
||||
disposer.attach(element, spec.eventType, function(event) {
|
||||
// Only in IE: the `event` is a property of the `window`.
|
||||
event = event || window.event;
|
||||
|
||||
// On older browsers, we must check the `srcElement` instead of the `target`.
|
||||
var target = event.target || event.srcElement;
|
||||
|
||||
spec.onHandled(target, event);
|
||||
});
|
||||
}
|
||||
|
||||
// These make up the "protected" API methods that children of this base handler will utilize.
|
||||
qq.extend(protectedApi, {
|
||||
getFileIdFromItem: function(item) {
|
||||
return item.qqFileId;
|
||||
},
|
||||
|
||||
getDisposeSupport: function() {
|
||||
return disposer;
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
qq.extend(spec, s);
|
||||
|
||||
if (spec.attachTo) {
|
||||
addHandler(spec.attachTo);
|
||||
}
|
||||
};
|
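// Illustrative sketch (not part of the original library) of the delegated UI event handling the base
// handler above implements: one listener on a container inspects event.target (srcElement on old IE)
// and dispatches on the clicked element's class. The class-to-handler map is an assumption here.
function delegateClicksSketch(container, handlersByClass) {
    function listener(event) {
        event = event || window.event;                           // old IE exposes the event on window
        var target = event.target || event.srcElement;

        Object.keys(handlersByClass).forEach(function(className) {
            if (target.className && target.className.indexOf(className) >= 0) {
                handlersByClass[className](target, event);
            }
        });
    }

    if (container.addEventListener) {
        container.addEventListener("click", listener, false);
    }
    else {
        container.attachEvent("onclick", listener);              // old IE fallback
    }
}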
@ -1,15 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* Child of FilenameInputFocusInHandler. Used to detect focus events on file edit input elements. This child module is only
|
||||
* needed for UAs that do not support the focusin event. Currently, only Firefox lacks this event.
|
||||
*
|
||||
* @param spec Overrides for default specifications
|
||||
*/
|
||||
qq.FilenameInputFocusHandler = function(spec) {
|
||||
"use strict";
|
||||
|
||||
spec.eventType = "focus";
|
||||
spec.attachTo = null;
|
||||
|
||||
qq.extend(this, new qq.FilenameInputFocusInHandler(spec, {}));
|
||||
};
|
@ -1,34 +0,0 @@
|
||||
/*globals qq */
|
||||
// Child of FilenameEditHandler. Used to detect focusin events on file edit input elements.
|
||||
qq.FilenameInputFocusInHandler = function(s, inheritedInternalApi) {
|
||||
"use strict";
|
||||
|
||||
var spec = {
|
||||
templating: null,
|
||||
onGetUploadStatus: function(fileId) {},
|
||||
log: function(message, lvl) {}
|
||||
};
|
||||
|
||||
if (!inheritedInternalApi) {
|
||||
inheritedInternalApi = {};
|
||||
}
|
||||
|
||||
// This will be called by the parent handler when a `focusin` event is received on the list element.
|
||||
function handleInputFocus(target, event) {
|
||||
if (spec.templating.isEditInput(target)) {
|
||||
var fileId = spec.templating.getFileId(target),
|
||||
status = spec.onGetUploadStatus(fileId);
|
||||
|
||||
if (status === qq.status.SUBMITTED) {
|
||||
spec.log(qq.format("Detected valid filename input focus event on file '{}', ID: {}.", spec.onGetName(fileId), fileId));
|
||||
inheritedInternalApi.handleFilenameEdit(fileId, target);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
spec.eventType = "focusin";
|
||||
spec.onHandled = handleInputFocus;
|
||||
|
||||
qq.extend(spec, s);
|
||||
qq.extend(this, new qq.FilenameEditHandler(spec, inheritedInternalApi));
|
||||
};
|
@ -1,208 +0,0 @@
|
||||
/*globals qq */
|
||||
qq.UploadData = function(uploaderProxy) {
|
||||
"use strict";
|
||||
|
||||
var data = [],
|
||||
byUuid = {},
|
||||
byStatus = {},
|
||||
byProxyGroupId = {},
|
||||
byBatchId = {};
|
||||
|
||||
|
||||
function getDataByIds(idOrIds) {
|
||||
if (qq.isArray(idOrIds)) {
|
||||
var entries = [];
|
||||
|
||||
qq.each(idOrIds, function(idx, id) {
|
||||
entries.push(data[id]);
|
||||
});
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
return data[idOrIds];
|
||||
}
|
||||
|
||||
function getDataByUuids(uuids) {
|
||||
if (qq.isArray(uuids)) {
|
||||
var entries = [];
|
||||
|
||||
qq.each(uuids, function(idx, uuid) {
|
||||
entries.push(data[byUuid[uuid]]);
|
||||
});
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
return data[byUuid[uuids]];
|
||||
}
|
||||
|
||||
function getDataByStatus(status) {
|
||||
var statusResults = [],
|
||||
statuses = [].concat(status);
|
||||
|
||||
qq.each(statuses, function(index, statusEnum) {
|
||||
var statusResultIndexes = byStatus[statusEnum];
|
||||
|
||||
if (statusResultIndexes !== undefined) {
|
||||
qq.each(statusResultIndexes, function(i, dataIndex) {
|
||||
statusResults.push(data[dataIndex]);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return statusResults;
|
||||
}
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* Adds a new file to the data cache for tracking purposes.
|
||||
*
|
||||
* @param spec Data that describes this file. Possible properties are:
|
||||
*
|
||||
* - uuid: Initial UUID for this file.
|
||||
* - name: Initial name of this file.
|
||||
* - size: Size of this file, omit if this cannot be determined
|
||||
* - status: Initial `qq.status` for this file. Omit for `qq.status.SUBMITTING`.
|
||||
* - batchId: ID of the batch this file belongs to
|
||||
* - proxyGroupId: ID of the proxy group associated with this file
|
||||
*
|
||||
* @returns {number} Internal ID for this file.
|
||||
*/
|
||||
addFile: function(spec) {
|
||||
var status = spec.status || qq.status.SUBMITTING;
|
||||
|
||||
var id = data.push({
|
||||
name: spec.name,
|
||||
originalName: spec.name,
|
||||
uuid: spec.uuid,
|
||||
size: spec.size || -1,
|
||||
status: status
|
||||
}) - 1;
|
||||
|
||||
if (spec.batchId) {
|
||||
data[id].batchId = spec.batchId;
|
||||
|
||||
if (byBatchId[spec.batchId] === undefined) {
|
||||
byBatchId[spec.batchId] = [];
|
||||
}
|
||||
byBatchId[spec.batchId].push(id);
|
||||
}
|
||||
|
||||
if (spec.proxyGroupId) {
|
||||
data[id].proxyGroupId = spec.proxyGroupId;
|
||||
|
||||
if (byProxyGroupId[spec.proxyGroupId] === undefined) {
|
||||
byProxyGroupId[spec.proxyGroupId] = [];
|
||||
}
|
||||
byProxyGroupId[spec.proxyGroupId].push(id);
|
||||
}
|
||||
|
||||
data[id].id = id;
|
||||
byUuid[spec.uuid] = id;
|
||||
|
||||
if (byStatus[status] === undefined) {
|
||||
byStatus[status] = [];
|
||||
}
|
||||
byStatus[status].push(id);
|
||||
|
||||
uploaderProxy.onStatusChange(id, null, status);
|
||||
|
||||
return id;
|
||||
},
|
||||
|
||||
retrieve: function(optionalFilter) {
|
||||
if (qq.isObject(optionalFilter) && data.length) {
|
||||
if (optionalFilter.id !== undefined) {
|
||||
return getDataByIds(optionalFilter.id);
|
||||
}
|
||||
|
||||
else if (optionalFilter.uuid !== undefined) {
|
||||
return getDataByUuids(optionalFilter.uuid);
|
||||
}
|
||||
|
||||
else if (optionalFilter.status) {
|
||||
return getDataByStatus(optionalFilter.status);
|
||||
}
|
||||
}
|
||||
else {
|
||||
return qq.extend([], data, true);
|
||||
}
|
||||
},
|
||||
|
||||
reset: function() {
|
||||
data = [];
|
||||
byUuid = {};
|
||||
byStatus = {};
|
||||
byBatchId = {};
|
||||
},
|
||||
|
||||
setStatus: function(id, newStatus) {
|
||||
var oldStatus = data[id].status,
|
||||
byStatusOldStatusIndex = qq.indexOf(byStatus[oldStatus], id);
|
||||
|
||||
byStatus[oldStatus].splice(byStatusOldStatusIndex, 1);
|
||||
|
||||
data[id].status = newStatus;
|
||||
|
||||
if (byStatus[newStatus] === undefined) {
|
||||
byStatus[newStatus] = [];
|
||||
}
|
||||
byStatus[newStatus].push(id);
|
||||
|
||||
uploaderProxy.onStatusChange(id, oldStatus, newStatus);
|
||||
},
|
||||
|
||||
uuidChanged: function(id, newUuid) {
|
||||
var oldUuid = data[id].uuid;
|
||||
|
||||
data[id].uuid = newUuid;
|
||||
byUuid[newUuid] = id;
|
||||
delete byUuid[oldUuid];
|
||||
},
|
||||
|
||||
updateName: function(id, newName) {
|
||||
data[id].name = newName;
|
||||
},
|
||||
|
||||
updateSize: function(id, newSize) {
|
||||
data[id].size = newSize;
|
||||
},
|
||||
|
||||
// Only applicable if this file has a parent that we may want to reference later.
|
||||
setParentId: function(targetId, parentId) {
|
||||
data[targetId].parentId = parentId;
|
||||
},
|
||||
|
||||
getIdsInProxyGroup: function(id) {
|
||||
var proxyGroupId = data[id].proxyGroupId;
|
||||
|
||||
if (proxyGroupId) {
|
||||
return byProxyGroupId[proxyGroupId];
|
||||
}
|
||||
return [];
|
||||
},
|
||||
|
||||
getIdsInBatch: function(id) {
|
||||
var batchId = data[id].batchId;
|
||||
|
||||
return byBatchId[batchId];
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
qq.status = {
|
||||
SUBMITTING: "submitting",
|
||||
SUBMITTED: "submitted",
|
||||
REJECTED: "rejected",
|
||||
QUEUED: "queued",
|
||||
CANCELED: "canceled",
|
||||
PAUSED: "paused",
|
||||
UPLOADING: "uploading",
|
||||
UPLOAD_RETRYING: "retrying upload",
|
||||
UPLOAD_SUCCESSFUL: "upload successful",
|
||||
UPLOAD_FAILED: "upload failed",
|
||||
DELETE_FAILED: "delete failed",
|
||||
DELETING: "deleting",
|
||||
DELETED: "deleted"
|
||||
};
|
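// Illustrative sketch (not part of the original library) of the indexing strategy used by the upload
// data tracker above: records live in an array keyed by numeric ID, with secondary indexes by UUID
// and by status kept in plain objects so lookups avoid scanning the whole list. The "submitting"
// status string mirrors the enum above; everything else is assumed for the example.
function UploadTrackerSketch() {
    var records = [],
        idsByUuid = {},
        idsByStatus = {};

    this.add = function(uuid, name) {
        var id = records.push({id: records.length, uuid: uuid, name: name, status: "submitting"}) - 1;

        idsByUuid[uuid] = id;
        (idsByStatus.submitting = idsByStatus.submitting || []).push(id);
        return id;
    };

    this.setStatus = function(id, newStatus) {
        var oldStatus = records[id].status,
            idx = idsByStatus[oldStatus].indexOf(id);

        idsByStatus[oldStatus].splice(idx, 1);                   // drop from the old status bucket
        (idsByStatus[newStatus] = idsByStatus[newStatus] || []).push(id);
        records[id].status = newStatus;
    };

    this.byUuid = function(uuid) {
        return records[idsByUuid[uuid]];
    };

    this.byStatus = function(status) {
        return (idsByStatus[status] || []).map(function(id) { return records[id]; });
    };
}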
@ -1,286 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Common APIs exposed to creators of upload via form/iframe handlers. This is reused and possibly overridden
|
||||
* in some cases by specific form upload handlers.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
qq.FormUploadHandler = function(spec) {
|
||||
"use strict";
|
||||
|
||||
var options = spec.options,
|
||||
handler = this,
|
||||
proxy = spec.proxy,
|
||||
formHandlerInstanceId = qq.getUniqueId(),
|
||||
onloadCallbacks = {},
|
||||
detachLoadEvents = {},
|
||||
postMessageCallbackTimers = {},
|
||||
isCors = options.isCors,
|
||||
inputName = options.inputName,
|
||||
getUuid = proxy.getUuid,
|
||||
log = proxy.log,
|
||||
corsMessageReceiver = new qq.WindowReceiveMessage({log: log});
|
||||
|
||||
|
||||
/**
|
||||
* Remove any trace of the file from the handler.
|
||||
*
|
||||
* @param id ID of the associated file
|
||||
*/
|
||||
function expungeFile(id) {
|
||||
delete detachLoadEvents[id];
|
||||
|
||||
// If we are dealing with CORS, we might still be waiting for a response from a loaded iframe.
|
||||
// In that case, terminate the timer waiting for a message from the loaded iframe
|
||||
// and stop listening for any more messages coming from this iframe.
|
||||
if (isCors) {
|
||||
clearTimeout(postMessageCallbackTimers[id]);
|
||||
delete postMessageCallbackTimers[id];
|
||||
corsMessageReceiver.stopReceivingMessages(id);
|
||||
}
|
||||
|
||||
var iframe = document.getElementById(handler._getIframeName(id));
|
||||
if (iframe) {
|
||||
// To cancel the request, set src to something else. We use src="javascript:false;"
|
||||
// because it doesn't trigger ie6 prompt on https
|
||||
/* jshint scripturl:true */
|
||||
iframe.setAttribute("src", "javascript:false;");
|
||||
|
||||
qq(iframe).remove();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param iframeName `document`-unique Name of the associated iframe
|
||||
* @returns {*} ID of the associated file
|
||||
*/
|
||||
function getFileIdForIframeName(iframeName) {
|
||||
return iframeName.split("_")[0];
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an iframe to be used as a target for upload-related form submits. This also adds the iframe
|
||||
* to the current `document`. Note that the iframe is hidden from view.
|
||||
*
|
||||
* @param name Name of the iframe.
|
||||
* @returns {HTMLIFrameElement} The created iframe
|
||||
*/
|
||||
function initIframeForUpload(name) {
|
||||
var iframe = qq.toElement("<iframe src='javascript:false;' name='" + name + "' />");
|
||||
|
||||
iframe.setAttribute("id", name);
|
||||
|
||||
iframe.style.display = "none";
|
||||
document.body.appendChild(iframe);
|
||||
|
||||
return iframe;
|
||||
}
|
||||
|
||||
/**
|
||||
* If we are in CORS mode, we must listen for messages (containing the server response) from the associated
|
||||
* iframe, since we cannot directly parse the content of the iframe due to cross-origin restrictions.
|
||||
*
|
||||
* @param iframe Listen for messages on this iframe.
|
||||
* @param callback Invoke this callback with the message from the iframe.
|
||||
*/
|
||||
function registerPostMessageCallback(iframe, callback) {
|
||||
var iframeName = iframe.id,
|
||||
fileId = getFileIdForIframeName(iframeName),
|
||||
uuid = getUuid(fileId);
|
||||
|
||||
onloadCallbacks[uuid] = callback;
|
||||
|
||||
// When the iframe has loaded (after the server responds to an upload request)
|
||||
// declare the attempt a failure if we don't receive a valid message shortly after the response comes in.
|
||||
detachLoadEvents[fileId] = qq(iframe).attach("load", function() {
|
||||
if (handler.getInput(fileId)) {
|
||||
log("Received iframe load event for CORS upload request (iframe name " + iframeName + ")");
|
||||
|
||||
postMessageCallbackTimers[iframeName] = setTimeout(function() {
|
||||
var errorMessage = "No valid message received from loaded iframe for iframe name " + iframeName;
|
||||
log(errorMessage, "error");
|
||||
callback({
|
||||
error: errorMessage
|
||||
});
|
||||
}, 1000);
|
||||
}
|
||||
});
|
||||
|
||||
// Listen for messages coming from this iframe. When a message has been received, cancel the timer
|
||||
// that declares the upload a failure if a message is not received within a reasonable amount of time.
|
||||
corsMessageReceiver.receiveMessage(iframeName, function(message) {
|
||||
log("Received the following window message: '" + message + "'");
|
||||
var fileId = getFileIdForIframeName(iframeName),
|
||||
response = handler._parseJsonResponse(fileId, message),
|
||||
uuid = response.uuid,
|
||||
onloadCallback;
|
||||
|
||||
if (uuid && onloadCallbacks[uuid]) {
|
||||
log("Handling response for iframe name " + iframeName);
|
||||
clearTimeout(postMessageCallbackTimers[iframeName]);
|
||||
delete postMessageCallbackTimers[iframeName];
|
||||
|
||||
handler._detachLoadEvent(iframeName);
|
||||
|
||||
onloadCallback = onloadCallbacks[uuid];
|
||||
|
||||
delete onloadCallbacks[uuid];
|
||||
corsMessageReceiver.stopReceivingMessages(iframeName);
|
||||
onloadCallback(response);
|
||||
}
|
||||
else if (!uuid) {
|
||||
log("'" + message + "' does not contain a UUID - ignoring.");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
qq.extend(this, new qq.UploadHandler(spec));
|
||||
|
||||
qq.override(this, function(super_) {
|
||||
return {
|
||||
/**
|
||||
* Adds File or Blob to the queue
|
||||
**/
|
||||
add: function(id, fileInput) {
|
||||
super_.add(id, {input: fileInput});
|
||||
|
||||
fileInput.setAttribute("name", inputName);
|
||||
|
||||
// remove file input from DOM
|
||||
if (fileInput.parentNode){
|
||||
qq(fileInput).remove();
|
||||
}
|
||||
},
|
||||
|
||||
expunge: function(id) {
|
||||
expungeFile(id);
|
||||
super_.expunge(id);
|
||||
},
|
||||
|
||||
isValid: function(id) {
|
||||
return super_.isValid(id) &&
|
||||
handler._getFileState(id).input !== undefined;
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* This function either delegates to a more specific message handler if CORS is involved,
|
||||
* or simply registers a callback when the iframe has been loaded that invokes the passed callback
|
||||
* after determining if the content of the iframe is accessible.
|
||||
*
|
||||
* @param iframe Associated iframe
|
||||
* @param callback Callback to invoke after we have determined if the iframe content is accessible.
|
||||
*/
|
||||
_attachLoadEvent: function(iframe, callback) {
|
||||
/*jslint eqeq: true*/
|
||||
var responseDescriptor;
|
||||
|
||||
if (isCors) {
|
||||
registerPostMessageCallback(iframe, callback);
|
||||
}
|
||||
else {
|
||||
detachLoadEvents[iframe.id] = qq(iframe).attach("load", function(){
|
||||
log("Received response for " + iframe.id);
|
||||
|
||||
// when we remove iframe from dom
|
||||
// the request stops, but in IE load
|
||||
// event fires
|
||||
if (!iframe.parentNode){
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// fixing Opera 10.53
|
||||
if (iframe.contentDocument &&
|
||||
iframe.contentDocument.body &&
|
||||
iframe.contentDocument.body.innerHTML == "false"){
|
||||
// In Opera the load event fires a second time
|
||||
// when body.innerHTML changes from "false"
|
||||
// to the server response, roughly 1 second
|
||||
// after the file is uploaded via the iframe
|
||||
return;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
//IE may throw an "access is denied" error when attempting to access contentDocument on the iframe in some cases
|
||||
log("Error when attempting to access iframe during handling of upload response (" + error.message + ")", "error");
|
||||
responseDescriptor = {success: false};
|
||||
}
|
||||
|
||||
callback(responseDescriptor);
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Creates an iframe with a specific document-unique name.
|
||||
*
|
||||
* @param id ID of the associated file
|
||||
* @returns {HTMLIFrameElement}
|
||||
*/
|
||||
_createIframe: function(id) {
|
||||
var iframeName = handler._getIframeName(id);
|
||||
|
||||
return initIframeForUpload(iframeName);
|
||||
},
|
||||
|
||||
/**
|
||||
* Called when we are no longer interested in being notified when an iframe has loaded.
|
||||
*
|
||||
* @param id Associated file ID
|
||||
*/
|
||||
_detachLoadEvent: function(id) {
|
||||
if (detachLoadEvents[id] !== undefined) {
|
||||
detachLoadEvents[id]();
|
||||
delete detachLoadEvents[id];
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* @param fileId ID of the associated file
|
||||
* @returns {string} The `document`-unique name of the iframe
|
||||
*/
|
||||
_getIframeName: function(fileId) {
|
||||
return fileId + "_" + formHandlerInstanceId;
|
||||
},
|
||||
|
||||
getInput: function(id) {
|
||||
return handler._getFileState(id).input;
|
||||
},
|
||||
|
||||
/**
|
||||
* Generates a form element and appends it to the `document`. When the form is submitted, a specific iframe is targeted.
|
||||
* The name of the iframe is passed in as a property of the spec parameter, and must be unique in the `document`. Note
|
||||
* that the form is hidden from view.
|
||||
*
|
||||
* @param spec An object containing various properties to be used when constructing the form. Required properties are
|
||||
* currently: `method`, `endpoint`, `params`, `paramsInBody`, and `targetName`.
|
||||
* @returns {HTMLFormElement} The created form
|
||||
*/
|
||||
_initFormForUpload: function(spec) {
|
||||
var method = spec.method,
|
||||
endpoint = spec.endpoint,
|
||||
params = spec.params,
|
||||
paramsInBody = spec.paramsInBody,
|
||||
targetName = spec.targetName,
|
||||
form = qq.toElement("<form method='" + method + "' enctype='multipart/form-data'></form>"),
|
||||
url = endpoint;
|
||||
|
||||
if (paramsInBody) {
|
||||
qq.obj2Inputs(params, form);
|
||||
}
|
||||
else {
|
||||
url = qq.obj2url(params, endpoint);
|
||||
}
|
||||
|
||||
form.setAttribute("action", url);
|
||||
form.setAttribute("target", targetName);
|
||||
form.style.display = "none";
|
||||
document.body.appendChild(form);
|
||||
|
||||
return form;
|
||||
}
|
||||
});
|
||||
};
|
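// Illustrative sketch (not part of the original library) of the cross-origin iframe response flow the
// form handler above relies on: when the upload endpoint lives on another origin the parent page
// cannot read the iframe's document, so the page returned by the server posts the JSON response to
// the parent via window.postMessage and the parent listens for it. The origin below is an assumption.
//
// Parent page:
window.addEventListener("message", function(event) {
    var response;

    try {
        response = JSON.parse(event.data);
    }
    catch (e) {
        return;                                   // ignore messages that are not upload responses
    }

    if (response && response.uuid) {
        console.log("upload finished for", response.uuid, "success:", response.success);
    }
}, false);

// Page returned by the upload endpoint inside the hidden iframe (served by the other origin):
// <script>
//     window.parent.postMessage(
//         JSON.stringify({success: true, uuid: "the-file-uuid"}),
//         "https://app.example.com"              // assumed parent origin
//     );
// </script>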
@ -1,714 +0,0 @@
|
||||
/*globals qq*/
|
||||
/**
|
||||
* Base upload handler module. Controls more specific handlers.
|
||||
*
|
||||
* @param o Options. Passed along to the specific handler submodule as well.
|
||||
* @param namespace [optional] Namespace for the specific handler.
|
||||
*/
|
||||
qq.UploadHandlerController = function(o, namespace) {
|
||||
"use strict";
|
||||
|
||||
var controller = this,
|
||||
chunkingPossible = false,
|
||||
concurrentChunkingPossible = false,
|
||||
chunking, preventRetryResponse, log, handler,
|
||||
|
||||
options = {
|
||||
paramsStore: {},
|
||||
maxConnections: 3, // maximum number of concurrent uploads
|
||||
chunking: {
|
||||
enabled: false,
|
||||
multiple: {
|
||||
enabled: false
|
||||
}
|
||||
},
|
||||
log: function(str, level) {},
|
||||
onProgress: function(id, fileName, loaded, total){},
|
||||
onComplete: function(id, fileName, response, xhr){},
|
||||
onCancel: function(id, fileName){},
|
||||
onUploadPrep: function(id){}, // Called if non-trivial operations will be performed before onUpload
|
||||
onUpload: function(id, fileName){},
|
||||
onUploadChunk: function(id, fileName, chunkData){},
|
||||
onUploadChunkSuccess: function(id, chunkData, response, xhr){},
|
||||
onAutoRetry: function(id, fileName, response, xhr){},
|
||||
onResume: function(id, fileName, chunkData){},
|
||||
onUuidChanged: function(id, newUuid){},
|
||||
getName: function(id) {},
|
||||
setSize: function(id, newSize) {},
|
||||
isQueued: function(id) {},
|
||||
getIdsInProxyGroup: function(id) {},
|
||||
getIdsInBatch: function(id) {}
|
||||
},
|
||||
|
||||
|
||||
chunked = {
|
||||
// Called when each chunk has uploaded successfully
|
||||
done: function(id, chunkIdx, response, xhr) {
|
||||
var chunkData = handler._getChunkData(id, chunkIdx);
|
||||
|
||||
handler._getFileState(id).attemptingResume = false;
|
||||
|
||||
delete handler._getFileState(id).temp.chunkProgress[chunkIdx];
|
||||
handler._getFileState(id).loaded += chunkData.size;
|
||||
|
||||
options.onUploadChunkSuccess(id, handler._getChunkDataForCallback(chunkData), response, xhr);
|
||||
},
|
||||
|
||||
// Called when all chunks have been successfully uploaded and we want to ask the handler to perform any
|
||||
// logic associated with closing out the file, such as combining the chunks.
|
||||
finalize: function(id) {
|
||||
var size = options.getSize(id),
|
||||
name = options.getName(id);
|
||||
|
||||
log("All chunks have been uploaded for " + id + " - finalizing....");
|
||||
handler.finalizeChunks(id).then(
|
||||
function(response, xhr) {
|
||||
log("Finalize successful for " + id);
|
||||
|
||||
var normalizedResponse = upload.normalizeResponse(response, true);
|
||||
|
||||
options.onProgress(id, name, size, size);
|
||||
handler._maybeDeletePersistedChunkData(id);
|
||||
upload.cleanup(id, normalizedResponse, xhr);
|
||||
},
|
||||
function(response, xhr) {
|
||||
var normalizedResponse = upload.normalizeResponse(response, false);
|
||||
|
||||
log("Problem finalizing chunks for file ID " + id + " - " + normaizedResponse.error, "error");
|
||||
|
||||
if (normalizedResponse.reset) {
|
||||
chunked.reset(id);
|
||||
}
|
||||
|
||||
if (!options.onAutoRetry(id, name, normalizedResponse, xhr)) {
|
||||
upload.cleanup(id, normalizedResponse, xhr);
|
||||
}
|
||||
}
|
||||
);
|
||||
},
|
||||
|
||||
hasMoreParts: function(id) {
|
||||
return !!handler._getFileState(id).chunking.remaining.length;
|
||||
},
|
||||
|
||||
nextPart: function(id) {
|
||||
var nextIdx = handler._getFileState(id).chunking.remaining.shift();
|
||||
|
||||
if (nextIdx >= handler._getTotalChunks(id)) {
|
||||
nextIdx = null;
|
||||
}
|
||||
|
||||
return nextIdx;
|
||||
},
|
||||
|
||||
reset: function(id) {
|
||||
log("Server or callback has ordered chunking effort to be restarted on next attempt for item ID " + id, "error");
|
||||
|
||||
handler._maybeDeletePersistedChunkData(id);
|
||||
handler.reevaluateChunking(id);
|
||||
handler._getFileState(id).loaded = 0;
|
||||
},
|
||||
|
||||
sendNext: function(id) {
|
||||
var size = options.getSize(id),
|
||||
name = options.getName(id),
|
||||
chunkIdx = chunked.nextPart(id),
|
||||
chunkData = handler._getChunkData(id, chunkIdx),
|
||||
resuming = handler._getFileState(id).attemptingResume,
|
||||
inProgressChunks = handler._getFileState(id).chunking.inProgress || [];
|
||||
|
||||
if (handler._getFileState(id).loaded == null) {
|
||||
handler._getFileState(id).loaded = 0;
|
||||
}
|
||||
|
||||
// Don't follow-through with the resume attempt if the integrator returns false from onResume
|
||||
if (resuming && options.onResume(id, name, chunkData) === false) {
|
||||
chunked.reset(id);
|
||||
chunkIdx = chunked.nextPart(id);
|
||||
chunkData = handler._getChunkData(id, chunkIdx);
|
||||
resuming = false;
|
||||
}
|
||||
|
||||
// If all chunks have already uploaded successfully, we must be re-attempting the finalize step.
|
||||
if (chunkIdx == null && inProgressChunks.length === 0) {
|
||||
chunked.finalize(id);
|
||||
}
|
||||
|
||||
// Send the next chunk
|
||||
else {
|
||||
log("Sending chunked upload request for item " + id + ": bytes " + (chunkData.start+1) + "-" + chunkData.end + " of " + size);
|
||||
options.onUploadChunk(id, name, handler._getChunkDataForCallback(chunkData));
|
||||
|
||||
inProgressChunks.push(chunkIdx);
|
||||
handler._getFileState(id).chunking.inProgress = inProgressChunks;
|
||||
|
||||
if (concurrentChunkingPossible) {
|
||||
connectionManager.open(id, chunkIdx);
|
||||
}
|
||||
|
||||
if (concurrentChunkingPossible && connectionManager.available() && handler._getFileState(id).chunking.remaining.length) {
|
||||
chunked.sendNext(id);
|
||||
}
|
||||
|
||||
handler.uploadChunk(id, chunkIdx, resuming).then(
|
||||
// upload chunk success
|
||||
function success(response, xhr) {
|
||||
log("Chunked upload request succeeded for " + id + ", chunk " + chunkIdx);
|
||||
|
||||
handler.clearCachedChunk(id, chunkIdx);
|
||||
|
||||
var inProgressChunks = handler._getFileState(id).chunking.inProgress || [],
|
||||
responseToReport = upload.normalizeResponse(response, true),
|
||||
inProgressChunkIdx = qq.indexOf(inProgressChunks, chunkIdx);
|
||||
|
||||
log(qq.format("Chunk {} for file {} uploaded successfully.", chunkIdx, id));
|
||||
|
||||
chunked.done(id, chunkIdx, responseToReport, xhr);
|
||||
|
||||
if (inProgressChunkIdx >= 0) {
|
||||
inProgressChunks.splice(inProgressChunkIdx, 1);
|
||||
}
|
||||
|
||||
handler._maybePersistChunkedState(id);
|
||||
|
||||
if (!chunked.hasMoreParts(id) && inProgressChunks.length === 0) {
|
||||
chunked.finalize(id);
|
||||
}
|
||||
else if (chunked.hasMoreParts(id)) {
|
||||
chunked.sendNext(id);
|
||||
}
|
||||
},
|
||||
|
||||
// upload chunk failure
|
||||
function failure(response, xhr) {
|
||||
log("Chunked upload request failed for " + id + ", chunk " + chunkIdx);
|
||||
|
||||
handler.clearCachedChunk(id, chunkIdx);
|
||||
|
||||
var responseToReport = upload.normalizeResponse(response, false);
|
||||
|
||||
if (responseToReport.reset) {
|
||||
chunked.reset(id);
|
||||
}
|
||||
else {
|
||||
var inProgressIdx = qq.indexOf(handler._getFileState(id).chunking.inProgress, chunkIdx);
|
||||
if (inProgressIdx >= 0) {
|
||||
handler._getFileState(id).chunking.inProgress.splice(inProgressIdx, 1);
|
||||
handler._getFileState(id).chunking.remaining.unshift(chunkIdx);
|
||||
}
|
||||
}
|
||||
|
||||
// We may have aborted all other in-progress chunks for this file due to a failure.
|
||||
// If so, ignore the failures associated with those aborts.
|
||||
if (!handler._getFileState(id).temp.ignoreFailure) {
|
||||
// If this chunk has failed, we want to ignore all other failures of currently in-progress
|
||||
// chunks since they will be explicitly aborted
|
||||
if (concurrentChunkingPossible) {
|
||||
handler._getFileState(id).temp.ignoreFailure = true;
|
||||
|
||||
qq.each(handler._getXhrs(id), function(ckid, ckXhr) {
|
||||
ckXhr.abort();
|
||||
});
|
||||
|
||||
// We must indicate that all aborted chunks are no longer in progress
|
||||
handler.moveInProgressToRemaining(id);
|
||||
|
||||
// Free up any connections used by these chunks, but don't allow any
|
||||
// other files to take up the connections (until we have exhausted all auto-retries)
|
||||
connectionManager.free(id, true);
|
||||
}
|
||||
|
||||
if (!options.onAutoRetry(id, name, responseToReport, xhr)) {
|
||||
// If one chunk fails, abort all of the others to avoid odd race conditions that occur
|
||||
// if a chunk succeeds immediately after one fails before we have determined if the upload
|
||||
// is a failure or not.
|
||||
upload.cleanup(id, responseToReport, xhr);
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
.done(function() {
|
||||
handler.clearXhr(id, chunkIdx);
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
connectionManager = {
|
||||
_open: [],
|
||||
_openChunks: {},
|
||||
_waiting: [],
|
||||
|
||||
available: function() {
|
||||
var max = options.maxConnections,
|
||||
openChunkEntriesCount = 0,
|
||||
openChunksCount = 0;
|
||||
|
||||
qq.each(connectionManager._openChunks, function(fileId, openChunkIndexes) {
|
||||
openChunkEntriesCount++;
|
||||
openChunksCount += openChunkIndexes.length;
|
||||
});
|
||||
|
||||
return max - (connectionManager._open.length - openChunkEntriesCount + openChunksCount);
|
||||
},
|
||||
|
||||
/**
|
||||
* Removes element from queue, starts upload of next
|
||||
*/
|
||||
free: function(id, dontAllowNext) {
|
||||
var allowNext = !dontAllowNext,
|
||||
waitingIndex = qq.indexOf(connectionManager._waiting, id),
|
||||
connectionsIndex = qq.indexOf(connectionManager._open, id),
|
||||
nextId;
|
||||
|
||||
delete connectionManager._openChunks[id];
|
||||
|
||||
if (upload.getProxyOrBlob(id) instanceof qq.BlobProxy) {
|
||||
log("Generated blob upload has ended for " + id + ", disposing generated blob.");
|
||||
delete handler._getFileState(id).file;
|
||||
}
|
||||
|
||||
// If this file was not consuming a connection, it was just waiting, so remove it from the waiting array
|
||||
if (waitingIndex >= 0) {
|
||||
connectionManager._waiting.splice(waitingIndex, 1);
|
||||
}
|
||||
// If this file was consuming a connection, allow the next file to be uploaded
|
||||
else if (allowNext && connectionsIndex >= 0) {
|
||||
connectionManager._open.splice(connectionsIndex, 1);
|
||||
|
||||
nextId = connectionManager._waiting.shift();
|
||||
if (nextId >= 0) {
|
||||
connectionManager._open.push(nextId);
|
||||
upload.start(nextId);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
getWaitingOrConnected: function() {
|
||||
var waitingOrConnected = [];
|
||||
|
||||
// Collect the IDs of uploads currently consuming a connection...
waitingOrConnected = waitingOrConnected.concat(connectionManager._open);
|
||||
// ...plus those still waiting for one.
return waitingOrConnected.concat(connectionManager._waiting);
|
||||
},
|
||||
|
||||
isUsingConnection: function(id) {
|
||||
return qq.indexOf(connectionManager._open, id) >= 0;
|
||||
},
|
||||
|
||||
open: function(id, chunkIdx) {
|
||||
if (chunkIdx == null) {
|
||||
connectionManager._waiting.push(id);
|
||||
}
|
||||
|
||||
if (connectionManager.available()) {
|
||||
if (chunkIdx == null) {
|
||||
connectionManager._waiting.pop();
|
||||
connectionManager._open.push(id);
|
||||
}
|
||||
else {
|
||||
(function() {
|
||||
var openChunksEntry = connectionManager._openChunks[id] || [];
|
||||
openChunksEntry.push(chunkIdx);
|
||||
connectionManager._openChunks[id] = openChunksEntry;
|
||||
}());
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
|
||||
reset: function() {
|
||||
connectionManager._waiting = [];
|
||||
connectionManager._open = [];
|
||||
}
|
||||
},
|
||||
|
||||
simple = {
|
||||
send: function(id, name) {
|
||||
handler._getFileState(id).loaded = 0;
|
||||
|
||||
log("Sending simple upload request for " + id);
|
||||
handler.uploadFile(id).then(
|
||||
function(response, opt_xhr) {
|
||||
log("Simple upload request succeeded for " + id);
|
||||
|
||||
var responseToReport = upload.normalizeResponse(response, true);
|
||||
|
||||
var size = options.getSize(id);
|
||||
|
||||
options.onProgress(id, name, size, size);
|
||||
upload.maybeNewUuid(id, responseToReport);
|
||||
upload.cleanup(id, responseToReport, opt_xhr);
|
||||
},
|
||||
|
||||
function(response, opt_xhr) {
|
||||
log("Simple upload request failed for " + id);
|
||||
|
||||
var responseToReport = upload.normalizeResponse(response, false);
|
||||
|
||||
if (!options.onAutoRetry(id, name, responseToReport, opt_xhr)) {
|
||||
upload.cleanup(id, responseToReport, opt_xhr);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
upload = {
|
||||
cancel: function(id) {
|
||||
log("Cancelling " + id);
|
||||
options.paramsStore.remove(id);
|
||||
connectionManager.free(id);
|
||||
},
|
||||
|
||||
cleanup: function(id, response, opt_xhr) {
|
||||
var name = options.getName(id);
|
||||
|
||||
options.onComplete(id, name, response, opt_xhr);
|
||||
|
||||
if (handler._getFileState(id)) {
|
||||
handler._clearXhrs && handler._clearXhrs(id);
|
||||
}
|
||||
|
||||
connectionManager.free(id);
|
||||
},
|
||||
|
||||
// Returns a qq.BlobProxy, or an actual File/Blob if no proxy is involved, or undefined
|
||||
// if none of these are available for the ID
|
||||
getProxyOrBlob: function(id) {
|
||||
return (handler.getProxy && handler.getProxy(id)) ||
|
||||
(handler.getFile && handler.getFile(id));
|
||||
},
|
||||
|
||||
initHandler: function() {
|
||||
var handlerType = namespace ? qq[namespace] : qq.traditional,
|
||||
handlerModuleSubtype = qq.supportedFeatures.ajaxUploading ? "Xhr" : "Form";
|
||||
|
||||
handler = new handlerType[handlerModuleSubtype + "UploadHandler"](
|
||||
options,
|
||||
{
|
||||
getDataByUuid: options.getDataByUuid,
|
||||
getName: options.getName,
|
||||
getSize: options.getSize,
|
||||
getUuid: options.getUuid,
|
||||
log: log,
|
||||
onCancel: options.onCancel,
|
||||
onProgress: options.onProgress,
|
||||
onUuidChanged: options.onUuidChanged
|
||||
}
|
||||
);
|
||||
|
||||
if (handler._removeExpiredChunkingRecords) {
|
||||
handler._removeExpiredChunkingRecords();
|
||||
}
|
||||
},
|
||||
|
||||
isDeferredEligibleForUpload: function(id) {
|
||||
return options.isQueued(id);
|
||||
},
|
||||
|
||||
// For Blobs that are part of a group of generated images, along with a reference image,
|
||||
// this will ensure the blobs in the group are uploaded in the order they were triggered,
|
||||
// even if some async processing must be completed on one or more Blobs first.
|
||||
maybeDefer: function(id, blob) {
|
||||
// If we don't have a file/blob yet & no file/blob exists for this item, request it,
|
||||
// and then submit the upload to the specific handler once the blob is available.
|
||||
// ASSUMPTION: This condition will only ever be true if XHR uploading is supported.
|
||||
if (blob && !handler.getFile(id) && blob instanceof qq.BlobProxy) {
|
||||
|
||||
// Blob creation may take some time, so the caller may want to update the
|
||||
// UI to indicate that an operation is in progress, even before the actual
|
||||
// upload begins and an onUpload callback is invoked.
|
||||
options.onUploadPrep(id);
|
||||
|
||||
log("Attempting to generate a blob on-demand for " + id);
|
||||
blob.create().then(function(generatedBlob) {
|
||||
log("Generated an on-demand blob for " + id);
|
||||
|
||||
// Update record associated with this file by providing the generated Blob
|
||||
handler.updateBlob(id, generatedBlob);
|
||||
|
||||
// Propagate the size for this generated Blob
|
||||
options.setSize(id, generatedBlob.size);
|
||||
|
||||
// Order handler to recalculate chunking possibility, if applicable
|
||||
handler.reevaluateChunking(id);
|
||||
|
||||
upload.maybeSendDeferredFiles(id);
|
||||
},
|
||||
|
||||
// Blob could not be generated. Fail the upload & attempt to prevent retries. Also bubble error message.
|
||||
function(errorMessage) {
|
||||
var errorResponse = {};
|
||||
|
||||
if (errorMessage) {
|
||||
errorResponse.error = errorMessage;
|
||||
}
|
||||
|
||||
log(qq.format("Failed to generate blob for ID {}. Error message: {}.", id, errorMessage), "error");
|
||||
|
||||
options.onComplete(id, options.getName(id), qq.extend(errorResponse, preventRetryResponse), null);
|
||||
upload.maybeSendDeferredFiles(id);
|
||||
connectionManager.free(id);
|
||||
});
|
||||
}
|
||||
else {
|
||||
return upload.maybeSendDeferredFiles(id);
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
|
||||
// Upload any grouped blobs, in the proper order, that are ready to be uploaded
|
||||
maybeSendDeferredFiles: function(id) {
|
||||
var idsInGroup = options.getIdsInProxyGroup(id),
|
||||
uploadedThisId = false;
|
||||
|
||||
if (idsInGroup && idsInGroup.length) {
|
||||
log("Maybe ready to upload proxy group file " + id);
|
||||
|
||||
qq.each(idsInGroup, function(idx, idInGroup) {
|
||||
if (upload.isDeferredEligibleForUpload(idInGroup) && !!handler.getFile(idInGroup)) {
|
||||
uploadedThisId = idInGroup === id;
|
||||
upload.now(idInGroup);
|
||||
}
|
||||
else if (upload.isDeferredEligibleForUpload(idInGroup)) {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
}
|
||||
else {
|
||||
uploadedThisId = true;
|
||||
upload.now(id);
|
||||
}
|
||||
|
||||
return uploadedThisId;
|
||||
},
|
||||
|
||||
maybeNewUuid: function (id, response) {
|
||||
if (response.newUuid !== undefined) {
|
||||
options.onUuidChanged(id, response.newUuid);
|
||||
}
|
||||
},
|
||||
|
||||
// The response coming from handler implementations may be in various formats.
|
||||
// Instead of hoping a promise nested 5 levels deep will always return an object
|
||||
// as its first param, let's just normalize the response here.
|
||||
normalizeResponse: function(originalResponse, successful) {
|
||||
var response = originalResponse;
|
||||
|
||||
// The passed "response" param may not be a response at all.
|
||||
// It could be a string, detailing the error, for example.
|
||||
if (!qq.isObject(originalResponse)) {
|
||||
response = {};
|
||||
|
||||
if (qq.isString(originalResponse) && !successful) {
|
||||
response.error = originalResponse;
|
||||
}
|
||||
}
|
||||
|
||||
response.success = successful;
|
||||
|
||||
return response;
|
||||
},
|
||||
|
||||
now: function(id) {
|
||||
var name = options.getName(id);
|
||||
|
||||
if (!controller.isValid(id)) {
|
||||
throw new qq.Error(id + " is not a valid file ID to upload!");
|
||||
}
|
||||
|
||||
options.onUpload(id, name);
|
||||
|
||||
if (chunkingPossible && handler._shouldChunkThisFile(id)) {
|
||||
chunked.sendNext(id);
|
||||
}
|
||||
else {
|
||||
simple.send(id, name);
|
||||
}
|
||||
},
|
||||
|
||||
start: function(id) {
|
||||
var blobToUpload = upload.getProxyOrBlob(id);
|
||||
|
||||
if (blobToUpload) {
|
||||
return upload.maybeDefer(id, blobToUpload);
|
||||
}
|
||||
else {
|
||||
upload.now(id);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* Adds file or file input to the queue
|
||||
**/
|
||||
add: function(id, file) {
|
||||
handler.add.apply(this, arguments);
|
||||
},
|
||||
|
||||
/**
|
||||
* Sends the file identified by id
|
||||
*/
|
||||
upload: function(id) {
|
||||
if (connectionManager.open(id)) {
|
||||
return upload.start(id);
|
||||
}
|
||||
return false;
|
||||
},
|
||||
|
||||
retry: function(id) {
|
||||
// On retry, if concurrent chunking has been enabled, we may have aborted all other in-progress chunks
|
||||
// for a file when encountering a failed chunk upload. We then signaled the controller to ignore
|
||||
// all failures associated with these aborts. We are now retrying, so we don't want to ignore
|
||||
// any more failures at this point.
|
||||
if (concurrentChunkingPossible) {
|
||||
handler._getFileState(id).temp.ignoreFailure = false;
|
||||
}
|
||||
|
||||
// If we are attempting to retry a file that is already consuming a connection, this is likely an auto-retry.
|
||||
// Just go ahead and ask the handler to upload again.
|
||||
if (connectionManager.isUsingConnection(id)) {
|
||||
return upload.start(id);
|
||||
}
|
||||
|
||||
// If we are attempting to retry a file that is not currently consuming a connection,
|
||||
// this is likely a manual retry attempt. We will need to ensure a connection is available
|
||||
// before the retry commences.
|
||||
else {
|
||||
return controller.upload(id);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Cancels file upload by id
|
||||
*/
|
||||
cancel: function(id) {
|
||||
var cancelRetVal = handler.cancel(id);
|
||||
|
||||
if (qq.isGenericPromise(cancelRetVal)) {
|
||||
cancelRetVal.then(function() {
|
||||
upload.cancel(id);
|
||||
});
|
||||
}
|
||||
else if (cancelRetVal !== false) {
|
||||
upload.cancel(id);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Cancels all queued or in-progress uploads
|
||||
*/
|
||||
cancelAll: function() {
|
||||
var waitingOrConnected = connectionManager.getWaitingOrConnected();
|
||||
|
||||
qq.each(waitingOrConnected, function(idx, fileId) {
|
||||
controller.cancel(fileId);
|
||||
});
|
||||
|
||||
connectionManager.reset();
|
||||
},
|
||||
|
||||
// Returns a File, Blob, or the Blob/File for the reference/parent file if the targeted blob is a proxy.
|
||||
// Undefined if no file record is available.
|
||||
getFile: function(id) {
|
||||
if (handler.getProxy && handler.getProxy(id)) {
|
||||
return handler.getProxy(id).referenceBlob;
|
||||
}
|
||||
|
||||
return handler.getFile && handler.getFile(id);
|
||||
},
|
||||
|
||||
// Returns true if the Blob associated with the ID is related to a proxy.
|
||||
isProxied: function(id) {
|
||||
return !!(handler.getProxy && handler.getProxy(id));
|
||||
},
|
||||
|
||||
getInput: function(id) {
|
||||
if (handler.getInput) {
|
||||
return handler.getInput(id);
|
||||
}
|
||||
},
|
||||
|
||||
reset: function() {
|
||||
log("Resetting upload handler");
|
||||
controller.cancelAll();
|
||||
connectionManager.reset();
|
||||
handler.reset();
|
||||
},
|
||||
|
||||
expunge: function(id) {
|
||||
if (controller.isValid(id)) {
|
||||
return handler.expunge(id);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Determine if the file exists.
|
||||
*/
|
||||
isValid: function(id) {
|
||||
return handler.isValid(id);
|
||||
},
|
||||
|
||||
getResumableFilesData: function() {
|
||||
if (handler.getResumableFilesData) {
|
||||
return handler.getResumableFilesData();
|
||||
}
|
||||
return [];
|
||||
},
|
||||
|
||||
/**
|
||||
* This may or may not be implemented, depending on the handler. For handlers where a third-party ID is
|
||||
* available (such as the "key" for Amazon S3), this will return that value. Otherwise, the return value
|
||||
* will be undefined.
|
||||
*
|
||||
* @param id Internal file ID
|
||||
* @returns {*} Some identifier used by a 3rd-party service involved in the upload process
|
||||
*/
|
||||
getThirdPartyFileId: function(id) {
|
||||
if (controller.isValid(id)) {
|
||||
return handler.getThirdPartyFileId(id);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Attempts to pause the associated upload if the specific handler supports this and the file is "valid".
|
||||
* @param id ID of the upload/file to pause
|
||||
* @returns {boolean} true if the upload was paused
|
||||
*/
|
||||
pause: function(id) {
|
||||
if (controller.isResumable(id) && handler.pause && controller.isValid(id) && handler.pause(id)) {
|
||||
connectionManager.free(id);
|
||||
handler.moveInProgressToRemaining(id);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
|
||||
// True if the file is eligible for pause/resume.
|
||||
isResumable: function(id) {
|
||||
return !!handler.isResumable && handler.isResumable(id);
|
||||
}
|
||||
});
|
||||
|
||||
qq.extend(options, o);
|
||||
log = options.log;
|
||||
chunkingPossible = options.chunking.enabled && qq.supportedFeatures.chunking;
|
||||
concurrentChunkingPossible = chunkingPossible && options.chunking.concurrent.enabled;
|
||||
|
||||
preventRetryResponse = (function() {
|
||||
var response = {};
|
||||
|
||||
response[options.preventRetryParam] = true;
|
||||
|
||||
return response;
|
||||
}());
|
||||
|
||||
upload.initHandler();
|
||||
};
|
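The controller's cancel() above treats a promise returned from the cancel callback as "cancel once this resolves" and a plain false as a veto. A minimal sketch of an integrator-side callback relying on that behavior; the endpoint value, and the assumption that FineUploaderBasic passes the callback's return value through to the controller unchanged, are illustrative and not part of this diff:

var uploader = new qq.FineUploaderBasic({
    request: { endpoint: "/server/upload" },   // illustrative endpoint
    callbacks: {
        onCancel: function(id, name) {
            var confirmation = new qq.Promise();

            // Defer the decision: success() lets the controller run upload.cancel(id),
            // while failure() (or never resolving) leaves the upload untouched.
            setTimeout(function() {
                if (window.confirm("Cancel " + name + "?")) {
                    confirmation.success();
                }
                else {
                    confirmation.failure();
                }
            }, 0);

            return confirmation;
        }
    }
});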
@ -1,64 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Common upload handler functions.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
qq.UploadHandler = function(spec) {
|
||||
"use strict";
|
||||
|
||||
var proxy = spec.proxy,
|
||||
fileState = {},
|
||||
onCancel = proxy.onCancel,
|
||||
getName = proxy.getName;
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
add: function(id, fileItem) {
|
||||
fileState[id] = fileItem;
|
||||
fileState[id].temp = {};
|
||||
},
|
||||
|
||||
cancel: function(id) {
|
||||
var self = this,
|
||||
cancelFinalizationEffort = new qq.Promise(),
|
||||
onCancelRetVal = onCancel(id, getName(id), cancelFinalizationEffort);
|
||||
|
||||
onCancelRetVal.then(function() {
|
||||
if (self.isValid(id)) {
|
||||
fileState[id].canceled = true;
|
||||
self.expunge(id);
|
||||
}
|
||||
cancelFinalizationEffort.success();
|
||||
});
|
||||
},
|
||||
|
||||
expunge: function(id) {
|
||||
delete fileState[id];
|
||||
},
|
||||
|
||||
getThirdPartyFileId: function(id) {
|
||||
return fileState[id].key;
|
||||
},
|
||||
|
||||
isValid: function(id) {
|
||||
return fileState[id] !== undefined;
|
||||
},
|
||||
|
||||
reset: function() {
|
||||
fileState = {};
|
||||
},
|
||||
|
||||
_getFileState: function(id) {
|
||||
return fileState[id];
|
||||
},
|
||||
|
||||
_setThirdPartyFileId: function(id, thirdPartyFileId) {
|
||||
fileState[id].key = thirdPartyFileId;
|
||||
},
|
||||
|
||||
_wasCanceled: function(id) {
|
||||
return !!fileState[id].canceled;
|
||||
}
|
||||
});
|
||||
};
|
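For orientation, this is roughly what a single entry in the fileState map above looks like once the XHR handler in the next deleted file has initialized its temp state and recorded a third-party key; every concrete value here is invented for illustration:

// Hypothetical snapshot of fileState[id] for one in-progress, chunked upload.
var exampleFileStateEntry = {
    file: null,                      // the File/Blob passed to add() (or {proxy: ...} for a scaled image)
    temp: {                          // filled in by the XHR handler's _initTempState
        xhrs: {},
        ajaxRequesters: {},
        chunkProgress: {},
        cachedChunks: {}
    },
    key: "uploads/2f9c-photo.jpg",   // stored via _setThirdPartyFileId (e.g. an S3 object key)
    canceled: false                  // flipped to true by cancel() just before expunge()
};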
@ -1,502 +0,0 @@
|
||||
/* globals qq */
|
||||
/**
|
||||
* Common API exposed to creators of XHR handlers. This is reused and possibly overridden in some cases by specific
|
||||
* XHR upload handlers.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
qq.XhrUploadHandler = function(spec) {
|
||||
"use strict";
|
||||
|
||||
var handler = this,
|
||||
namespace = spec.options.namespace,
|
||||
proxy = spec.proxy,
|
||||
chunking = spec.options.chunking,
|
||||
resume = spec.options.resume,
|
||||
chunkFiles = chunking && spec.options.chunking.enabled && qq.supportedFeatures.chunking,
|
||||
resumeEnabled = resume && spec.options.resume.enabled && chunkFiles && qq.supportedFeatures.resume,
|
||||
getName = proxy.getName,
|
||||
getSize = proxy.getSize,
|
||||
getUuid = proxy.getUuid,
|
||||
getEndpoint = proxy.getEndpoint,
|
||||
getDataByUuid = proxy.getDataByUuid,
|
||||
onUuidChanged = proxy.onUuidChanged,
|
||||
onProgress = proxy.onProgress,
|
||||
log = proxy.log;
|
||||
|
||||
|
||||
function abort(id) {
|
||||
qq.each(handler._getXhrs(id), function(xhrId, xhr) {
|
||||
var ajaxRequester = handler._getAjaxRequester(id, xhrId);
|
||||
|
||||
xhr.onreadystatechange = null;
|
||||
xhr.upload.onprogress = null;
|
||||
xhr.abort();
|
||||
ajaxRequester && ajaxRequester.canceled && ajaxRequester.canceled(id);
|
||||
});
|
||||
}
|
||||
|
||||
qq.extend(this, new qq.UploadHandler(spec));
|
||||
|
||||
qq.override(this, function(super_) {
|
||||
return {
|
||||
/**
|
||||
* Adds File or Blob to the queue
|
||||
**/
|
||||
add: function(id, blobOrProxy) {
|
||||
if (qq.isFile(blobOrProxy) || qq.isBlob(blobOrProxy)) {
|
||||
super_.add(id, {file: blobOrProxy});
|
||||
}
|
||||
else if (blobOrProxy instanceof qq.BlobProxy) {
|
||||
super_.add(id, {proxy: blobOrProxy});
|
||||
}
|
||||
else {
|
||||
throw new Error("Passed obj is not a File, Blob, or proxy");
|
||||
}
|
||||
|
||||
handler._initTempState(id);
|
||||
resumeEnabled && handler._maybePrepareForResume(id);
|
||||
},
|
||||
|
||||
expunge: function(id) {
|
||||
abort(id);
|
||||
handler._maybeDeletePersistedChunkData(id);
|
||||
handler._clearXhrs(id);
|
||||
super_.expunge(id);
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
qq.extend(this, {
|
||||
// Clear the cached chunk `Blob` after we are done with it, just in case the `Blob` bytes are stored in memory.
|
||||
clearCachedChunk: function(id, chunkIdx) {
|
||||
delete handler._getFileState(id).temp.cachedChunks[chunkIdx];
|
||||
},
|
||||
|
||||
clearXhr: function(id, chunkIdx) {
|
||||
var tempState = handler._getFileState(id).temp;
|
||||
|
||||
if (tempState.xhrs) {
|
||||
delete tempState.xhrs[chunkIdx];
|
||||
}
|
||||
if (tempState.ajaxRequesters) {
|
||||
delete tempState.ajaxRequesters[chunkIdx];
|
||||
}
|
||||
},
|
||||
|
||||
// Called when all chunks have been successfully uploaded. Expected promissory return type.
|
||||
// This defines the default behavior if nothing further is required when all chunks have been uploaded.
|
||||
finalizeChunks: function(id, responseParser) {
|
||||
var lastChunkIdx = handler._getTotalChunks(id) - 1,
|
||||
xhr = handler._getXhr(id, lastChunkIdx);
|
||||
|
||||
if (responseParser) {
|
||||
return new qq.Promise().success(responseParser(xhr), xhr);
|
||||
}
|
||||
|
||||
return new qq.Promise().success({}, xhr);
|
||||
},
|
||||
|
||||
getFile: function(id) {
|
||||
return handler.isValid(id) && handler._getFileState(id).file;
|
||||
},
|
||||
|
||||
getProxy: function(id) {
|
||||
return handler.isValid(id) && handler._getFileState(id).proxy;
|
||||
},
|
||||
|
||||
/**
|
||||
* @returns {Array} Array of objects containing properties useful to integrators
|
||||
* when it is important to determine which files are potentially resumable.
|
||||
*/
|
||||
getResumableFilesData: function() {
|
||||
var resumableFilesData = [];
|
||||
|
||||
handler._iterateResumeRecords(function(key, uploadData) {
|
||||
handler.moveInProgressToRemaining(null, uploadData.chunking.inProgress, uploadData.chunking.remaining);
|
||||
|
||||
var data = {
|
||||
name: uploadData.name,
|
||||
remaining: uploadData.chunking.remaining,
|
||||
size: uploadData.size,
|
||||
uuid: uploadData.uuid
|
||||
};
|
||||
|
||||
if (uploadData.key) {
|
||||
data.key = uploadData.key;
|
||||
}
|
||||
|
||||
resumableFilesData.push(data);
|
||||
});
|
||||
|
||||
return resumableFilesData;
|
||||
},
|
||||
|
||||
isResumable: function(id) {
|
||||
return !!chunking && handler.isValid(id) && !handler._getFileState(id).notResumable;
|
||||
},
|
||||
|
||||
moveInProgressToRemaining: function(id, opt_inProgress, opt_remaining) {
|
||||
var inProgress = opt_inProgress || handler._getFileState(id).chunking.inProgress,
|
||||
remaining = opt_remaining || handler._getFileState(id).chunking.remaining;
|
||||
|
||||
if (inProgress) {
|
||||
inProgress.reverse();
|
||||
qq.each(inProgress, function(idx, chunkIdx) {
|
||||
remaining.unshift(chunkIdx);
|
||||
});
|
||||
inProgress.length = 0;
|
||||
}
|
||||
},
|
||||
|
||||
pause: function(id) {
|
||||
if (handler.isValid(id)) {
|
||||
log(qq.format("Aborting XHR upload for {} '{}' due to pause instruction.", id, getName(id)));
|
||||
handler._getFileState(id).paused = true;
|
||||
abort(id);
|
||||
return true;
|
||||
}
|
||||
},
|
||||
|
||||
reevaluateChunking: function(id) {
|
||||
if (chunking && handler.isValid(id)) {
|
||||
var state = handler._getFileState(id),
|
||||
totalChunks;
|
||||
|
||||
delete state.chunking;
|
||||
|
||||
state.chunking = {};
|
||||
totalChunks = handler._getTotalChunks(id);
|
||||
if (totalChunks > 1) {
|
||||
state.chunking.enabled = true;
|
||||
state.chunking.parts = totalChunks;
|
||||
state.chunking.remaining = [];
|
||||
|
||||
for (var i = 0; i < totalChunks; i++) {
|
||||
state.chunking.remaining.push(i);
|
||||
}
|
||||
|
||||
handler._initTempState(id);
|
||||
}
|
||||
else {
|
||||
state.chunking.enabled = false;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
updateBlob: function(id, newBlob) {
|
||||
if (handler.isValid(id)) {
|
||||
handler._getFileState(id).file = newBlob;
|
||||
}
|
||||
},
|
||||
|
||||
_clearXhrs: function(id) {
|
||||
var tempState = handler._getFileState(id).temp;
|
||||
|
||||
qq.each(tempState.ajaxRequesters, function(chunkId) {
|
||||
delete tempState.ajaxRequesters[chunkId];
|
||||
});
|
||||
|
||||
qq.each(tempState.xhrs, function(chunkId) {
|
||||
delete tempState.xhrs[chunkId];
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Creates an XHR instance for this file and stores it in the fileState.
|
||||
*
|
||||
* @param id File ID
|
||||
* @param opt_chunkIdx The chunk index associated with this XHR, if applicable
|
||||
* @returns {XMLHttpRequest}
|
||||
*/
|
||||
_createXhr: function(id, opt_chunkIdx) {
|
||||
return handler._registerXhr(id, opt_chunkIdx, qq.createXhrInstance());
|
||||
},
|
||||
|
||||
_getAjaxRequester: function(id, opt_chunkIdx) {
|
||||
var chunkIdx = opt_chunkIdx == null ? -1 : opt_chunkIdx;
|
||||
return handler._getFileState(id).temp.ajaxRequesters[chunkIdx];
|
||||
},
|
||||
|
||||
_getChunkData: function(id, chunkIndex) {
|
||||
var chunkSize = chunking.partSize,
|
||||
fileSize = getSize(id),
|
||||
fileOrBlob = handler.getFile(id),
|
||||
startBytes = chunkSize * chunkIndex,
|
||||
endBytes = startBytes + chunkSize >= fileSize ? fileSize : startBytes + chunkSize,
|
||||
totalChunks = handler._getTotalChunks(id),
|
||||
cachedChunks = this._getFileState(id).temp.cachedChunks,
|
||||
|
||||
// To work around a Webkit GC bug, we must keep each chunk `Blob` in scope until we are done with it.
|
||||
// See https://github.com/Widen/fine-uploader/issues/937#issuecomment-41418760
|
||||
blob = cachedChunks[chunkIndex] || qq.sliceBlob(fileOrBlob, startBytes, endBytes);
|
||||
|
||||
cachedChunks[chunkIndex] = blob;
|
||||
|
||||
return {
|
||||
part: chunkIndex,
|
||||
start: startBytes,
|
||||
end: endBytes,
|
||||
count: totalChunks,
|
||||
blob: blob,
|
||||
size: endBytes - startBytes
|
||||
};
|
||||
},
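// Worked example of the slicing above, using the library's default partSize of
// 2000000 bytes (listed with the chunking options further down in this diff) on a
// hypothetical 5000000-byte file:
//   _getTotalChunks(id) -> Math.ceil(5000000 / 2000000) = 3
//   chunk 0: start 0,       end 2000000, size 2000000
//   chunk 1: start 2000000, end 4000000, size 2000000
//   chunk 2: start 4000000, end 5000000, size 1000000   (endBytes clamped to fileSize)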
|
||||
|
||||
_getChunkDataForCallback: function(chunkData) {
|
||||
return {
|
||||
partIndex: chunkData.part,
|
||||
startByte: chunkData.start + 1,
|
||||
endByte: chunkData.end,
|
||||
totalParts: chunkData.count
|
||||
};
|
||||
},
|
||||
|
||||
/**
|
||||
* @param id File ID
|
||||
* @returns {string} Identifier for this item that may appear in the browser's local storage
|
||||
*/
|
||||
_getLocalStorageId: function(id) {
|
||||
var formatVersion = "5.0",
|
||||
name = getName(id),
|
||||
size = getSize(id),
|
||||
chunkSize = chunking.partSize,
|
||||
endpoint = getEndpoint(id);
|
||||
|
||||
return qq.format("qq{}resume{}-{}-{}-{}-{}", namespace, formatVersion, name, size, chunkSize, endpoint);
|
||||
},
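// For example (values invented): with namespace "test", a 5000000-byte file named
// "photo.jpg", the default 2000000-byte partSize, and endpoint "/server/upload",
// the qq.format call above yields the key
//   "qqtestresume5.0-photo.jpg-5000000-2000000-/server/upload"
// assuming qq.format substitutes its "{}" placeholders in argument order.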
|
||||
|
||||
_getMimeType: function(id) {
|
||||
return handler.getFile(id).type;
|
||||
},
|
||||
|
||||
_getPersistableData: function(id) {
|
||||
return handler._getFileState(id).chunking;
|
||||
},
|
||||
|
||||
/**
|
||||
* @param id ID of the associated file
|
||||
* @returns {number} Number of parts this file can be divided into, or undefined if chunking is not supported in this UA
|
||||
*/
|
||||
_getTotalChunks: function(id) {
|
||||
if (chunking) {
|
||||
var fileSize = getSize(id),
|
||||
chunkSize = chunking.partSize;
|
||||
|
||||
return Math.ceil(fileSize / chunkSize);
|
||||
}
|
||||
},
|
||||
|
||||
_getXhr: function(id, opt_chunkIdx) {
|
||||
var chunkIdx = opt_chunkIdx == null ? -1 : opt_chunkIdx;
|
||||
return handler._getFileState(id).temp.xhrs[chunkIdx];
|
||||
},
|
||||
|
||||
_getXhrs: function(id) {
|
||||
return handler._getFileState(id).temp.xhrs;
|
||||
},
|
||||
|
||||
// Iterates through all XHR handler-created resume records (in local storage),
|
||||
// invoking the passed callback and passing in the key and value of each local storage record.
|
||||
_iterateResumeRecords: function(callback) {
|
||||
if (resumeEnabled) {
|
||||
qq.each(localStorage, function(key, item) {
|
||||
if (key.indexOf(qq.format("qq{}resume-", namespace)) === 0) {
|
||||
var uploadData = JSON.parse(item);
|
||||
callback(key, uploadData);
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
_initTempState: function(id) {
|
||||
handler._getFileState(id).temp = {
|
||||
ajaxRequesters: {},
|
||||
chunkProgress: {},
|
||||
xhrs: {},
|
||||
cachedChunks: {}
|
||||
};
|
||||
},
|
||||
|
||||
_markNotResumable: function(id) {
|
||||
handler._getFileState(id).notResumable = true;
|
||||
},
|
||||
|
||||
// Removes a chunked upload record from local storage, if possible.
|
||||
// Returns true if the item was removed, false otherwise.
|
||||
_maybeDeletePersistedChunkData: function(id) {
|
||||
var localStorageId;
|
||||
|
||||
if (resumeEnabled && handler.isResumable(id)) {
|
||||
localStorageId = handler._getLocalStorageId(id);
|
||||
|
||||
if (localStorageId && localStorage.getItem(localStorageId)) {
|
||||
localStorage.removeItem(localStorageId);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
|
||||
// If this is a resumable upload, grab the relevant data from storage and items in memory that track this upload
|
||||
// so we can pick up from where we left off.
|
||||
_maybePrepareForResume: function(id) {
|
||||
var state = handler._getFileState(id),
|
||||
localStorageId, persistedData;
|
||||
|
||||
// Resume is enabled and possible and this is the first time we've tried to upload this file in this session,
|
||||
// so prepare for a resume attempt.
|
||||
if (resumeEnabled && state.key === undefined) {
|
||||
localStorageId = handler._getLocalStorageId(id);
|
||||
persistedData = localStorage.getItem(localStorageId);
|
||||
|
||||
// If we found this item in local storage, maybe we should resume it.
|
||||
if (persistedData) {
|
||||
persistedData = JSON.parse(persistedData);
|
||||
|
||||
// If we found a resume record but we have already handled this file in this session,
|
||||
// don't try to resume it & ensure we don't persist future chunk data
|
||||
if (getDataByUuid(persistedData.uuid)) {
|
||||
handler._markNotResumable(id);
|
||||
}
|
||||
else {
|
||||
log(qq.format("Identified file with ID {} and name of {} as resumable.", id, getName(id)));
|
||||
|
||||
onUuidChanged(id, persistedData.uuid);
|
||||
|
||||
state.key = persistedData.key;
|
||||
state.chunking = persistedData.chunking;
|
||||
state.loaded = persistedData.loaded;
|
||||
state.attemptingResume = true;
|
||||
|
||||
handler.moveInProgressToRemaining(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Persist any data needed to resume this upload in a new session.
|
||||
_maybePersistChunkedState: function(id) {
|
||||
var state = handler._getFileState(id),
|
||||
localStorageId, persistedData;
|
||||
|
||||
// If local storage isn't supported by the browser, or if resume isn't enabled or possible, give up
|
||||
if (resumeEnabled && handler.isResumable(id)) {
|
||||
localStorageId = handler._getLocalStorageId(id);
|
||||
|
||||
persistedData = {
|
||||
name: getName(id),
|
||||
size: getSize(id),
|
||||
uuid: getUuid(id),
|
||||
key: state.key,
|
||||
chunking: state.chunking,
|
||||
loaded: state.loaded,
|
||||
lastUpdated: Date.now()
|
||||
};
|
||||
|
||||
localStorage.setItem(localStorageId, JSON.stringify(persistedData));
|
||||
}
|
||||
},
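// Illustrative only: the record written above for a resumable three-chunk upload
// might look like this in local storage (all values invented):
//   {
//       "name": "photo.jpg",
//       "size": 5000000,
//       "uuid": "2f9c4d2e-0000-0000-0000-000000000000",
//       "key": "uploads/2f9c-photo.jpg",
//       "chunking": { "enabled": true, "parts": 3, "inProgress": [1], "remaining": [2] },
//       "loaded": 2000000,
//       "lastUpdated": 1417000000000
//   }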
|
||||
|
||||
_registerProgressHandler: function(id, chunkIdx, chunkSize) {
|
||||
var xhr = handler._getXhr(id, chunkIdx),
|
||||
progressCalculator = {
|
||||
simple: function(loaded, total) {
|
||||
var fileSize = getSize(id);
|
||||
|
||||
if (loaded === total) {
|
||||
onProgress(id, name, fileSize, fileSize);
|
||||
}
|
||||
else {
|
||||
onProgress(id, name, (loaded >= fileSize ? fileSize-1 : loaded), fileSize);
|
||||
}
|
||||
},
|
||||
|
||||
chunked: function(loaded, total) {
|
||||
var chunkProgress = handler._getFileState(id).temp.chunkProgress,
|
||||
totalSuccessfullyLoadedForFile = handler._getFileState(id).loaded,
|
||||
loadedForRequest = loaded,
|
||||
totalForRequest = total,
|
||||
totalFileSize = getSize(id),
|
||||
estActualChunkLoaded = loadedForRequest - (totalForRequest - chunkSize),
|
||||
totalLoadedForFile = totalSuccessfullyLoadedForFile;
|
||||
|
||||
chunkProgress[chunkIdx] = estActualChunkLoaded;
|
||||
|
||||
qq.each(chunkProgress, function(chunkIdx, chunkLoaded) {
|
||||
totalLoadedForFile += chunkLoaded;
|
||||
});
|
||||
|
||||
onProgress(id, name, totalLoadedForFile, totalFileSize);
|
||||
}
|
||||
};
|
||||
|
||||
xhr.upload.onprogress = function(e) {
|
||||
if (e.lengthComputable) {
|
||||
/* jshint eqnull: true */
|
||||
var type = chunkSize == null ? "simple" : "chunked";
|
||||
progressCalculator[type](e.loaded, e.total);
|
||||
}
|
||||
};
|
||||
},
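// Worked example of the chunked calculation above (numbers invented): for a
// 2000000-byte chunk sent in a request whose total size is 2000350 bytes
// (350 bytes of multipart/header overhead), with 1000350 bytes reported loaded:
//   estActualChunkLoaded = 1000350 - (2000350 - 2000000) = 1000000
// i.e. the request overhead is subtracted so only file bytes count toward the
// per-file progress passed to onProgress.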
|
||||
|
||||
/**
|
||||
* Registers an XHR transport instance created elsewhere.
|
||||
*
|
||||
* @param id ID of the associated file
|
||||
* @param opt_chunkIdx The chunk index associated with this XHR, if applicable
|
||||
* @param xhr XMLHttpRequest object instance
|
||||
* @param opt_ajaxRequester `qq.AjaxRequester` associated with this request, if applicable.
|
||||
* @returns {XMLHttpRequest}
|
||||
*/
|
||||
_registerXhr: function(id, opt_chunkIdx, xhr, opt_ajaxRequester) {
|
||||
var xhrsId = opt_chunkIdx == null ? -1 : opt_chunkIdx,
|
||||
tempState = handler._getFileState(id).temp;
|
||||
|
||||
tempState.xhrs = tempState.xhrs || {};
|
||||
tempState.ajaxRequesters = tempState.ajaxRequesters || {};
|
||||
|
||||
tempState.xhrs[xhrsId] = xhr;
|
||||
|
||||
if (opt_ajaxRequester) {
|
||||
tempState.ajaxRequesters[xhrsId] = opt_ajaxRequester;
|
||||
}
|
||||
|
||||
return xhr;
|
||||
},
|
||||
|
||||
// Deletes any local storage records that are "expired".
|
||||
_removeExpiredChunkingRecords: function() {
|
||||
var expirationDays = resume.recordsExpireIn;
|
||||
|
||||
handler._iterateResumeRecords(function(key, uploadData) {
|
||||
var expirationDate = new Date(uploadData.lastUpdated);
|
||||
|
||||
// transform updated date into expiration date
|
||||
expirationDate.setDate(expirationDate.getDate() + expirationDays);
|
||||
|
||||
if (expirationDate.getTime() <= Date.now()) {
|
||||
log("Removing expired resume record with key " + key);
|
||||
localStorage.removeItem(key);
|
||||
}
|
||||
});
|
||||
},
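// With the default resume.recordsExpireIn of 7 days (listed with the resume
// options further down in this diff), a record whose lastUpdated timestamp is
// more than 7 days old satisfies expirationDate.getTime() <= Date.now() and is
// removed on the next pass through the resume records.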
|
||||
|
||||
/**
|
||||
* Determine if the associated file should be chunked.
|
||||
*
|
||||
* @param id ID of the associated file
|
||||
* @returns {*} true if chunking is enabled, possible, and the file can be split into more than 1 part
|
||||
*/
|
||||
_shouldChunkThisFile: function(id) {
|
||||
var state = handler._getFileState(id);
|
||||
|
||||
if (!state.chunking) {
|
||||
handler.reevaluateChunking(id);
|
||||
}
|
||||
|
||||
return state.chunking.enabled;
|
||||
}
|
||||
});
|
||||
};
|
@ -1,696 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* Defines the public API for FineUploader mode.
|
||||
*/
|
||||
(function(){
|
||||
"use strict";
|
||||
|
||||
qq.uiPublicApi = {
|
||||
clearStoredFiles: function() {
|
||||
this._parent.prototype.clearStoredFiles.apply(this, arguments);
|
||||
this._templating.clearFiles();
|
||||
},
|
||||
|
||||
addExtraDropzone: function(element){
|
||||
this._dnd && this._dnd.setupExtraDropzone(element);
|
||||
},
|
||||
|
||||
removeExtraDropzone: function(element){
|
||||
if (this._dnd) {
|
||||
return this._dnd.removeDropzone(element);
|
||||
}
|
||||
},
|
||||
|
||||
getItemByFileId: function(id) {
|
||||
return this._templating.getFileContainer(id);
|
||||
},
|
||||
|
||||
reset: function() {
|
||||
this._parent.prototype.reset.apply(this, arguments);
|
||||
this._templating.reset();
|
||||
|
||||
if (!this._options.button && this._templating.getButton()) {
|
||||
this._defaultButtonId = this._createUploadButton({element: this._templating.getButton()}).getButtonId();
|
||||
}
|
||||
|
||||
if (this._dnd) {
|
||||
this._dnd.dispose();
|
||||
this._dnd = this._setupDragAndDrop();
|
||||
}
|
||||
|
||||
this._totalFilesInBatch = 0;
|
||||
this._filesInBatchAddedToUi = 0;
|
||||
|
||||
this._setupClickAndEditEventHandlers();
|
||||
},
|
||||
|
||||
setName: function(id, newName) {
|
||||
var formattedFilename = this._options.formatFileName(newName);
|
||||
|
||||
this._parent.prototype.setName.apply(this, arguments);
|
||||
this._templating.updateFilename(id, formattedFilename);
|
||||
},
|
||||
|
||||
pauseUpload: function(id) {
|
||||
var paused = this._parent.prototype.pauseUpload.apply(this, arguments);
|
||||
|
||||
paused && this._templating.uploadPaused(id);
|
||||
return paused;
|
||||
},
|
||||
|
||||
continueUpload: function(id) {
|
||||
var continued = this._parent.prototype.continueUpload.apply(this, arguments);
|
||||
|
||||
continued && this._templating.uploadContinued(id);
|
||||
return continued;
|
||||
},
|
||||
|
||||
getId: function(fileContainerOrChildEl) {
|
||||
return this._templating.getFileId(fileContainerOrChildEl);
|
||||
},
|
||||
|
||||
getDropTarget: function(fileId) {
|
||||
var file = this.getFile(fileId);
|
||||
|
||||
return file.qqDropTarget;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Defines the private (internal) API for FineUploader mode.
|
||||
*/
|
||||
qq.uiPrivateApi = {
|
||||
_getButton: function(buttonId) {
|
||||
var button = this._parent.prototype._getButton.apply(this, arguments);
|
||||
|
||||
if (!button) {
|
||||
if (buttonId === this._defaultButtonId) {
|
||||
button = this._templating.getButton();
|
||||
}
|
||||
}
|
||||
|
||||
return button;
|
||||
},
|
||||
|
||||
_removeFileItem: function(fileId) {
|
||||
this._templating.removeFile(fileId);
|
||||
},
|
||||
|
||||
_setupClickAndEditEventHandlers: function() {
|
||||
this._fileButtonsClickHandler = qq.FileButtonsClickHandler && this._bindFileButtonsClickEvent();
|
||||
|
||||
// A better approach would be to check specifically for focusin event support by querying the DOM API,
|
||||
// but the DOMFocusIn event is not exposed as a property, so we have to resort to UA string sniffing.
|
||||
this._focusinEventSupported = !qq.firefox();
|
||||
|
||||
if (this._isEditFilenameEnabled())
|
||||
{
|
||||
this._filenameClickHandler = this._bindFilenameClickEvent();
|
||||
this._filenameInputFocusInHandler = this._bindFilenameInputFocusInEvent();
|
||||
this._filenameInputFocusHandler = this._bindFilenameInputFocusEvent();
|
||||
}
|
||||
},
|
||||
|
||||
_setupDragAndDrop: function() {
|
||||
var self = this,
|
||||
dropZoneElements = this._options.dragAndDrop.extraDropzones,
|
||||
templating = this._templating,
|
||||
defaultDropZone = templating.getDropZone();
|
||||
|
||||
defaultDropZone && dropZoneElements.push(defaultDropZone);
|
||||
|
||||
return new qq.DragAndDrop({
|
||||
dropZoneElements: dropZoneElements,
|
||||
allowMultipleItems: this._options.multiple,
|
||||
classes: {
|
||||
dropActive: this._options.classes.dropActive
|
||||
},
|
||||
callbacks: {
|
||||
processingDroppedFiles: function() {
|
||||
templating.showDropProcessing();
|
||||
},
|
||||
processingDroppedFilesComplete: function(files, targetEl) {
|
||||
templating.hideDropProcessing();
|
||||
|
||||
qq.each(files, function(idx, file) {
|
||||
file.qqDropTarget = targetEl;
|
||||
});
|
||||
|
||||
if (files.length) {
|
||||
self.addFiles(files, null, null);
|
||||
}
|
||||
},
|
||||
dropError: function(code, errorData) {
|
||||
self._itemError(code, errorData);
|
||||
},
|
||||
dropLog: function(message, level) {
|
||||
self.log(message, level);
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_bindFileButtonsClickEvent: function() {
|
||||
var self = this;
|
||||
|
||||
return new qq.FileButtonsClickHandler({
|
||||
templating: this._templating,
|
||||
|
||||
log: function(message, lvl) {
|
||||
self.log(message, lvl);
|
||||
},
|
||||
|
||||
onDeleteFile: function(fileId) {
|
||||
self.deleteFile(fileId);
|
||||
},
|
||||
|
||||
onCancel: function(fileId) {
|
||||
self.cancel(fileId);
|
||||
},
|
||||
|
||||
onRetry: function(fileId) {
|
||||
qq(self._templating.getFileContainer(fileId)).removeClass(self._classes.retryable);
|
||||
self._templating.hideRetry(fileId);
|
||||
self.retry(fileId);
|
||||
},
|
||||
|
||||
onPause: function(fileId) {
|
||||
self.pauseUpload(fileId);
|
||||
},
|
||||
|
||||
onContinue: function(fileId) {
|
||||
self.continueUpload(fileId);
|
||||
},
|
||||
|
||||
onGetName: function(fileId) {
|
||||
return self.getName(fileId);
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_isEditFilenameEnabled: function() {
|
||||
/*jshint -W014 */
|
||||
return this._templating.isEditFilenamePossible()
|
||||
&& !this._options.autoUpload
|
||||
&& qq.FilenameClickHandler
|
||||
&& qq.FilenameInputFocusInHandler
|
||||
&& qq.FilenameInputFocusHandler;
|
||||
},
|
||||
|
||||
_filenameEditHandler: function() {
|
||||
var self = this,
|
||||
templating = this._templating;
|
||||
|
||||
return {
|
||||
templating: templating,
|
||||
log: function(message, lvl) {
|
||||
self.log(message, lvl);
|
||||
},
|
||||
onGetUploadStatus: function(fileId) {
|
||||
return self.getUploads({id: fileId}).status;
|
||||
},
|
||||
onGetName: function(fileId) {
|
||||
return self.getName(fileId);
|
||||
},
|
||||
onSetName: function(id, newName) {
|
||||
self.setName(id, newName);
|
||||
},
|
||||
onEditingStatusChange: function(id, isEditing) {
|
||||
var qqInput = qq(templating.getEditInput(id)),
|
||||
qqFileContainer = qq(templating.getFileContainer(id));
|
||||
|
||||
if (isEditing) {
|
||||
qqInput.addClass("qq-editing");
|
||||
templating.hideFilename(id);
|
||||
templating.hideEditIcon(id);
|
||||
}
|
||||
else {
|
||||
qqInput.removeClass("qq-editing");
|
||||
templating.showFilename(id);
|
||||
templating.showEditIcon(id);
|
||||
}
|
||||
|
||||
// Force IE8 and older to repaint
|
||||
qqFileContainer.addClass("qq-temp").removeClass("qq-temp");
|
||||
}
|
||||
};
|
||||
},
|
||||
|
||||
_onUploadStatusChange: function(id, oldStatus, newStatus) {
|
||||
this._parent.prototype._onUploadStatusChange.apply(this, arguments);
|
||||
|
||||
if (this._isEditFilenameEnabled()) {
|
||||
// Status for a file exists before it has been added to the DOM, so we must be careful here.
|
||||
if (this._templating.getFileContainer(id) && newStatus !== qq.status.SUBMITTED) {
|
||||
this._templating.markFilenameEditable(id);
|
||||
this._templating.hideEditIcon(id);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
_bindFilenameInputFocusInEvent: function() {
|
||||
var spec = qq.extend({}, this._filenameEditHandler());
|
||||
|
||||
return new qq.FilenameInputFocusInHandler(spec);
|
||||
},
|
||||
|
||||
_bindFilenameInputFocusEvent: function() {
|
||||
var spec = qq.extend({}, this._filenameEditHandler());
|
||||
|
||||
return new qq.FilenameInputFocusHandler(spec);
|
||||
},
|
||||
|
||||
_bindFilenameClickEvent: function() {
|
||||
var spec = qq.extend({}, this._filenameEditHandler());
|
||||
|
||||
return new qq.FilenameClickHandler(spec);
|
||||
},
|
||||
|
||||
_storeForLater: function(id) {
|
||||
this._parent.prototype._storeForLater.apply(this, arguments);
|
||||
this._templating.hideSpinner(id);
|
||||
},
|
||||
|
||||
_onAllComplete: function(successful, failed) {
|
||||
this._parent.prototype._onAllComplete.apply(this, arguments);
|
||||
this._templating.resetTotalProgress();
|
||||
},
|
||||
|
||||
_onSubmit: function(id, name) {
|
||||
var file = this.getFile(id);
|
||||
|
||||
if (file && file.qqPath && this._options.dragAndDrop.reportDirectoryPaths) {
|
||||
this._paramsStore.addReadOnly(id, {
|
||||
qqpath: file.qqPath
|
||||
});
|
||||
}
|
||||
|
||||
this._parent.prototype._onSubmit.apply(this, arguments);
|
||||
this._addToList(id, name);
|
||||
},
|
||||
|
||||
// The file item has been added to the DOM.
|
||||
_onSubmitted: function(id) {
|
||||
// If the edit filename feature is enabled, mark the filename element as "editable" and show the associated edit icon
|
||||
if (this._isEditFilenameEnabled()) {
|
||||
this._templating.markFilenameEditable(id);
|
||||
this._templating.showEditIcon(id);
|
||||
|
||||
// If the focusin event is not supported, we must add a focus handler to the newly created edit filename text input
|
||||
if (!this._focusinEventSupported) {
|
||||
this._filenameInputFocusHandler.addHandler(this._templating.getEditInput(id));
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Update the progress bar & percentage as the file is uploaded
|
||||
_onProgress: function(id, name, loaded, total){
|
||||
this._parent.prototype._onProgress.apply(this, arguments);
|
||||
|
||||
this._templating.updateProgress(id, loaded, total);
|
||||
|
||||
if (Math.round(loaded / total * 100) === 100) {
|
||||
this._templating.hideCancel(id);
|
||||
this._templating.hidePause(id);
|
||||
this._templating.hideProgress(id);
|
||||
this._templating.setStatusText(id, this._options.text.waitingForResponse);
|
||||
|
||||
// If ~last byte was sent, display total file size
|
||||
this._displayFileSize(id);
|
||||
}
|
||||
else {
|
||||
// If still uploading, display percentage - total size is actually the total request(s) size
|
||||
this._displayFileSize(id, loaded, total);
|
||||
}
|
||||
},
|
||||
|
||||
_onTotalProgress: function(loaded, total) {
|
||||
this._parent.prototype._onTotalProgress.apply(this, arguments);
|
||||
this._templating.updateTotalProgress(loaded, total);
|
||||
},
|
||||
|
||||
_onComplete: function(id, name, result, xhr) {
|
||||
var parentRetVal = this._parent.prototype._onComplete.apply(this, arguments),
|
||||
templating = this._templating,
|
||||
fileContainer = templating.getFileContainer(id),
|
||||
self = this;
|
||||
|
||||
function completeUpload(result) {
|
||||
// If this file is not represented in the templating module, perhaps it was hidden intentionally.
|
||||
// If so, don't perform any UI-related tasks related to this file.
|
||||
if (!fileContainer) {
|
||||
return;
|
||||
}
|
||||
|
||||
templating.setStatusText(id);
|
||||
|
||||
qq(fileContainer).removeClass(self._classes.retrying);
|
||||
templating.hideProgress(id);
|
||||
|
||||
if (!self._options.disableCancelForFormUploads || qq.supportedFeatures.ajaxUploading) {
|
||||
templating.hideCancel(id);
|
||||
}
|
||||
templating.hideSpinner(id);
|
||||
|
||||
if (result.success) {
|
||||
self._markFileAsSuccessful(id);
|
||||
}
|
||||
else {
|
||||
qq(fileContainer).addClass(self._classes.fail);
|
||||
|
||||
if (templating.isRetryPossible() && !self._preventRetries[id]) {
|
||||
qq(fileContainer).addClass(self._classes.retryable);
|
||||
templating.showRetry(id);
|
||||
}
|
||||
self._controlFailureTextDisplay(id, result);
|
||||
}
|
||||
}
|
||||
|
||||
// The parent may need to perform some async operation before we can accurately determine the status of the upload.
|
||||
if (parentRetVal instanceof qq.Promise) {
|
||||
parentRetVal.done(function(newResult) {
|
||||
completeUpload(newResult);
|
||||
});
|
||||
|
||||
}
|
||||
else {
|
||||
completeUpload(result);
|
||||
}
|
||||
|
||||
return parentRetVal;
|
||||
},
|
||||
|
||||
_markFileAsSuccessful: function(id) {
|
||||
var templating = this._templating;
|
||||
|
||||
if (this._isDeletePossible()) {
|
||||
templating.showDeleteButton(id);
|
||||
}
|
||||
|
||||
qq(templating.getFileContainer(id)).addClass(this._classes.success);
|
||||
|
||||
this._maybeUpdateThumbnail(id);
|
||||
},
|
||||
|
||||
_onUploadPrep: function(id) {
|
||||
this._parent.prototype._onUploadPrep.apply(this, arguments);
|
||||
this._templating.showSpinner(id);
|
||||
},
|
||||
|
||||
_onUpload: function(id, name){
|
||||
var parentRetVal = this._parent.prototype._onUpload.apply(this, arguments);
|
||||
|
||||
this._templating.showSpinner(id);
|
||||
|
||||
return parentRetVal;
|
||||
},
|
||||
|
||||
_onUploadChunk: function(id, chunkData) {
|
||||
this._parent.prototype._onUploadChunk.apply(this, arguments);
|
||||
|
||||
// Only display the pause button if we have finished uploading at least one chunk
|
||||
// & this file can be resumed
|
||||
if (chunkData.partIndex > 0 && this._handler.isResumable(id)) {
|
||||
this._templating.allowPause(id);
|
||||
}
|
||||
},
|
||||
|
||||
_onCancel: function(id, name) {
|
||||
this._parent.prototype._onCancel.apply(this, arguments);
|
||||
this._removeFileItem(id);
|
||||
|
||||
if (this._getNotFinished() === 0) {
|
||||
this._templating.resetTotalProgress();
|
||||
}
|
||||
},
|
||||
|
||||
_onBeforeAutoRetry: function(id) {
|
||||
var retryNumForDisplay, maxAuto, retryNote;
|
||||
|
||||
this._parent.prototype._onBeforeAutoRetry.apply(this, arguments);
|
||||
|
||||
this._showCancelLink(id);
|
||||
|
||||
if (this._options.retry.showAutoRetryNote) {
|
||||
retryNumForDisplay = this._autoRetries[id];
|
||||
maxAuto = this._options.retry.maxAutoAttempts;
|
||||
|
||||
retryNote = this._options.retry.autoRetryNote.replace(/\{retryNum\}/g, retryNumForDisplay);
|
||||
retryNote = retryNote.replace(/\{maxAuto\}/g, maxAuto);
|
||||
|
||||
this._templating.setStatusText(id, retryNote);
|
||||
qq(this._templating.getFileContainer(id)).addClass(this._classes.retrying);
|
||||
}
|
||||
},
|
||||
|
||||
//return false if we should not attempt the requested retry
|
||||
_onBeforeManualRetry: function(id) {
|
||||
if (this._parent.prototype._onBeforeManualRetry.apply(this, arguments)) {
|
||||
this._templating.resetProgress(id);
|
||||
qq(this._templating.getFileContainer(id)).removeClass(this._classes.fail);
|
||||
this._templating.setStatusText(id);
|
||||
this._templating.showSpinner(id);
|
||||
this._showCancelLink(id);
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
qq(this._templating.getFileContainer(id)).addClass(this._classes.retryable);
|
||||
this._templating.showRetry(id);
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
_onSubmitDelete: function(id) {
|
||||
var onSuccessCallback = qq.bind(this._onSubmitDeleteSuccess, this);
|
||||
|
||||
this._parent.prototype._onSubmitDelete.call(this, id, onSuccessCallback);
|
||||
},
|
||||
|
||||
_onSubmitDeleteSuccess: function(id, uuid, additionalMandatedParams) {
|
||||
if (this._options.deleteFile.forceConfirm) {
|
||||
this._showDeleteConfirm.apply(this, arguments);
|
||||
}
|
||||
else {
|
||||
this._sendDeleteRequest.apply(this, arguments);
|
||||
}
|
||||
},
|
||||
|
||||
_onDeleteComplete: function(id, xhr, isError) {
|
||||
this._parent.prototype._onDeleteComplete.apply(this, arguments);
|
||||
|
||||
this._templating.hideSpinner(id);
|
||||
|
||||
if (isError) {
|
||||
this._templating.setStatusText(id, this._options.deleteFile.deletingFailedText);
|
||||
this._templating.showDeleteButton(id);
|
||||
}
|
||||
else {
|
||||
this._removeFileItem(id);
|
||||
}
|
||||
},
|
||||
|
||||
_sendDeleteRequest: function(id, uuid, additionalMandatedParams) {
|
||||
this._templating.hideDeleteButton(id);
|
||||
this._templating.showSpinner(id);
|
||||
this._templating.setStatusText(id, this._options.deleteFile.deletingStatusText);
|
||||
this._deleteHandler.sendDelete.apply(this, arguments);
|
||||
},
|
||||
|
||||
_showDeleteConfirm: function(id, uuid, mandatedParams) {
|
||||
/*jshint -W004 */
|
||||
var fileName = this.getName(id),
|
||||
confirmMessage = this._options.deleteFile.confirmMessage.replace(/\{filename\}/g, fileName),
|
||||
uuid = this.getUuid(id),
|
||||
deleteRequestArgs = arguments,
|
||||
self = this,
|
||||
retVal;
|
||||
|
||||
retVal = this._options.showConfirm(confirmMessage);
|
||||
|
||||
if (qq.isGenericPromise(retVal)) {
|
||||
retVal.then(function () {
|
||||
self._sendDeleteRequest.apply(self, deleteRequestArgs);
|
||||
});
|
||||
}
|
||||
else if (retVal !== false) {
|
||||
self._sendDeleteRequest.apply(self, deleteRequestArgs);
|
||||
}
|
||||
},
|
||||
|
||||
_addToList: function(id, name, canned) {
|
||||
var prependData,
|
||||
prependIndex = 0,
|
||||
dontDisplay = this._handler.isProxied(id) && this._options.scaling.hideScaled;
|
||||
|
||||
// If we don't want this file to appear in the UI, skip all of this UI-related logic.
|
||||
if (dontDisplay) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this._options.display.prependFiles) {
|
||||
if (this._totalFilesInBatch > 1 && this._filesInBatchAddedToUi > 0) {
|
||||
prependIndex = this._filesInBatchAddedToUi - 1;
|
||||
}
|
||||
|
||||
prependData = {
|
||||
index: prependIndex
|
||||
};
|
||||
}
|
||||
|
||||
if (!canned) {
|
||||
if (this._options.disableCancelForFormUploads && !qq.supportedFeatures.ajaxUploading) {
|
||||
this._templating.disableCancel();
|
||||
}
|
||||
|
||||
if (!this._options.multiple) {
|
||||
this._handler.cancelAll();
|
||||
this._clearList();
|
||||
}
|
||||
}
|
||||
|
||||
this._templating.addFile(id, this._options.formatFileName(name), prependData);
|
||||
|
||||
if (canned) {
|
||||
this._thumbnailUrls[id] && this._templating.updateThumbnail(id, this._thumbnailUrls[id], true);
|
||||
}
|
||||
else {
|
||||
this._templating.generatePreview(id, this.getFile(id));
|
||||
}
|
||||
|
||||
this._filesInBatchAddedToUi += 1;
|
||||
|
||||
if (canned ||
|
||||
(this._options.display.fileSizeOnSubmit && qq.supportedFeatures.ajaxUploading)) {
|
||||
|
||||
this._displayFileSize(id);
|
||||
}
|
||||
},
|
||||
|
||||
_clearList: function(){
|
||||
this._templating.clearFiles();
|
||||
this.clearStoredFiles();
|
||||
},
|
||||
|
||||
_displayFileSize: function(id, loadedSize, totalSize) {
|
||||
var size = this.getSize(id),
|
||||
sizeForDisplay = this._formatSize(size);
|
||||
|
||||
if (size >= 0) {
|
||||
if (loadedSize !== undefined && totalSize !== undefined) {
|
||||
sizeForDisplay = this._formatProgress(loadedSize, totalSize);
|
||||
}
|
||||
|
||||
this._templating.updateSize(id, sizeForDisplay);
|
||||
}
|
||||
},
|
||||
|
||||
_formatProgress: function (uploadedSize, totalSize) {
|
||||
var message = this._options.text.formatProgress;
|
||||
function r(name, replacement) { message = message.replace(name, replacement); }
|
||||
|
||||
r("{percent}", Math.round(uploadedSize / totalSize * 100));
|
||||
r("{total_size}", this._formatSize(totalSize));
|
||||
return message;
|
||||
},
|
||||
|
||||
_controlFailureTextDisplay: function(id, response) {
|
||||
var mode, maxChars, responseProperty, failureReason, shortFailureReason;
|
||||
|
||||
mode = this._options.failedUploadTextDisplay.mode;
|
||||
maxChars = this._options.failedUploadTextDisplay.maxChars;
|
||||
responseProperty = this._options.failedUploadTextDisplay.responseProperty;
|
||||
|
||||
if (mode === "custom") {
|
||||
failureReason = response[responseProperty];
|
||||
if (failureReason) {
|
||||
if (failureReason.length > maxChars) {
|
||||
shortFailureReason = failureReason.substring(0, maxChars) + "...";
|
||||
}
|
||||
}
|
||||
else {
|
||||
failureReason = this._options.text.failUpload;
|
||||
}
|
||||
|
||||
this._templating.setStatusText(id, shortFailureReason || failureReason);
|
||||
|
||||
if (this._options.failedUploadTextDisplay.enableTooltip) {
|
||||
this._showTooltip(id, failureReason);
|
||||
}
|
||||
}
|
||||
else if (mode === "default") {
|
||||
this._templating.setStatusText(id, this._options.text.failUpload);
|
||||
}
|
||||
else if (mode !== "none") {
|
||||
this.log("failedUploadTextDisplay.mode value of '" + mode + "' is not valid", "warn");
|
||||
}
|
||||
},
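// Sketch of how the "custom" mode above is driven (option names are the defaults
// listed later in this diff; the server payload is invented). Given a failure
// response such as {"success": false, "error": "File was rejected by virus scan"},
// the uploader would be configured with:
//   failedUploadTextDisplay: {
//       mode: "custom",
//       responseProperty: "error",   // response field whose value is displayed
//       maxChars: 50,                // longer reasons are truncated with "..."
//       enableTooltip: true          // full reason is set as the item's title attribute
//   }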
|
||||
|
||||
_showTooltip: function(id, text) {
|
||||
this._templating.getFileContainer(id).title = text;
|
||||
},
|
||||
|
||||
_showCancelLink: function(id) {
|
||||
if (!this._options.disableCancelForFormUploads || qq.supportedFeatures.ajaxUploading) {
|
||||
this._templating.showCancel(id);
|
||||
}
|
||||
},
|
||||
|
||||
_itemError: function(code, name, item) {
|
||||
var message = this._parent.prototype._itemError.apply(this, arguments);
|
||||
this._options.showMessage(message);
|
||||
},
|
||||
|
||||
_batchError: function(message) {
|
||||
this._parent.prototype._batchError.apply(this, arguments);
|
||||
this._options.showMessage(message);
|
||||
},
|
||||
|
||||
_setupPastePrompt: function() {
|
||||
var self = this;
|
||||
|
||||
this._options.callbacks.onPasteReceived = function() {
|
||||
var message = self._options.paste.namePromptMessage,
|
||||
defaultVal = self._options.paste.defaultName;
|
||||
|
||||
return self._options.showPrompt(message, defaultVal);
|
||||
};
|
||||
},
|
||||
|
||||
_fileOrBlobRejected: function(id, name) {
|
||||
this._totalFilesInBatch -= 1;
|
||||
this._parent.prototype._fileOrBlobRejected.apply(this, arguments);
|
||||
},
|
||||
|
||||
_prepareItemsForUpload: function(items, params, endpoint) {
|
||||
this._totalFilesInBatch = items.length;
|
||||
this._filesInBatchAddedToUi = 0;
|
||||
this._parent.prototype._prepareItemsForUpload.apply(this, arguments);
|
||||
},
|
||||
|
||||
_maybeUpdateThumbnail: function(fileId) {
|
||||
var thumbnailUrl = this._thumbnailUrls[fileId];
|
||||
|
||||
this._templating.updateThumbnail(fileId, thumbnailUrl);
|
||||
},
|
||||
|
||||
_addCannedFile: function(sessionData) {
|
||||
var id = this._parent.prototype._addCannedFile.apply(this, arguments);
|
||||
|
||||
this._addToList(id, this.getName(id), true);
|
||||
this._templating.hideSpinner(id);
|
||||
this._templating.hideCancel(id);
|
||||
this._markFileAsSuccessful(id);
|
||||
|
||||
return id;
|
||||
},
|
||||
|
||||
_setSize: function(id, newSize) {
|
||||
this._parent.prototype._setSize.apply(this, arguments);
|
||||
|
||||
this._templating.updateSize(id, this._formatSize(newSize));
|
||||
}
|
||||
};
|
||||
}());
|
@ -1,296 +0,0 @@
|
||||
/*globals qq*/
|
||||
(function(){
|
||||
"use strict";
|
||||
|
||||
qq.FineUploaderBasic = function(o) {
|
||||
var self = this;
|
||||
|
||||
// These options define FineUploaderBasic mode.
|
||||
this._options = {
|
||||
debug: false,
|
||||
button: null,
|
||||
multiple: true,
|
||||
maxConnections: 3,
|
||||
disableCancelForFormUploads: false,
|
||||
autoUpload: true,
|
||||
|
||||
request: {
|
||||
endpoint: "/server/upload",
|
||||
params: {},
|
||||
paramsInBody: true,
|
||||
customHeaders: {},
|
||||
forceMultipart: true,
|
||||
inputName: "qqfile",
|
||||
uuidName: "qquuid",
|
||||
totalFileSizeName: "qqtotalfilesize",
|
||||
filenameParam: "qqfilename"
|
||||
},
|
||||
|
||||
validation: {
|
||||
allowedExtensions: [],
|
||||
sizeLimit: 0,
|
||||
minSizeLimit: 0,
|
||||
itemLimit: 0,
|
||||
stopOnFirstInvalidFile: true,
|
||||
acceptFiles: null,
|
||||
image: {
|
||||
maxHeight: 0,
|
||||
maxWidth: 0,
|
||||
minHeight: 0,
|
||||
minWidth: 0
|
||||
}
|
||||
},
|
||||
|
||||
callbacks: {
|
||||
onSubmit: function(id, name){},
|
||||
onSubmitted: function(id, name){},
|
||||
onComplete: function(id, name, responseJSON, maybeXhr){},
|
||||
onAllComplete: function(successful, failed) {},
|
||||
onCancel: function(id, name){},
|
||||
onUpload: function(id, name){},
|
||||
onUploadChunk: function(id, name, chunkData){},
|
||||
onUploadChunkSuccess: function(id, chunkData, responseJSON, xhr){},
|
||||
onResume: function(id, fileName, chunkData){},
|
||||
onProgress: function(id, name, loaded, total){},
|
||||
onTotalProgress: function(loaded, total){},
|
||||
onError: function(id, name, reason, maybeXhrOrXdr) {},
|
||||
onAutoRetry: function(id, name, attemptNumber) {},
|
||||
onManualRetry: function(id, name) {},
|
||||
onValidateBatch: function(fileOrBlobData) {},
|
||||
onValidate: function(fileOrBlobData) {},
|
||||
onSubmitDelete: function(id) {},
|
||||
onDelete: function(id){},
|
||||
onDeleteComplete: function(id, xhrOrXdr, isError){},
|
||||
onPasteReceived: function(blob) {},
|
||||
onStatusChange: function(id, oldStatus, newStatus) {},
|
||||
onSessionRequestComplete: function(response, success, xhrOrXdr) {}
|
||||
},
|
||||
|
||||
messages: {
|
||||
typeError: "{file} has an invalid extension. Valid extension(s): {extensions}.",
|
||||
sizeError: "{file} is too large, maximum file size is {sizeLimit}.",
|
||||
minSizeError: "{file} is too small, minimum file size is {minSizeLimit}.",
|
||||
emptyError: "{file} is empty, please select files again without it.",
|
||||
noFilesError: "No files to upload.",
|
||||
tooManyItemsError: "Too many items ({netItems}) would be uploaded. Item limit is {itemLimit}.",
|
||||
maxHeightImageError: "Image is too tall.",
|
||||
maxWidthImageError: "Image is too wide.",
|
||||
minHeightImageError: "Image is not tall enough.",
|
||||
minWidthImageError: "Image is not wide enough.",
|
||||
retryFailTooManyItems: "Retry failed - you have reached your file limit.",
|
||||
onLeave: "The files are being uploaded, if you leave now the upload will be canceled."
|
||||
},
|
||||
|
||||
retry: {
|
||||
enableAuto: false,
|
||||
maxAutoAttempts: 3,
|
||||
autoAttemptDelay: 5,
|
||||
preventRetryResponseProperty: "preventRetry"
|
||||
},
|
||||
|
||||
classes: {
|
||||
buttonHover: "qq-upload-button-hover",
|
||||
buttonFocus: "qq-upload-button-focus"
|
||||
},
|
||||
|
||||
chunking: {
|
||||
enabled: false,
|
||||
concurrent: {
|
||||
enabled: false
|
||||
},
|
||||
paramNames: {
|
||||
partIndex: "qqpartindex",
|
||||
partByteOffset: "qqpartbyteoffset",
|
||||
chunkSize: "qqchunksize",
|
||||
totalFileSize: "qqtotalfilesize",
|
||||
totalParts: "qqtotalparts"
|
||||
},
|
||||
partSize: 2000000,
|
||||
// only relevant for traditional endpoints, only required when concurrent.enabled === true
|
||||
success: {
|
||||
endpoint: null
|
||||
}
|
||||
},
|
||||
|
||||
resume: {
|
||||
enabled: false,
|
||||
recordsExpireIn: 7, //days
|
||||
paramNames: {
|
||||
resuming: "qqresume"
|
||||
}
|
||||
},
|
||||
|
||||
formatFileName: function(fileOrBlobName) {
|
||||
if (fileOrBlobName !== undefined && fileOrBlobName.length > 33) {
|
||||
fileOrBlobName = fileOrBlobName.slice(0, 19) + "..." + fileOrBlobName.slice(-14);
|
||||
}
|
||||
return fileOrBlobName;
|
||||
},
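// For example, the default formatter above shortens a 46-character name:
//   formatFileName("very_long_example_filename_2014_edition.tar.gz")
//     -> "very_long_example_f...edition.tar.gz"
// (first 19 characters + "..." + last 14 characters, once the name exceeds 33).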
|
||||
|
||||
text: {
|
||||
defaultResponseError: "Upload failure reason unknown",
|
||||
sizeSymbols: ["kB", "MB", "GB", "TB", "PB", "EB"]
|
||||
},
|
||||
|
||||
deleteFile : {
|
||||
enabled: false,
|
||||
method: "DELETE",
|
||||
endpoint: "/server/upload",
|
||||
customHeaders: {},
|
||||
params: {}
|
||||
},
|
||||
|
||||
cors: {
|
||||
expected: false,
|
||||
sendCredentials: false,
|
||||
allowXdr: false
|
||||
},
|
||||
|
||||
blobs: {
|
||||
defaultName: "misc_data"
|
||||
},
|
||||
|
||||
paste: {
|
||||
targetElement: null,
|
||||
defaultName: "pasted_image"
|
||||
},
|
||||
|
||||
camera: {
|
||||
ios: false,
|
||||
|
||||
// if ios is true: a null button means target the default button; otherwise, target the specified button
|
||||
button: null
|
||||
},
|
||||
|
||||
// This refers to additional upload buttons to be handled by Fine Uploader.
|
||||
// Each element is an object, containing `element` as the only required
|
||||
// property. The `element` must be a container that will ultimately
|
||||
// contain an invisible `<input type="file">` created by Fine Uploader.
|
||||
// Optional properties of each object include `multiple`, `validation`,
|
||||
// and `folders`.
|
||||
extraButtons: [],
|
||||
|
||||
// Depends on the session module. Used to query the server for an initial file list
|
||||
// during initialization and optionally after a `reset`.
|
||||
session: {
|
||||
endpoint: null,
|
||||
params: {},
|
||||
customHeaders: {},
|
||||
refreshOnReset: true
|
||||
},
|
||||
|
||||
// Send parameters associated with an existing form along with the files
|
||||
form: {
|
||||
// Element ID, HTMLElement, or null
|
||||
element: "qq-form",
|
||||
|
||||
// Overrides the base `autoUpload`, unless `element` is null.
|
||||
autoUpload: false,
|
||||
|
||||
// true = upload files on form submission (and squelch submit event)
|
||||
interceptSubmit: true
|
||||
},
|
||||
|
||||
// scale images client side, upload a new file for each scaled version
|
||||
scaling: {
|
||||
// send the original file as well
|
||||
sendOriginal: true,
|
||||
|
||||
// fix orientation for scaled images
|
||||
orient: true,
|
||||
|
||||
// If null, scaled image type will match reference image type. This value will be referred to
|
||||
// for any size record that does not specify a type.
|
||||
defaultType: null,
|
||||
|
||||
defaultQuality: 80,
|
||||
|
||||
failureText: "Failed to scale",
|
||||
|
||||
includeExif: false,
|
||||
|
||||
// metadata about each requested scaled version
|
||||
sizes: []
|
||||
}
|
||||
};
|
||||
|
||||
// Replace any default options with user defined ones
|
||||
qq.extend(this._options, o, true);
|
||||
|
||||
this._buttons = [];
|
||||
this._extraButtonSpecs = {};
|
||||
this._buttonIdsForFileIds = [];
|
||||
|
||||
this._wrapCallbacks();
|
||||
this._disposeSupport = new qq.DisposeSupport();
|
||||
|
||||
this._storedIds = [];
|
||||
this._autoRetries = [];
|
||||
this._retryTimeouts = [];
|
||||
this._preventRetries = [];
|
||||
this._thumbnailUrls = [];
|
||||
|
||||
this._netUploadedOrQueued = 0;
|
||||
this._netUploaded = 0;
|
||||
this._uploadData = this._createUploadDataTracker();
|
||||
|
||||
this._initFormSupportAndParams();
|
||||
|
||||
this._customHeadersStore = this._createStore(this._options.request.customHeaders);
|
||||
this._deleteFileCustomHeadersStore = this._createStore(this._options.deleteFile.customHeaders);
|
||||
|
||||
this._deleteFileParamsStore = this._createStore(this._options.deleteFile.params);
|
||||
|
||||
this._endpointStore = this._createStore(this._options.request.endpoint);
|
||||
this._deleteFileEndpointStore = this._createStore(this._options.deleteFile.endpoint);
|
||||
|
||||
this._handler = this._createUploadHandler();
|
||||
|
||||
this._deleteHandler = qq.DeleteFileAjaxRequester && this._createDeleteHandler();
|
||||
|
||||
if (this._options.button) {
|
||||
this._defaultButtonId = this._createUploadButton({element: this._options.button}).getButtonId();
|
||||
}
|
||||
|
||||
this._generateExtraButtonSpecs();
|
||||
|
||||
this._handleCameraAccess();
|
||||
|
||||
if (this._options.paste.targetElement) {
|
||||
if (qq.PasteSupport) {
|
||||
this._pasteHandler = this._createPasteHandler();
|
||||
}
|
||||
else {
|
||||
this.log("Paste support module not found", "error");
|
||||
}
|
||||
}
|
||||
|
||||
this._preventLeaveInProgress();
|
||||
|
||||
this._imageGenerator = qq.ImageGenerator && new qq.ImageGenerator(qq.bind(this.log, this));
|
||||
this._refreshSessionData();
|
||||
|
||||
this._succeededSinceLastAllComplete = [];
|
||||
this._failedSinceLastAllComplete = [];
|
||||
|
||||
this._scaler = (qq.Scaler && new qq.Scaler(this._options.scaling, qq.bind(this.log, this))) || {};
|
||||
if (this._scaler.enabled) {
|
||||
this._customNewFileHandler = qq.bind(this._scaler.handleNewFile, this._scaler);
|
||||
}
|
||||
|
||||
if (qq.TotalProgress && qq.supportedFeatures.progressBar) {
|
||||
this._totalProgress = new qq.TotalProgress(
|
||||
qq.bind(this._onTotalProgress, this),
|
||||
|
||||
function(id) {
|
||||
var entry = self._uploadData.retrieve({id: id});
|
||||
return (entry && entry.size) || 0;
|
||||
}
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
// Define the private & public API methods.
|
||||
qq.FineUploaderBasic.prototype = qq.basePublicApi;
|
||||
qq.extend(qq.FineUploaderBasic.prototype, qq.basePrivateApi);
|
||||
}());
|
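A minimal sketch of instantiating core mode with the chunking and resume features whose defaults are listed above; the endpoint, element id, and size limit are invented for illustration and this block is not part of the deleted source:

var uploader = new qq.FineUploaderBasic({
    button: document.getElementById("upload-button"),   // container for the invisible <input type="file">
    request: {
        endpoint: "/server/upload"
    },
    chunking: {
        enabled: true            // split files into partSize-byte pieces (2000000 by default)
    },
    resume: {
        enabled: true            // persist chunk progress to localStorage between sessions
    },
    validation: {
        sizeLimit: 1073741824    // reject anything over 1 GiB client-side
    },
    callbacks: {
        onComplete: function(id, name, responseJSON) {
            console.log(name + " finished, success = " + responseJSON.success);
        }
    }
});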
@ -1,170 +0,0 @@
|
||||
/*globals qq */
|
||||
/**
|
||||
* This defines FineUploader mode, which is a default UI w/ drag & drop uploading.
|
||||
*/
|
||||
qq.FineUploader = function(o, namespace) {
|
||||
"use strict";
|
||||
|
||||
// By default this should inherit instance data from FineUploaderBasic, but this can be overridden
|
||||
// if the (internal) caller defines a different parent. The parent is also used by
|
||||
// the private and public API functions that need to delegate to a parent function.
|
||||
this._parent = namespace ? qq[namespace].FineUploaderBasic : qq.FineUploaderBasic;
|
||||
this._parent.apply(this, arguments);
|
||||
|
||||
// Options provided by FineUploader mode
|
||||
qq.extend(this._options, {
|
||||
element: null,
|
||||
|
||||
button: null,
|
||||
|
||||
listElement: null,
|
||||
|
||||
dragAndDrop: {
|
||||
extraDropzones: [],
|
||||
reportDirectoryPaths: false
|
||||
},
|
||||
|
||||
text: {
|
||||
formatProgress: "{percent}% of {total_size}",
|
||||
failUpload: "Upload failed",
|
||||
waitingForResponse: "Processing...",
|
||||
paused: "Paused"
|
||||
},
|
||||
|
||||
template: "qq-template",
|
||||
|
||||
classes: {
|
||||
retrying: "qq-upload-retrying",
|
||||
retryable: "qq-upload-retryable",
|
||||
success: "qq-upload-success",
|
||||
fail: "qq-upload-fail",
|
||||
editable: "qq-editable",
|
||||
hide: "qq-hide",
|
||||
dropActive: "qq-upload-drop-area-active"
|
||||
},
|
||||
|
||||
failedUploadTextDisplay: {
|
||||
mode: "default", //default, custom, or none
|
||||
maxChars: 50,
|
||||
responseProperty: "error",
|
||||
enableTooltip: true
|
||||
},
|
||||
|
||||
messages: {
|
||||
tooManyFilesError: "You may only drop one file",
|
||||
unsupportedBrowser: "Unrecoverable error - this browser does not permit file uploading of any kind."
|
||||
},
|
||||
|
||||
retry: {
|
||||
showAutoRetryNote: true,
|
||||
autoRetryNote: "Retrying {retryNum}/{maxAuto}..."
|
||||
},
|
||||
|
||||
deleteFile: {
|
||||
forceConfirm: false,
|
||||
confirmMessage: "Are you sure you want to delete {filename}?",
|
||||
deletingStatusText: "Deleting...",
|
||||
deletingFailedText: "Delete failed"
|
||||
|
||||
},
|
||||
|
||||
display: {
|
||||
fileSizeOnSubmit: false,
|
||||
prependFiles: false
|
||||
},
|
||||
|
||||
paste: {
|
||||
promptForName: false,
|
||||
namePromptMessage: "Please name this image"
|
||||
},
|
||||
|
||||
thumbnails: {
|
||||
placeholders: {
|
||||
waitUntilResponse: false,
|
||||
notAvailablePath: null,
|
||||
waitingPath: null
|
||||
}
|
||||
},
|
||||
|
||||
scaling: {
|
||||
hideScaled: false
|
||||
},
|
||||
|
||||
showMessage: function(message){
|
||||
setTimeout(function() {
|
||||
window.alert(message);
|
||||
}, 0);
|
||||
},
|
||||
|
||||
showConfirm: function(message) {
|
||||
return window.confirm(message);
|
||||
},
|
||||
|
||||
showPrompt: function(message, defaultValue) {
|
||||
return window.prompt(message, defaultValue);
|
||||
}
|
||||
}, true);
|
||||
|
||||
// Replace any default options with user defined ones
|
||||
qq.extend(this._options, o, true);
|
||||
|
||||
this._templating = new qq.Templating({
|
||||
log: qq.bind(this.log, this),
|
||||
templateIdOrEl: this._options.template,
|
||||
containerEl: this._options.element,
|
||||
fileContainerEl: this._options.listElement,
|
||||
button: this._options.button,
|
||||
imageGenerator: this._imageGenerator,
|
||||
classes: {
|
||||
hide: this._options.classes.hide,
|
||||
editable: this._options.classes.editable
|
||||
},
|
||||
placeholders: {
|
||||
waitUntilUpdate: this._options.thumbnails.placeholders.waitUntilResponse,
|
||||
thumbnailNotAvailable: this._options.thumbnails.placeholders.notAvailablePath,
|
||||
waitingForThumbnail: this._options.thumbnails.placeholders.waitingPath
|
||||
},
|
||||
text: this._options.text
|
||||
});
|
||||
|
||||
if (!qq.supportedFeatures.uploading || (this._options.cors.expected && !qq.supportedFeatures.uploadCors)) {
|
||||
this._templating.renderFailure(this._options.messages.unsupportedBrowser);
|
||||
}
|
||||
else {
|
||||
this._wrapCallbacks();
|
||||
|
||||
this._templating.render();
|
||||
|
||||
this._classes = this._options.classes;
|
||||
|
||||
if (!this._options.button && this._templating.getButton()) {
|
||||
this._defaultButtonId = this._createUploadButton({element: this._templating.getButton()}).getButtonId();
|
||||
}
|
||||
|
||||
this._setupClickAndEditEventHandlers();
|
||||
|
||||
if (qq.DragAndDrop && qq.supportedFeatures.fileDrop) {
|
||||
this._dnd = this._setupDragAndDrop();
|
||||
}
|
||||
|
||||
if (this._options.paste.targetElement && this._options.paste.promptForName) {
|
||||
if (qq.PasteSupport) {
|
||||
this._setupPastePrompt();
|
||||
}
|
||||
else {
|
||||
this.log("Paste support module not found.", "error");
|
||||
}
|
||||
}
|
||||
|
||||
this._totalFilesInBatch = 0;
|
||||
this._filesInBatchAddedToUi = 0;
|
||||
}
|
||||
};
|
||||
|
||||
// Inherit the base public & private API methods
|
||||
qq.extend(qq.FineUploader.prototype, qq.basePublicApi);
|
||||
qq.extend(qq.FineUploader.prototype, qq.basePrivateApi);
|
||||
|
||||
// Add the FineUploader/default UI public & private UI methods, which may override some base methods.
|
||||
qq.extend(qq.FineUploader.prototype, qq.uiPublicApi);
|
||||
qq.extend(qq.FineUploader.prototype, qq.uiPrivateApi);
|
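For contrast with core mode, a sketch of a UI-mode instance; the container id is invented and the default "qq-template" markup template referenced by the template option above is assumed to be present in the page:

var uploader = new qq.FineUploader({
    element: document.getElementById("fine-uploader"),   // container the template is rendered into
    request: {
        endpoint: "/server/upload"
    },
    deleteFile: {
        enabled: true,
        endpoint: "/server/upload"
    },
    retry: {
        enableAuto: true   // surfaces the "Retrying {retryNum}/{maxAuto}..." note by default
    }
});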
@ -1,114 +0,0 @@
|
||||
/*globals qq, XMLHttpRequest*/
|
||||
/**
|
||||
* Sends a POST request to the server to notify it of a successful upload to an endpoint. The server is expected to indicate success
|
||||
* or failure via the response status. Specific information about the failure can be passed from the server via an `error`
|
||||
* property (by default) in an "application/json" response.
|
||||
*
|
||||
* @param o Options associated with all requests.
|
||||
* @constructor
|
||||
*/
|
||||
qq.UploadSuccessAjaxRequester = function(o) {
|
||||
"use strict";
|
||||
|
||||
var requester,
|
||||
pendingRequests = [],
|
||||
options = {
|
||||
method: "POST",
|
||||
endpoint: null,
|
||||
maxConnections: 3,
|
||||
customHeaders: {},
|
||||
paramsStore: {},
|
||||
cors: {
|
||||
expected: false,
|
||||
sendCredentials: false
|
||||
},
|
||||
log: function(str, level) {}
|
||||
};
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
function handleSuccessResponse(id, xhrOrXdr, isError) {
|
||||
var promise = pendingRequests[id],
|
||||
responseJson = xhrOrXdr.responseText,
|
||||
successIndicator = {success: true},
|
||||
failureIndicator = {success: false},
|
||||
parsedResponse;
|
||||
|
||||
delete pendingRequests[id];
|
||||
|
||||
options.log(qq.format("Received the following response body to an upload success request for id {}: {}", id, responseJson));
|
||||
|
||||
try {
|
||||
parsedResponse = qq.parseJson(responseJson);
|
||||
|
||||
// If this is a cross-origin request, the server may return a 200 response w/ error or success properties
|
||||
// in order to ensure any specific error message is picked up by Fine Uploader for all browsers,
|
||||
// since XDomainRequest (used in IE9 and IE8) doesn't give you access to the
|
||||
// response body for an "error" response.
|
||||
if (isError || (parsedResponse && (parsedResponse.error || parsedResponse.success === false))) {
|
||||
options.log("Upload success request was rejected by the server.", "error");
|
||||
promise.failure(qq.extend(parsedResponse, failureIndicator));
|
||||
}
|
||||
else {
|
||||
options.log("Upload success was acknowledged by the server.");
|
||||
promise.success(qq.extend(parsedResponse, successIndicator));
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// This will be executed if a JSON response is not present. This is not mandatory, so account for this properly.
|
||||
if (isError) {
|
||||
options.log(qq.format("Your server indicated failure in its upload success request response for id {}!", id), "error");
|
||||
promise.failure(failureIndicator);
|
||||
}
|
||||
else {
|
||||
options.log("Upload success was acknowledged by the server.");
|
||||
promise.success(successIndicator);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
requester = qq.extend(this, new qq.AjaxRequester({
|
||||
acceptHeader: "application/json",
|
||||
method: options.method,
|
||||
endpointStore: {
|
||||
get: function() {
|
||||
return options.endpoint;
|
||||
}
|
||||
},
|
||||
paramsStore: options.paramsStore,
|
||||
maxConnections: options.maxConnections,
|
||||
customHeaders: options.customHeaders,
|
||||
log: options.log,
|
||||
onComplete: handleSuccessResponse,
|
||||
cors: options.cors,
|
||||
successfulResponseCodes: {
|
||||
POST: [200]
|
||||
}
|
||||
}));
|
||||
|
||||
|
||||
qq.extend(this, {
|
||||
/**
|
||||
* Sends a request to the server, notifying it that a recently submitted file was successfully sent.
|
||||
*
|
||||
* @param id ID of the associated file
|
||||
* @param spec `Object` with the properties that correspond to important values that we want to
|
||||
* send to the server with this request.
|
||||
* @returns {qq.Promise} A promise to be fulfilled when the response has been received and parsed. The parsed
|
||||
* payload of the response will be passed into the `failure` or `success` promise method.
|
||||
*/
|
||||
sendSuccessRequest: function(id, spec) {
|
||||
var promise = new qq.Promise();
|
||||
|
||||
options.log("Submitting upload success request/notification for " + id);
|
||||
|
||||
requester.initTransport(id)
|
||||
.withParams(spec)
|
||||
.send();
|
||||
|
||||
pendingRequests[id] = promise;
|
||||
|
||||
return promise;
|
||||
}
|
||||
});
|
||||
};
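// --- Usage sketch (added for illustration; not part of the original file) ---
// The endpoint below is hypothetical, and consuming the returned qq.Promise via then()
// is an assumption -- the promise implementation is not shown in this diff.
var successRequester = new qq.UploadSuccessAjaxRequester({
    endpoint: "/upload/success",  // hypothetical server endpoint
    log: qq.log
});

successRequester.sendSuccessRequest(0, {uuid: "abc-123", name: "photo.png"}).then(
    function() { qq.log("Upload success acknowledged by the server."); },
    function() { qq.log("Upload success request rejected by the server.", "error"); }
);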
|
@ -1,793 +0,0 @@
|
||||
/*globals window, navigator, document, FormData, File, HTMLInputElement, XMLHttpRequest, Blob, Storage, ActiveXObject */
|
||||
/* jshint -W079 */
|
||||
var qq = function(element) {
|
||||
"use strict";
|
||||
|
||||
return {
|
||||
hide: function() {
|
||||
element.style.display = "none";
|
||||
return this;
|
||||
},
|
||||
|
||||
/** Returns the function which detaches attached event */
|
||||
attach: function(type, fn) {
|
||||
if (element.addEventListener){
|
||||
element.addEventListener(type, fn, false);
|
||||
} else if (element.attachEvent){
|
||||
element.attachEvent("on" + type, fn);
|
||||
}
|
||||
return function() {
|
||||
qq(element).detach(type, fn);
|
||||
};
|
||||
},
|
||||
|
||||
detach: function(type, fn) {
|
||||
if (element.removeEventListener){
|
||||
element.removeEventListener(type, fn, false);
|
||||
} else if (element.attachEvent){
|
||||
element.detachEvent("on" + type, fn);
|
||||
}
|
||||
return this;
|
||||
},
|
||||
|
||||
contains: function(descendant) {
|
||||
// The [W3C spec](http://www.w3.org/TR/domcore/#dom-node-contains)
|
||||
// says a `null` (or ostensibly `undefined`) parameter
|
||||
// passed into `Node.contains` should result in a false return value.
|
||||
// IE7 throws an exception if the parameter is `undefined` though.
|
||||
if (!descendant) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// compareDocumentPosition returns false in this case
|
||||
if (element === descendant) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (element.contains){
|
||||
return element.contains(descendant);
|
||||
} else {
|
||||
/*jslint bitwise: true*/
|
||||
return !!(descendant.compareDocumentPosition(element) & 8);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Insert this element before elementB.
|
||||
*/
|
||||
insertBefore: function(elementB) {
|
||||
elementB.parentNode.insertBefore(element, elementB);
|
||||
return this;
|
||||
},
|
||||
|
||||
remove: function() {
|
||||
element.parentNode.removeChild(element);
|
||||
return this;
|
||||
},
|
||||
|
||||
/**
|
||||
* Sets styles for an element.
|
||||
* Fixes opacity in IE6-8.
|
||||
*/
|
||||
css: function(styles) {
|
||||
/*jshint eqnull: true*/
|
||||
if (element.style == null) {
|
||||
throw new qq.Error("Can't apply style to node as it is not on the HTMLElement prototype chain!");
|
||||
}
|
||||
|
||||
/*jshint -W116*/
|
||||
if (styles.opacity != null){
|
||||
if (typeof element.style.opacity !== "string" && typeof(element.filters) !== "undefined"){
|
||||
styles.filter = "alpha(opacity=" + Math.round(100 * styles.opacity) + ")";
|
||||
}
|
||||
}
|
||||
qq.extend(element.style, styles);
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
hasClass: function(name) {
|
||||
var re = new RegExp("(^| )" + name + "( |$)");
|
||||
return re.test(element.className);
|
||||
},
|
||||
|
||||
addClass: function(name) {
|
||||
if (!qq(element).hasClass(name)){
|
||||
element.className += " " + name;
|
||||
}
|
||||
return this;
|
||||
},
|
||||
|
||||
removeClass: function(name) {
|
||||
var re = new RegExp("(^| )" + name + "( |$)");
|
||||
element.className = element.className.replace(re, " ").replace(/^\s+|\s+$/g, "");
|
||||
return this;
|
||||
},
|
||||
|
||||
getByClass: function(className) {
|
||||
var candidates,
|
||||
result = [];
|
||||
|
||||
if (element.querySelectorAll){
|
||||
return element.querySelectorAll("." + className);
|
||||
}
|
||||
|
||||
candidates = element.getElementsByTagName("*");
|
||||
|
||||
qq.each(candidates, function(idx, val) {
|
||||
if (qq(val).hasClass(className)){
|
||||
result.push(val);
|
||||
}
|
||||
});
|
||||
return result;
|
||||
},
|
||||
|
||||
children: function() {
|
||||
var children = [],
|
||||
child = element.firstChild;
|
||||
|
||||
while (child){
|
||||
if (child.nodeType === 1){
|
||||
children.push(child);
|
||||
}
|
||||
child = child.nextSibling;
|
||||
}
|
||||
|
||||
return children;
|
||||
},
|
||||
|
||||
setText: function(text) {
|
||||
element.innerText = text;
|
||||
element.textContent = text;
|
||||
return this;
|
||||
},
|
||||
|
||||
clearText: function() {
|
||||
return qq(element).setText("");
|
||||
},
|
||||
|
||||
// Returns true if the attribute exists on the element
|
||||
// AND the value of the attribute is NOT "false" (case-insensitive)
|
||||
hasAttribute: function(attrName) {
|
||||
var attrVal;
|
||||
|
||||
if (element.hasAttribute) {
|
||||
|
||||
if (!element.hasAttribute(attrName)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*jshint -W116*/
|
||||
return (/^false$/i).exec(element.getAttribute(attrName)) == null;
|
||||
}
|
||||
else {
|
||||
attrVal = element[attrName];
|
||||
|
||||
if (attrVal === undefined) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*jshint -W116*/
|
||||
return (/^false$/i).exec(attrVal) == null;
|
||||
}
|
||||
}
|
||||
};
|
||||
};
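// --- Usage sketch (added for illustration; not part of the original file) ---
// The qq() wrapper above chains its DOM helpers; attach() returns a detacher function.
// The element id "status" and class name "qq-visible" are assumed placeholders.
var statusEl = document.getElementById("status"),
    detachClick = qq(statusEl).attach("click", function() {
        qq(statusEl).clearText();
    });

qq(statusEl).addClass("qq-visible").setText("Ready");
detachClick();  // remove the click handler registered above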
|
||||
|
||||
(function(){
|
||||
"use strict";
|
||||
|
||||
qq.log = function(message, level) {
|
||||
if (window.console) {
|
||||
if (!level || level === "info") {
|
||||
window.console.log(message);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (window.console[level]) {
|
||||
window.console[level](message);
|
||||
}
|
||||
else {
|
||||
window.console.log("<" + level + "> " + message);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
qq.isObject = function(variable) {
|
||||
return variable && !variable.nodeType && Object.prototype.toString.call(variable) === "[object Object]";
|
||||
};
|
||||
|
||||
qq.isFunction = function(variable) {
|
||||
return typeof(variable) === "function";
|
||||
};
|
||||
|
||||
/**
|
||||
* Check the type of a value. Is it an "array"?
|
||||
*
|
||||
* @param value value to test.
|
||||
* @returns true if the value is an array or associated with an `ArrayBuffer`
|
||||
*/
|
||||
qq.isArray = function(value) {
|
||||
return Object.prototype.toString.call(value) === "[object Array]" ||
|
||||
(value && window.ArrayBuffer && value.buffer && value.buffer.constructor === ArrayBuffer);
|
||||
};
|
||||
|
||||
// Returns true if the passed object is a `DataTransferItemList` -- the list found on a `DataTransfer` object associated with drop events when utilizing the Filesystem API.
|
||||
qq.isItemList = function(maybeItemList) {
|
||||
return Object.prototype.toString.call(maybeItemList) === "[object DataTransferItemList]";
|
||||
};
|
||||
|
||||
// Looks for an object on a `NodeList` or an `HTMLCollection`|`HTMLFormElement`|`HTMLSelectElement`
|
||||
// object that is associated with collections of Nodes.
|
||||
qq.isNodeList = function(maybeNodeList) {
|
||||
return Object.prototype.toString.call(maybeNodeList) === "[object NodeList]" ||
|
||||
// If `HTMLCollection` is the actual type of the object, we must determine this
|
||||
// by checking for expected properties/methods on the object
|
||||
(maybeNodeList.item && maybeNodeList.namedItem);
|
||||
};
|
||||
|
||||
qq.isString = function(maybeString) {
|
||||
return Object.prototype.toString.call(maybeString) === "[object String]";
|
||||
};
|
||||
|
||||
qq.trimStr = function(string) {
|
||||
if (String.prototype.trim) {
|
||||
return string.trim();
|
||||
}
|
||||
|
||||
return string.replace(/^\s+|\s+$/g,"");
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* @param str String to format.
|
||||
* @returns {string} A string, swapping argument values with the associated occurrence of {} in the passed string.
|
||||
*/
|
||||
qq.format = function(str) {
|
||||
|
||||
var args = Array.prototype.slice.call(arguments, 1),
|
||||
newStr = str,
|
||||
nextIdxToReplace = newStr.indexOf("{}");
|
||||
|
||||
qq.each(args, function(idx, val) {
|
||||
var strBefore = newStr.substring(0, nextIdxToReplace),
|
||||
strAfter = newStr.substring(nextIdxToReplace+2);
|
||||
|
||||
newStr = strBefore + val + strAfter;
|
||||
nextIdxToReplace = newStr.indexOf("{}", nextIdxToReplace + val.length);
|
||||
|
||||
// End the loop if we have run out of tokens (when the arguments exceed the # of tokens)
|
||||
if (nextIdxToReplace < 0) {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
return newStr;
|
||||
};
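// --- Usage sketch (added for illustration; not part of the original file) ---
// Each {} token is replaced, left to right, by the corresponding extra argument.
qq.format("Received {} of {} bytes", 512, 1024);  // "Received 512 of 1024 bytes"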
|
||||
|
||||
qq.isFile = function(maybeFile) {
|
||||
return window.File && Object.prototype.toString.call(maybeFile) === "[object File]";
|
||||
};
|
||||
|
||||
qq.isFileList = function(maybeFileList) {
|
||||
return window.FileList && Object.prototype.toString.call(maybeFileList) === "[object FileList]";
|
||||
};
|
||||
|
||||
qq.isFileOrInput = function(maybeFileOrInput) {
|
||||
return qq.isFile(maybeFileOrInput) || qq.isInput(maybeFileOrInput);
|
||||
};
|
||||
|
||||
qq.isInput = function(maybeInput, notFile) {
|
||||
var evaluateType = function(type) {
|
||||
var normalizedType = type.toLowerCase();
|
||||
|
||||
if (notFile) {
|
||||
return normalizedType !== "file";
|
||||
}
|
||||
|
||||
return normalizedType === "file";
|
||||
};
|
||||
|
||||
if (window.HTMLInputElement) {
|
||||
if (Object.prototype.toString.call(maybeInput) === "[object HTMLInputElement]") {
|
||||
if (maybeInput.type && evaluateType(maybeInput.type)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (maybeInput.tagName) {
|
||||
if (maybeInput.tagName.toLowerCase() === "input") {
|
||||
if (maybeInput.type && evaluateType(maybeInput.type)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
qq.isBlob = function(maybeBlob) {
|
||||
if (window.Blob && Object.prototype.toString.call(maybeBlob) === "[object Blob]") {
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
qq.isXhrUploadSupported = function() {
|
||||
var input = document.createElement("input");
|
||||
input.type = "file";
|
||||
|
||||
return (
|
||||
input.multiple !== undefined &&
|
||||
typeof File !== "undefined" &&
|
||||
typeof FormData !== "undefined" &&
|
||||
typeof (qq.createXhrInstance()).upload !== "undefined" );
|
||||
};
|
||||
|
||||
// Fall back to ActiveX if native XHR is disabled (possible in any version of IE).
|
||||
qq.createXhrInstance = function() {
|
||||
if (window.XMLHttpRequest) {
|
||||
return new XMLHttpRequest();
|
||||
}
|
||||
|
||||
try {
|
||||
return new ActiveXObject("MSXML2.XMLHTTP.3.0");
|
||||
}
|
||||
catch(error) {
|
||||
qq.log("Neither XHR or ActiveX are supported!", "error");
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
qq.isFolderDropSupported = function(dataTransfer) {
|
||||
return dataTransfer.items &&
|
||||
dataTransfer.items.length > 0 &&
|
||||
dataTransfer.items[0].webkitGetAsEntry;
|
||||
};
|
||||
|
||||
qq.isFileChunkingSupported = function() {
|
||||
return !qq.androidStock() && //Android's stock browser cannot upload Blobs correctly
|
||||
qq.isXhrUploadSupported() &&
|
||||
(File.prototype.slice !== undefined || File.prototype.webkitSlice !== undefined || File.prototype.mozSlice !== undefined);
|
||||
};
|
||||
|
||||
qq.sliceBlob = function(fileOrBlob, start, end) {
|
||||
var slicer = fileOrBlob.slice || fileOrBlob.mozSlice || fileOrBlob.webkitSlice;
|
||||
|
||||
return slicer.call(fileOrBlob, start, end);
|
||||
};
|
||||
|
||||
qq.arrayBufferToHex = function(buffer) {
|
||||
var bytesAsHex = "",
|
||||
bytes = new Uint8Array(buffer);
|
||||
|
||||
|
||||
qq.each(bytes, function(idx, byt) {
|
||||
var byteAsHexStr = byt.toString(16);
|
||||
|
||||
if (byteAsHexStr.length < 2) {
|
||||
byteAsHexStr = "0" + byteAsHexStr;
|
||||
}
|
||||
|
||||
bytesAsHex += byteAsHexStr;
|
||||
});
|
||||
|
||||
return bytesAsHex;
|
||||
};
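// --- Usage sketch (added for illustration; not part of the original file) ---
// Each byte is rendered as a zero-padded, two-character hex value.
qq.arrayBufferToHex(new Uint8Array([0, 15, 255]).buffer);  // "000fff"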
|
||||
|
||||
qq.readBlobToHex = function(blob, startOffset, length) {
|
||||
var initialBlob = qq.sliceBlob(blob, startOffset, startOffset + length),
|
||||
fileReader = new FileReader(),
|
||||
promise = new qq.Promise();
|
||||
|
||||
fileReader.onload = function() {
|
||||
promise.success(qq.arrayBufferToHex(fileReader.result));
|
||||
};
|
||||
|
||||
fileReader.onerror = promise.failure;
|
||||
|
||||
fileReader.readAsArrayBuffer(initialBlob);
|
||||
|
||||
return promise;
|
||||
};
|
||||
|
||||
qq.extend = function(first, second, extendNested) {
|
||||
qq.each(second, function(prop, val) {
|
||||
if (extendNested && qq.isObject(val)) {
|
||||
if (first[prop] === undefined) {
|
||||
first[prop] = {};
|
||||
}
|
||||
qq.extend(first[prop], val, true);
|
||||
}
|
||||
else {
|
||||
first[prop] = val;
|
||||
}
|
||||
});
|
||||
|
||||
return first;
|
||||
};
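// --- Usage sketch (added for illustration; not part of the original file) ---
// With extendNested=true, plain-object values are merged rather than replaced.
var defaults = {debug: false, chunking: {enabled: false}};
qq.extend(defaults, {chunking: {partSize: 2000000}}, true);
// defaults is now {debug: false, chunking: {enabled: false, partSize: 2000000}}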
|
||||
|
||||
/**
|
||||
* Allow properties in one object to override properties in another,
|
||||
* keeping track of the original values from the target object.
|
||||
*
|
||||
* Note that the pre-override values of the properties to be overridden by the source will be passed into the `sourceFn` when it is invoked.
|
||||
*
|
||||
* @param target Update properties in this object from some source
|
||||
* @param sourceFn A function that, when invoked, will return properties that will replace properties with the same name in the target.
|
||||
* @returns {object} The target object
|
||||
*/
|
||||
qq.override = function(target, sourceFn) {
|
||||
var super_ = {},
|
||||
source = sourceFn(super_);
|
||||
|
||||
qq.each(source, function(srcPropName, srcPropVal) {
|
||||
if (target[srcPropName] !== undefined) {
|
||||
super_[srcPropName] = target[srcPropName];
|
||||
}
|
||||
|
||||
target[srcPropName] = srcPropVal;
|
||||
});
|
||||
|
||||
return target;
|
||||
};
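// --- Usage sketch (added for illustration; not part of the original file) ---
// The pre-override implementation is captured on `super_` before being replaced.
var api = {
    send: function() { return "original"; }
};

qq.override(api, function(super_) {
    return {
        send: function() {
            return "wrapped " + super_.send();
        }
    };
});

api.send();  // "wrapped original"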
|
||||
|
||||
/**
|
||||
* Searches for a given element in the array, returns -1 if it is not present.
|
||||
* @param {Number} [from] The index at which to begin the search
|
||||
*/
|
||||
qq.indexOf = function(arr, elt, from){
|
||||
if (arr.indexOf) {
|
||||
return arr.indexOf(elt, from);
|
||||
}
|
||||
|
||||
from = from || 0;
|
||||
var len = arr.length;
|
||||
|
||||
if (from < 0) {
|
||||
from += len;
|
||||
}
|
||||
|
||||
for (; from < len; from+=1){
|
||||
if (arr.hasOwnProperty(from) && arr[from] === elt){
|
||||
return from;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
};
|
||||
|
||||
//this is a version 4 UUID
|
||||
qq.getUniqueId = function(){
|
||||
return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, function(c) {
|
||||
/*jslint eqeq: true, bitwise: true*/
|
||||
var r = Math.random()*16|0, v = c == "x" ? r : (r&0x3|0x8);
|
||||
return v.toString(16);
|
||||
});
|
||||
};
|
||||
|
||||
//
|
||||
// Browsers and platforms detection
|
||||
qq.ie = function() {
|
||||
return navigator.userAgent.indexOf("MSIE") !== -1;
|
||||
};
|
||||
|
||||
qq.ie7 = function() {
|
||||
return navigator.userAgent.indexOf("MSIE 7") !== -1;
|
||||
};
|
||||
|
||||
qq.ie10 = function() {
|
||||
return navigator.userAgent.indexOf("MSIE 10") !== -1;
|
||||
};
|
||||
|
||||
qq.ie11 = function() {
|
||||
return (navigator.userAgent.indexOf("Trident") !== -1 &&
|
||||
navigator.userAgent.indexOf("rv:11") !== -1);
|
||||
};
|
||||
|
||||
qq.safari = function() {
|
||||
return navigator.vendor !== undefined && navigator.vendor.indexOf("Apple") !== -1;
|
||||
};
|
||||
|
||||
qq.chrome = function() {
|
||||
return navigator.vendor !== undefined && navigator.vendor.indexOf("Google") !== -1;
|
||||
};
|
||||
|
||||
qq.opera = function() {
|
||||
return navigator.vendor !== undefined && navigator.vendor.indexOf("Opera") !== -1;
|
||||
};
|
||||
|
||||
qq.firefox = function() {
|
||||
return (!qq.ie11() && navigator.userAgent.indexOf("Mozilla") !== -1 && navigator.vendor !== undefined && navigator.vendor === "");
|
||||
};
|
||||
|
||||
qq.windows = function() {
|
||||
return navigator.platform === "Win32";
|
||||
};
|
||||
|
||||
qq.android = function() {
|
||||
return navigator.userAgent.toLowerCase().indexOf("android") !== -1;
|
||||
};
|
||||
|
||||
// We need to identify the Android stock browser via the UA string to work around various bugs in this browser,
|
||||
// such as the one that prevents a `Blob` from being uploaded.
|
||||
qq.androidStock = function() {
|
||||
return qq.android() && navigator.userAgent.toLowerCase().indexOf("chrome") < 0;
|
||||
};
|
||||
|
||||
qq.ios7 = function() {
|
||||
return qq.ios() && navigator.userAgent.indexOf(" OS 7_") !== -1;
|
||||
};
|
||||
|
||||
qq.ios = function() {
|
||||
/*jshint -W014 */
|
||||
return navigator.userAgent.indexOf("iPad") !== -1
|
||||
|| navigator.userAgent.indexOf("iPod") !== -1
|
||||
|| navigator.userAgent.indexOf("iPhone") !== -1;
|
||||
};
|
||||
|
||||
//
|
||||
// Events
|
||||
|
||||
qq.preventDefault = function(e){
|
||||
if (e.preventDefault){
|
||||
e.preventDefault();
|
||||
} else{
|
||||
e.returnValue = false;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates and returns element from html string
|
||||
* Uses innerHTML to create an element
|
||||
*/
|
||||
qq.toElement = (function(){
|
||||
var div = document.createElement("div");
|
||||
return function(html){
|
||||
div.innerHTML = html;
|
||||
var element = div.firstChild;
|
||||
div.removeChild(element);
|
||||
return element;
|
||||
};
|
||||
}());
|
||||
|
||||
//key and value are passed to callback for each entry in the iterable item
|
||||
qq.each = function(iterableItem, callback) {
|
||||
var keyOrIndex, retVal;
|
||||
|
||||
if (iterableItem) {
|
||||
// Iterate through [`Storage`](http://www.w3.org/TR/webstorage/#the-storage-interface) items
|
||||
if (window.Storage && iterableItem.constructor === window.Storage) {
|
||||
for (keyOrIndex = 0; keyOrIndex < iterableItem.length; keyOrIndex++) {
|
||||
retVal = callback(iterableItem.key(keyOrIndex), iterableItem.getItem(iterableItem.key(keyOrIndex)));
|
||||
if (retVal === false) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
// `DataTransferItemList` & `NodeList` objects are array-like and should be treated as arrays
|
||||
// when iterating over items inside the object.
|
||||
else if (qq.isArray(iterableItem) || qq.isItemList(iterableItem) || qq.isNodeList(iterableItem)) {
|
||||
for (keyOrIndex = 0; keyOrIndex < iterableItem.length; keyOrIndex++) {
|
||||
retVal = callback(keyOrIndex, iterableItem[keyOrIndex]);
|
||||
if (retVal === false) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (qq.isString(iterableItem)) {
|
||||
for (keyOrIndex = 0; keyOrIndex < iterableItem.length; keyOrIndex++) {
|
||||
retVal = callback(keyOrIndex, iterableItem.charAt(keyOrIndex));
|
||||
if (retVal === false) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
for (keyOrIndex in iterableItem) {
|
||||
if (Object.prototype.hasOwnProperty.call(iterableItem, keyOrIndex)) {
|
||||
retVal = callback(keyOrIndex, iterableItem[keyOrIndex]);
|
||||
if (retVal === false) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//include any args that should be passed to the new function after the context arg
|
||||
qq.bind = function(oldFunc, context) {
|
||||
if (qq.isFunction(oldFunc)) {
|
||||
var args = Array.prototype.slice.call(arguments, 2);
|
||||
|
||||
return function() {
|
||||
var newArgs = qq.extend([], args);
|
||||
if (arguments.length) {
|
||||
newArgs = newArgs.concat(Array.prototype.slice.call(arguments));
|
||||
}
|
||||
return oldFunc.apply(context, newArgs);
|
||||
};
|
||||
}
|
||||
|
||||
throw new Error("first parameter must be a function!");
|
||||
};
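// --- Usage sketch (added for illustration; not part of the original file) ---
// Arguments passed after the context are prepended to those supplied at call time.
var logContext = {prefix: "[uploader]"},
    boundLog = qq.bind(function(level, message) {
        return this.prefix + " " + level + ": " + message;
    }, logContext, "info");

boundLog("ready");  // "[uploader] info: ready"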
|
||||
|
||||
/**
|
||||
* obj2url() takes a json-object as argument and generates
|
||||
* a querystring, pretty much like jQuery.param().
|
||||
*
|
||||
* how to use:
|
||||
*
|
||||
* `qq.obj2url({a:'b',c:'d'},'http://any.url/upload?otherParam=value');`
|
||||
*
|
||||
* will result in:
|
||||
*
|
||||
* `http://any.url/upload?otherParam=value&a=b&c=d`
|
||||
*
|
||||
* @param Object JSON-Object
|
||||
* @param String current querystring-part
|
||||
* @return String encoded querystring
|
||||
*/
|
||||
qq.obj2url = function(obj, temp, prefixDone){
|
||||
/*jshint laxbreak: true*/
|
||||
var uristrings = [],
|
||||
prefix = "&",
|
||||
add = function(nextObj, i){
|
||||
var nextTemp = temp
|
||||
? (/\[\]$/.test(temp)) // prevent double-encoding
|
||||
? temp
|
||||
: temp+"["+i+"]"
|
||||
: i;
|
||||
if ((nextTemp !== "undefined") && (i !== "undefined")) {
|
||||
uristrings.push(
|
||||
(typeof nextObj === "object")
|
||||
? qq.obj2url(nextObj, nextTemp, true)
|
||||
: (Object.prototype.toString.call(nextObj) === "[object Function]")
|
||||
? encodeURIComponent(nextTemp) + "=" + encodeURIComponent(nextObj())
|
||||
: encodeURIComponent(nextTemp) + "=" + encodeURIComponent(nextObj)
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
if (!prefixDone && temp) {
|
||||
prefix = (/\?/.test(temp)) ? (/\?$/.test(temp)) ? "" : "&" : "?";
|
||||
uristrings.push(temp);
|
||||
uristrings.push(qq.obj2url(obj));
|
||||
} else if ((Object.prototype.toString.call(obj) === "[object Array]") && (typeof obj !== "undefined") ) {
|
||||
qq.each(obj, function(idx, val) {
|
||||
add(val, idx);
|
||||
});
|
||||
} else if ((typeof obj !== "undefined") && (obj !== null) && (typeof obj === "object")){
|
||||
qq.each(obj, function(prop, val) {
|
||||
add(val, prop);
|
||||
});
|
||||
} else {
|
||||
uristrings.push(encodeURIComponent(temp) + "=" + encodeURIComponent(obj));
|
||||
}
|
||||
|
||||
if (temp) {
|
||||
return uristrings.join(prefix);
|
||||
} else {
|
||||
return uristrings.join(prefix)
|
||||
.replace(/^&/, "")
|
||||
.replace(/%20/g, "+");
|
||||
}
|
||||
};
|
||||
|
||||
qq.obj2FormData = function(obj, formData, arrayKeyName) {
|
||||
if (!formData) {
|
||||
formData = new FormData();
|
||||
}
|
||||
|
||||
qq.each(obj, function(key, val) {
|
||||
key = arrayKeyName ? arrayKeyName + "[" + key + "]" : key;
|
||||
|
||||
if (qq.isObject(val)) {
|
||||
qq.obj2FormData(val, formData, key);
|
||||
}
|
||||
else if (qq.isFunction(val)) {
|
||||
formData.append(key, val());
|
||||
}
|
||||
else {
|
||||
formData.append(key, val);
|
||||
}
|
||||
});
|
||||
|
||||
return formData;
|
||||
};
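// --- Usage sketch (added for illustration; not part of the original file) ---
// Nested objects are flattened into bracketed keys on the FormData instance.
var exampleFormData = qq.obj2FormData({filename: "photo.png", meta: {size: 1024}});
// exampleFormData now holds "filename" => "photo.png" and "meta[size]" => "1024"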
|
||||
|
||||
qq.obj2Inputs = function(obj, form) {
|
||||
var input;
|
||||
|
||||
if (!form) {
|
||||
form = document.createElement("form");
|
||||
}
|
||||
|
||||
qq.obj2FormData(obj, {
|
||||
append: function(key, val) {
|
||||
input = document.createElement("input");
|
||||
input.setAttribute("name", key);
|
||||
input.setAttribute("value", val);
|
||||
form.appendChild(input);
|
||||
}
|
||||
});
|
||||
|
||||
return form;
|
||||
};
|
||||
|
||||
/**
|
||||
* Not recommended for use outside of Fine Uploader since this falls back to an unchecked eval if JSON.parse is not
|
||||
* implemented. For a more secure JSON.parse polyfill, use Douglas Crockford's json2.js.
|
||||
*/
|
||||
qq.parseJson = function(json) {
|
||||
/*jshint evil: true*/
|
||||
if (window.JSON && qq.isFunction(JSON.parse)) {
|
||||
return JSON.parse(json);
|
||||
} else {
|
||||
return eval("(" + json + ")");
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieve the extension of a file, if it exists.
|
||||
*
|
||||
* @param filename
|
||||
* @returns {string || undefined}
|
||||
*/
|
||||
qq.getExtension = function(filename) {
|
||||
var extIdx = filename.lastIndexOf(".") + 1;
|
||||
|
||||
if (extIdx > 0) {
|
||||
return filename.substr(extIdx, filename.length - extIdx);
|
||||
}
|
||||
};
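// --- Usage sketch (added for illustration; not part of the original file) ---
qq.getExtension("archive.tar.gz");  // "gz" -- only the final extension is returned
qq.getExtension("README");          // undefined -- no "." in the name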
|
||||
|
||||
qq.getFilename = function(blobOrFileInput) {
|
||||
/*jslint regexp: true*/
|
||||
|
||||
if (qq.isInput(blobOrFileInput)) {
|
||||
// get input value and remove path to normalize
|
||||
return blobOrFileInput.value.replace(/.*(\/|\\)/, "");
|
||||
}
|
||||
else if (qq.isFile(blobOrFileInput)) {
|
||||
if (blobOrFileInput.fileName !== null && blobOrFileInput.fileName !== undefined) {
|
||||
return blobOrFileInput.fileName;
|
||||
}
|
||||
}
|
||||
|
||||
return blobOrFileInput.name;
|
||||
};
|
||||
|
||||
/**
|
||||
* A generic module which supports object disposal via its dispose() method.
|
||||
* */
|
||||
qq.DisposeSupport = function() {
|
||||
var disposers = [];
|
||||
|
||||
return {
|
||||
/** Run all registered disposers */
|
||||
dispose: function() {
|
||||
var disposer;
|
||||
do {
|
||||
disposer = disposers.shift();
|
||||
if (disposer) {
|
||||
disposer();
|
||||
}
|
||||
}
|
||||
while (disposer);
|
||||
},
|
||||
|
||||
/** Attach event handler and register de-attacher as a disposer */
|
||||
attach: function() {
|
||||
var args = arguments;
|
||||
/*jslint undef:true*/
|
||||
this.addDisposer(qq(args[0]).attach.apply(this, Array.prototype.slice.call(arguments, 1)));
|
||||
},
|
||||
|
||||
/** Add disposer to the collection */
|
||||
addDisposer: function(disposeFunction) {
|
||||
disposers.push(disposeFunction);
|
||||
}
|
||||
};
|
||||
};
|
||||
}());
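// --- Usage sketch (added for illustration; not part of the original file) ---
// attach() registers the detacher returned by qq().attach() so dispose() can undo it later.
// The element id "upload-button" is an assumed placeholder.
var disposeSupport = new qq.DisposeSupport(),
    uploadButton = document.getElementById("upload-button");

disposeSupport.attach(uploadButton, "click", function() {
    qq.log("button clicked");
});

disposeSupport.dispose();  // detaches every handler registered through attach()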
|
@ -1,2 +0,0 @@
|
||||
/*global qq */
|
||||
qq.version="5.0.3";
|
@ -1,36 +0,0 @@
|
||||
/*globals qq */
|
||||
/*jshint -W117 */
|
||||
qq.WindowReceiveMessage = function(o) {
|
||||
"use strict";
|
||||
|
||||
var options = {
|
||||
log: function(message, level) {}
|
||||
},
|
||||
callbackWrapperDetachers = {};
|
||||
|
||||
qq.extend(options, o);
|
||||
|
||||
qq.extend(this, {
|
||||
receiveMessage : function(id, callback) {
|
||||
var onMessageCallbackWrapper = function(event) {
|
||||
callback(event.data);
|
||||
};
|
||||
|
||||
if (window.postMessage) {
|
||||
callbackWrapperDetachers[id] = qq(window).attach("message", onMessageCallbackWrapper);
|
||||
}
|
||||
else {
|
||||
log("iframe message passing not supported in this browser!", "error");
|
||||
}
|
||||
},
|
||||
|
||||
stopReceivingMessages : function(id) {
|
||||
if (window.postMessage) {
|
||||
var detacher = callbackWrapperDetachers[id];
|
||||
if (detacher) {
|
||||
detacher();
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
};
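// --- Usage sketch (added for illustration; not part of the original file) ---
// Listens for window.postMessage data associated with a caller-chosen id; the id and
// message shape below are assumptions.
var receiver = new qq.WindowReceiveMessage({log: qq.log});

receiver.receiveMessage("file-0", function(messageData) {
    qq.log("Message received for file-0: " + messageData);
});

receiver.stopReceivingMessages("file-0");  // detach the wrapper registered above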
|
@ -1,54 +0,0 @@
|
||||
<?php
|
||||
|
||||
# Uncomment the two lines below if you want to debug
|
||||
#$output = shell_exec("source /home/fineuploaderdocs/.virtualenvs/docfu/bin/activate; docfu -v -d -b feature/static-doc-generation http://github.com/Widen/fine-uploader /home/fineuploaderdocs/docs.fineuploader.com/");
|
||||
#print_r($output);
|
||||
|
||||
# Base of the docfu command to generate documentation
|
||||
$DOCFU = "source /home/fineuploaderdocs/.virtualenvs/docfu/bin/activate; docfu ";
|
||||
|
||||
# Where to generate documentation
|
||||
$DEST = "/home/fineuploaderdocs/docs.fineuploader.com/";
|
||||
|
||||
# do we have a POST?
|
||||
if ($_SERVER['REQUEST_METHOD'] === 'POST') {
|
||||
|
||||
# does it contain a payload?
|
||||
if (isset($_REQUEST['payload'])) {
|
||||
$json_payload = $_REQUEST['payload'];
|
||||
|
||||
if (isset($json_payload['ref'])) {
|
||||
# decode dat payload
|
||||
$payload = json_decode($json_payload, true);
|
||||
$url = $payload['repository']['url'];
|
||||
|
||||
# Parse the branch or tag from the payload
|
||||
$ref = explode('/', $payload['ref'], 2);
|
||||
$ref = $ref[1];
|
||||
$ref = explode('/', $ref, 2);
|
||||
|
||||
$ref_type = null;
|
||||
if ($ref[0] === 'heads') {
|
||||
$ref_type = 'branch';
|
||||
$DOCFU = $DOCFU . "-b ". $ref[1] . " " . $url . " " . $DEST;
|
||||
} elseif ($ref[0] == 'tags') {
|
||||
$ref_type = 'tag';
|
||||
$DOCFU = $DOCFU . "-t ". $ref[1] . " " . $url . " " . $DEST;
|
||||
}
|
||||
|
||||
$deleted = $payload['deleted'];
|
||||
|
||||
# we've deleted a branch, delete the docs
|
||||
if ($deleted == TRUE) {
|
||||
$stdout = shell_exec("rm -rf " . $DEST . $ref_type . "/" . $ref[1]);
|
||||
print_r($stdout);
|
||||
} else {
|
||||
print_r($DOCFU);
|
||||
$stdout = shell_exec($DOCFU);
|
||||
print_r($stdout);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
?>
|
@ -1,91 +0,0 @@
|
||||
# .htaccess file for docs.fineuploader.com
|
||||
#
|
||||
# The plan:
|
||||
# * the directory structure of the docs is like so:
|
||||
#
|
||||
# ~/docs.fineuploader.com/<branch_or_tag>/<branch_or_tag_value>/<path>
|
||||
#
|
||||
# We simply want to make requests for the root forward to the 'master' branch, and we
|
||||
# want to strip out `html` in all the paths.
|
||||
|
||||
|
||||
#####
|
||||
# Authentication
|
||||
#####
|
||||
|
||||
# Require Authentication for POST requests to the hooks.php
|
||||
SetEnvIf Request_URI ^(?!/hook.php) no_require_auth=true
|
||||
AuthType Basic
|
||||
AuthName "Password protected"
|
||||
AuthUserFile /home/fineuploaderdocs/docs.fineuploader.com/.htpasswd
|
||||
Require valid-user
|
||||
|
||||
Order Deny,Allow
|
||||
Deny from all
|
||||
Satisfy any
|
||||
Require valid-user
|
||||
Allow from env=no_require_auth
|
||||
|
||||
Options +FollowSymLinks
|
||||
RewriteEngine On
|
||||
RewriteBase /
|
||||
|
||||
#####
|
||||
# favicon.ico
|
||||
#####
|
||||
RewriteRule favicon.ico favicon.ico [PT,L]
|
||||
|
||||
#####
|
||||
# hook.php
|
||||
#####
|
||||
|
||||
# Hook.php responds to GitHub webhooks. When it receives a notification
|
||||
# about newly pushed code, it rebuilds the documentation from the
|
||||
# most recent source.
|
||||
RewriteRule ^hook.php$ hook.php [L]
|
||||
|
||||
#####
|
||||
# URLS
|
||||
#####
|
||||
|
||||
# set master branch as root
|
||||
RewriteRule ^(?!branch|tag\/)(.*)$ branch/master/$1 [PT,L]
|
||||
|
||||
# Update: Fri Aug 16 10:11:15 CDT 2013
|
||||
# By: Mark Feltner
|
||||
#
|
||||
# Documentation has been refactored for S3 support. Page rules have changed.
|
||||
# All 404 pages should redirect to the documentation root to mitigate any broken links.
|
||||
|
||||
########
|
||||
# Redirect old pages:
|
||||
########
|
||||
RewriteRule ^branch/master/api/callbacks.html api/events.html [R,L]
|
||||
RewriteRule ^branch/master/api/drag-and-drop.html features/drag-and-drop.html [R,L]
|
||||
RewriteRule ^branch/master/api/feature-detection.html browser-support.html [R,L]
|
||||
RewriteRule ^branch/master/api/fineuploader(basic)?.html api/methods.html [R,L]
|
||||
RewriteRule ^branch/master/api/promise.html features/async-tasks-and-promises.html [R,L]
|
||||
RewriteRule ^branch/master/api/qquery.html api/qq.html [R,L]
|
||||
RewriteRule ^branch/master/integrating/debugging_errors.html features/handling-errors.html [R,L]
|
||||
RewriteRule ^branch/master/integrating/limitations-ie.html browser-support.html [R,L]
|
||||
RewriteRule ^branch/master/integrating/styling.html features/styling.html [R,L]
|
||||
RewriteRule ^branch/master/integrating/modes/fineuploader.html modes/ui.html [R,L]
|
||||
RewriteRule ^branch/master/integrating/modes/fineuploaderbasic.html modes/core.html [R,L]
|
||||
RewriteRule ^branch/master/integrating/modes/.*jquery.*.html integrating/jquery.html [R,L]
|
||||
RewriteRule ^branch/master/integrating/options/fineuploader.html api/options-ui.html [R,L]
|
||||
RewriteRule ^branch/master/integrating/options/fineuploaderbasic.html api/options.html [R,L]
|
||||
RewriteRule ^branch/master/integrating/quickstart/index.html quickstart/01-getting-started.html [R,L]
|
||||
RewriteRule ^branch/master/integrating/server/index.html endpoint_handlers/traditional.html [R,L]
|
||||
RewriteRule ^branch/master/overview/index.html index.html [R,L]
|
||||
RewriteRule ^branch/master/overview/development.html contributing.html [R,L]
|
||||
RewriteRule ^branch/master/overview/faq.html faq.html [R,L]
|
||||
RewriteRule ^branch/master/overview/features.html index.html#features [R,L]
|
||||
RewriteRule ^branch/master/overview/support.html support.html [R,L]
|
||||
|
||||
########
|
||||
# Redirect all 404s to the root:
|
||||
########
|
||||
ErrorDocument 404 /
|
||||
#RewriteCond %{REQUEST_FILENAME} !-f
|
||||
#RewriteRule . / [L,R=301]
|
||||
|
@ -1,220 +0,0 @@
|
||||
|
||||
/* ==========================================================================
|
||||
Author's custom styles
|
||||
========================================================================== */
|
||||
body
|
||||
{
|
||||
padding-top: 60px;
|
||||
padding-bottom: 40px;
|
||||
color: #525252;
|
||||
background-color: #FCFCFA;
|
||||
font-family:'Maven Pro', sans-serif !important;
|
||||
}
|
||||
|
||||
@media (max-width: 979px)
|
||||
{
|
||||
body
|
||||
{
|
||||
padding-top: 0px;
|
||||
}
|
||||
}
|
||||
code, pre { color: #f87436; }
|
||||
pre
|
||||
{
|
||||
overflow: -moz-scrollbars-vertical;
|
||||
overflow: scroll;
|
||||
word-wrap: normal !important;
|
||||
white-space: pre !important;
|
||||
}
|
||||
.nav > .brand > img
|
||||
{
|
||||
margin-top: -13px;
|
||||
}
|
||||
.dropdown-menu
|
||||
{
|
||||
min-width: 220px;
|
||||
}
|
||||
.dropdown-menu.api
|
||||
{
|
||||
padding-left: 20px;
|
||||
}
|
||||
h1, h2, h3
|
||||
{
|
||||
margin: 20px 0;
|
||||
}
|
||||
h1 {
|
||||
font-size: 30px;
|
||||
color: #00abc7;
|
||||
}
|
||||
h2
|
||||
{
|
||||
font-size: 24px;
|
||||
color: #00abc7;
|
||||
}
|
||||
h3
|
||||
{
|
||||
color: #00ABC7;
|
||||
}
|
||||
h2, h3, h4, h5
|
||||
{
|
||||
margin-top: -40px;
|
||||
padding-top: 60px;
|
||||
}
|
||||
|
||||
.alert .alert-heading
|
||||
{
|
||||
padding-top: 0px;
|
||||
}
|
||||
|
||||
.navbar-inverse .brand, .navbar-inverse .nav > li > a,
|
||||
.navbar-inverse .nav-collapse .nav > li > a,
|
||||
.navbar-inverse .nav-collapse .dropdown-menu a
|
||||
{
|
||||
color: #6C6C6C;
|
||||
text-shadow:none;
|
||||
font-size:13px;
|
||||
font-weight: normal;
|
||||
text-transform:uppercase;
|
||||
}
|
||||
.navbar-inverse .brand, .navbar-inverse .nav > li > a:hover
|
||||
{
|
||||
color: #F87436 !important;
|
||||
}
|
||||
.navbar-inverse .nav li.dropdown.open > .dropdown-toggle,
|
||||
.navbar-inverse .nav li.dropdown.active > .dropdown-toggle,
|
||||
.navbar-inverse .nav li.dropdown.open.active > .dropdown-toggle
|
||||
{
|
||||
background-color:#525252;
|
||||
}
|
||||
.dropdown-menu > .active > a,
|
||||
.dropdown-menu > .active > a:hover,
|
||||
.dropdown-menu > .active > a:focus
|
||||
{
|
||||
background-color: #66ccdd;
|
||||
background-image:linear-gradient(to bottom, #66ccdd, #66ccdd);
|
||||
}
|
||||
.navbar-inverse .btn-navbar
|
||||
{
|
||||
background-color:#F87436;
|
||||
background-image:linear-gradient(to bottom, #F87436, #F7631E)
|
||||
}
|
||||
.navbar-inverse .btn-navbar:hover
|
||||
{
|
||||
background-color:#F7631E;
|
||||
}
|
||||
.navbar-inverse .nav-collapse .nav > li > a:hover
|
||||
{
|
||||
background-color:#FFFfff;
|
||||
}
|
||||
.navbar-inverse .nav-collapse .dropdown-menu a:hover
|
||||
{
|
||||
background:none;
|
||||
color:#F87436;
|
||||
}
|
||||
.navbar-inverse .nav .active > a:hover,
|
||||
.navbar-inverse .nav .active > a:focus {
|
||||
color:#005580;
|
||||
text-decoration:underline;
|
||||
}
|
||||
.navbar-inverse .navbar-inner
|
||||
{
|
||||
background: #66ccdd;
|
||||
/* fallback for non-supporting browsers */
|
||||
background-image: -webkit-gradient(radial,33% 25px,0,center center,141,from(#ddf3f7),to(#00abc7));
|
||||
/* old WebKit Syntax */
|
||||
background-image: -webkit-radial-gradient(33% 25px,circle contain,#ddf3f7 0%,#00abc7 5000%);
|
||||
/* New WebKit syntax */
|
||||
background-image: -moz-radial-gradient(33% 25px,circle contain,#ddf3f7 0%,#00abc7 5000%);
|
||||
background-image: -ms-radial-gradient(33% 25px,circle contain,#ddf3f7 0%,#00abc7 5000%);
|
||||
/* IE10+ */
|
||||
background-image: -o-radial-gradient(33% 25px,circle contain,#ddf3f7 0%,#00abc7 5000%);
|
||||
/* Opera (13?) */
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#D1F8FF',endColorstr='#00859b',GradientType=0);
|
||||
-webkit-box-shadow: inset 0 1px 5px rgba(100,100,100,.2), inset 0 -1px 5px rgba(100,100,100,.2);
|
||||
-moz-box-shadow: inset 0 1px 5px rgba(0,100,100,.2), inset 0 -1px 5px rgba(100,100,100,.2);
|
||||
box-shadow: inset 0 1px 5px rgba(100,100,100,.2), inset 0 -1px 7px rgba(100,100,100,.2);
|
||||
border:none;
|
||||
}
|
||||
|
||||
.content { width: 80%; max-width: 960px; }
|
||||
|
||||
a:hover {
|
||||
-webkit-transition: all .15s ease-in-out;
|
||||
-moz-transition: all .15s ease-in-out;
|
||||
transition: all .15s ease-in-out;
|
||||
}
|
||||
a:visited
|
||||
{
|
||||
color: #0088CC;
|
||||
}
|
||||
.reference-table thead { font-weight: bold; }
|
||||
.dropdown-menu .divider { margin-right: 20px; }
|
||||
|
||||
.dropdown-backdrop
|
||||
{
|
||||
position: static;
|
||||
}
|
||||
|
||||
div .accordion-heading { padding: 8px; }
|
||||
dt { font-weight: normal; }
|
||||
|
||||
.accordion-inner { padding: 9px 0px; }
|
||||
.accordion-inner .nav-list { padding-right: 2px; padding-left: 10px; }
|
||||
|
||||
h3 a, a:hover, a:focus, :visited {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
h4 a, a:hover, a:focus, :visited {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.label-object {
|
||||
background-color: #00abc7;
|
||||
}
|
||||
.label-array {
|
||||
background-color: #C71B00;
|
||||
}
|
||||
.label-function {
|
||||
background-color: #00C71B;
|
||||
}
|
||||
.label-integer {
|
||||
background-color: #FF5B42;
|
||||
}
|
||||
.label-string {
|
||||
background-color: #00abc7;
|
||||
}
|
||||
.label-boolean {
|
||||
background-color: #00859b;
|
||||
}
|
||||
.label-htmlelement {
|
||||
background-color: #f87436;
|
||||
}
|
||||
.label-qq.promise {
|
||||
background-color: #C700AC;
|
||||
}
|
||||
.label-xmlhttprequest {
|
||||
background-color: #66ccdd;
|
||||
}
|
||||
.label-xmldomainrequest {
|
||||
background-color: #66ccdd;
|
||||
}
|
||||
.label-undefined {
|
||||
background-color: #525252;
|
||||
}
|
||||
.label-null {
|
||||
background-color: #525252;
|
||||
}
|
||||
.label-other {
|
||||
background-color: #7c858c;
|
||||
}
|
||||
|
||||
.event-params > hr {
|
||||
margin: 2px;
|
||||
}
|
||||
.method-return > hr {
|
||||
margin: 2px;
|
||||
}
|
||||
.method-params > hr {
|
||||
margin: 2px;
|
||||
}
|
@ -1,69 +0,0 @@
|
||||
.hll { background-color: #ffffcc }
|
||||
.c { color: #8f5902; font-style: italic } /* Comment */
|
||||
.err { color: #a40000; border: 0px solid #ef2929 } /* Error */
|
||||
.g { color: #000000 } /* Generic */
|
||||
.k { color: #204a87; font-weight: bold } /* Keyword */
|
||||
.l { color: #000000 } /* Literal */
|
||||
.n { color: #000000 } /* Name */
|
||||
.o { color: #ce5c00; font-weight: bold } /* Operator */
|
||||
.x { color: #000000 } /* Other */
|
||||
.p { color: #000000; font-weight: bold } /* Punctuation */
|
||||
.cm { color: #8f5902; font-style: italic } /* Comment.Multiline */
|
||||
.cp { color: #8f5902; font-style: italic } /* Comment.Preproc */
|
||||
.c1 { color: #8f5902; font-style: italic } /* Comment.Single */
|
||||
.cs { color: #8f5902; font-style: italic } /* Comment.Special */
|
||||
.gd { color: #a40000 } /* Generic.Deleted */
|
||||
.ge { color: #000000; font-style: italic } /* Generic.Emph */
|
||||
.gr { color: #ef2929 } /* Generic.Error */
|
||||
.gh { color: #000080; font-weight: bold } /* Generic.Heading */
|
||||
.gi { color: #00A000 } /* Generic.Inserted */
|
||||
.go { color: #000000; font-style: italic } /* Generic.Output */
|
||||
.gp { color: #8f5902 } /* Generic.Prompt */
|
||||
.gs { color: #000000; font-weight: bold } /* Generic.Strong */
|
||||
.gu { color: #800080; font-weight: bold } /* Generic.Subheading */
|
||||
.gt { color: #a40000; font-weight: bold } /* Generic.Traceback */
|
||||
.kc { color: #204a87; font-weight: bold } /* Keyword.Constant */
|
||||
.kd { color: #204a87; font-weight: bold } /* Keyword.Declaration */
|
||||
.kn { color: #204a87; font-weight: bold } /* Keyword.Namespace */
|
||||
.kp { color: #204a87; font-weight: bold } /* Keyword.Pseudo */
|
||||
.kr { color: #204a87; font-weight: bold } /* Keyword.Reserved */
|
||||
.kt { color: #204a87; font-weight: bold } /* Keyword.Type */
|
||||
.ld { color: #000000 } /* Literal.Date */
|
||||
.m { color: #0000cf; font-weight: bold } /* Literal.Number */
|
||||
.s { color: #4e9a06 } /* Literal.String */
|
||||
.na { color: #c4a000 } /* Name.Attribute */
|
||||
.nb { color: #204a87 } /* Name.Builtin */
|
||||
.nc { color: #000000 } /* Name.Class */
|
||||
.no { color: #000000 } /* Name.Constant */
|
||||
.nd { color: #5c35cc; font-weight: bold } /* Name.Decorator */
|
||||
.ni { color: #ce5c00 } /* Name.Entity */
|
||||
.ne { color: #cc0000; font-weight: bold } /* Name.Exception */
|
||||
.nf { color: #000000 } /* Name.Function */
|
||||
.nl { color: #f57900 } /* Name.Label */
|
||||
.nn { color: #000000 } /* Name.Namespace */
|
||||
.nx { color: #000000 } /* Name.Other */
|
||||
.py { color: #000000 } /* Name.Property */
|
||||
.nt { color: #204a87; font-weight: bold } /* Name.Tag */
|
||||
.nv { color: #000000 } /* Name.Variable */
|
||||
.ow { color: #204a87; font-weight: bold } /* Operator.Word */
|
||||
.w { color: #f8f8f8; text-decoration: underline } /* Text.Whitespace */
|
||||
.mf { color: #0000cf; font-weight: bold } /* Literal.Number.Float */
|
||||
.mh { color: #0000cf; font-weight: bold } /* Literal.Number.Hex */
|
||||
.mi { color: #0000cf; font-weight: bold } /* Literal.Number.Integer */
|
||||
.mo { color: #0000cf; font-weight: bold } /* Literal.Number.Oct */
|
||||
.sb { color: #4e9a06 } /* Literal.String.Backtick */
|
||||
.sc { color: #4e9a06 } /* Literal.String.Char */
|
||||
.sd { color: #8f5902; font-style: italic } /* Literal.String.Doc */
|
||||
.s2 { color: #4e9a06 } /* Literal.String.Double */
|
||||
.se { color: #4e9a06 } /* Literal.String.Escape */
|
||||
.sh { color: #4e9a06 } /* Literal.String.Heredoc */
|
||||
.si { color: #4e9a06 } /* Literal.String.Interpol */
|
||||
.sx { color: #4e9a06 } /* Literal.String.Other */
|
||||
.sr { color: #4e9a06 } /* Literal.String.Regex */
|
||||
.s1 { color: #4e9a06 } /* Literal.String.Single */
|
||||
.ss { color: #4e9a06 } /* Literal.String.Symbol */
|
||||
.bp { color: #3465a4 } /* Name.Builtin.Pseudo */
|
||||
.vc { color: #000000 } /* Name.Variable.Class */
|
||||
.vg { color: #000000 } /* Name.Variable.Global */
|
||||
.vi { color: #000000 } /* Name.Variable.Instance */
|
||||
.il { color: #0000cf; font-weight: bold } /* Literal.Number.Integer.Long */
|
@ -1,18 +0,0 @@
|
||||
/**
|
||||
* Resize the dropdown-menu's to be 80% of the window height, and
|
||||
* add an overflow-y property so that all the elements are shown.
|
||||
*/
|
||||
$(function() {
|
||||
'use strict';
|
||||
|
||||
var setDropdownHeight = function(){
|
||||
|
||||
var dropdownHeight = $(window.top).height() * .80; // take 80% of the current window height
|
||||
$('.dropdown-menu').css('max-height', dropdownHeight);
|
||||
$('.dropdown-menu').css('overflow-y', 'auto');
|
||||
|
||||
};
|
||||
|
||||
$(window).resize(setDropdownHeight);
|
||||
setDropdownHeight();
|
||||
});
|