import * as mongodb from 'mongodb';
import * as crypto from 'crypto';
import * as gm from 'gm';
import * as debug from 'debug';
import fileType = require('file-type');
import prominence = require('prominence');
import DriveFile, { getGridFSBucket } from '../models/drive-file';
import DriveFolder from '../models/drive-folder';
import serialize from '../serializers/drive-file';
import event from '../event';
import config from '../../conf';
import { Buffer } from 'buffer';
import * as fs from 'fs';
import * as tmp from 'tmp';
import * as stream from 'stream';
// Namespaced debug logger for the drive-file registration flow.
const log = debug('misskey:register-drive-file');
const tmpFile = (): Promise<string> => new Promise((resolve, reject) => {
|
|
|
|
tmp.file((e, path) => {
|
2017-11-14 03:47:42 +09:00
|
|
|
if (e) return reject(e);
|
|
|
|
resolve(path);
|
|
|
|
});
|
|
|
|
});
const addToGridFS = (name: string, readable: stream.Readable, type: string, metadata: any): Promise<any> =>
|
|
|
|
getGridFSBucket()
|
|
|
|
.then(bucket => new Promise((resolve, reject) => {
|
|
|
|
const writeStream = bucket.openUploadStream(name, { contentType: type, metadata });
|
|
|
|
writeStream.once('finish', (doc) => { resolve(doc); });
|
|
|
|
writeStream.on('error', reject);
|
|
|
|
readable.pipe(writeStream);
|
2017-11-14 03:47:42 +09:00
|
|
|
}));
/**
|
|
|
|
* Add file to drive
|
|
|
|
*
|
|
|
|
* @param user User who wish to add file
|
2017-11-14 04:39:21 +09:00
|
|
|
* @param file File path or readableStream
|
2016-12-29 07:49:51 +09:00
|
|
|
* @param comment Comment
|
|
|
|
* @param type File type
|
|
|
|
* @param folderId Folder ID
|
|
|
|
* @param force If set to true, forcibly upload the file even if there is a file with the same hash.
|
|
|
|
* @return Object that represents added file
|
|
|
|
*/
|
|
|
|
export default (
|
|
|
|
user: any,
|
2017-11-14 04:39:21 +09:00
|
|
|
file: string | stream.Readable,
|
2016-12-29 07:49:51 +09:00
|
|
|
name: string = null,
|
|
|
|
comment: string = null,
|
|
|
|
folderId: mongodb.ObjectID = null,
|
|
|
|
force: boolean = false
|
2017-11-14 03:46:30 +09:00
|
|
|
) => new Promise<any>((resolve, reject) => {
|
2017-02-06 21:11:09 +09:00
|
|
|
log(`registering ${name} (user: ${user.username})`);
|
|
|
|
|
2017-11-14 03:46:30 +09:00
|
|
|
// Get file path
|
|
|
|
new Promise((res: (v: string) => void, rej) => {
|
|
|
|
if (typeof file === 'string') {
|
2017-11-14 03:47:42 +09:00
|
|
|
res(file);
|
|
|
|
return;
|
2016-12-29 07:49:51 +09:00
|
|
|
}
|
2017-11-14 03:46:30 +09:00
|
|
|
if (typeof file === 'object' && typeof file.read === 'function') {
|
|
|
|
tmpFile()
|
|
|
|
.then(path => {
|
2017-11-14 03:47:42 +09:00
|
|
|
const readable: stream.Readable = file;
|
|
|
|
const writable = fs.createWriteStream(path);
|
2017-11-14 03:46:30 +09:00
|
|
|
readable
|
|
|
|
.on('error', rej)
|
|
|
|
.on('end', () => {
|
2017-11-14 03:47:42 +09:00
|
|
|
res(path);
|
2017-11-14 03:46:30 +09:00
|
|
|
})
|
|
|
|
.pipe(writable)
|
2017-11-14 03:47:42 +09:00
|
|
|
.on('error', rej);
|
2017-11-14 03:46:30 +09:00
|
|
|
})
|
2017-11-14 03:47:42 +09:00
|
|
|
.catch(rej);
|
2016-12-29 07:49:51 +09:00
|
|
|
}
|
2017-11-14 03:47:42 +09:00
|
|
|
rej(new Error('un-compatible file.'));
|
2017-11-14 03:46:30 +09:00
|
|
|
})
|
|
|
|
// Calculate hash, get content type and get file size
|
|
|
|
.then(path => Promise.all([
|
|
|
|
path,
|
|
|
|
// hash
|
|
|
|
((): Promise<string> => new Promise((res, rej) => {
|
2017-11-14 03:47:42 +09:00
|
|
|
const readable = fs.createReadStream(path);
|
|
|
|
const hash = crypto.createHash('md5');
|
2017-11-14 04:11:53 +09:00
|
|
|
const chunks = [];
|
2017-11-14 03:46:30 +09:00
|
|
|
readable
|
|
|
|
.on('error', rej)
|
|
|
|
.pipe(hash)
|
2017-11-14 04:11:53 +09:00
|
|
|
.on('error', rej)
|
|
|
|
.on('data', (chunk) => chunks.push(chunk))
|
|
|
|
.on('end', () => {
|
|
|
|
const buffer = Buffer.concat(chunks);
|
|
|
|
res(buffer.toString('hex'));
|
|
|
|
});
|
2017-11-14 03:46:30 +09:00
|
|
|
}))(),
|
|
|
|
// mime
|
|
|
|
((): Promise<[string, string | null]> => new Promise((res, rej) => {
|
2017-11-14 03:47:42 +09:00
|
|
|
const readable = fs.createReadStream(path);
|
2017-11-14 03:46:30 +09:00
|
|
|
readable
|
|
|
|
.on('error', rej)
|
|
|
|
.once('data', (buffer: Buffer) => {
|
2017-11-14 03:47:42 +09:00
|
|
|
readable.destroy();
|
|
|
|
const type = fileType(buffer);
|
2017-11-14 03:46:30 +09:00
|
|
|
if (!type) {
|
2017-11-14 03:47:42 +09:00
|
|
|
return res(['application/octet-stream', null]);
|
2017-11-14 03:46:30 +09:00
|
|
|
}
|
2017-11-14 03:47:42 +09:00
|
|
|
return res([type.mime, type.ext]);
|
|
|
|
});
|
2017-11-14 03:46:30 +09:00
|
|
|
}))(),
|
|
|
|
// size
|
|
|
|
((): Promise<number> => new Promise((res, rej) => {
|
|
|
|
fs.stat(path, (err, stats) => {
|
2017-11-14 03:47:42 +09:00
|
|
|
if (err) return rej(err);
|
|
|
|
res(stats.size);
|
|
|
|
});
|
2017-11-14 03:46:30 +09:00
|
|
|
}))()
|
|
|
|
]))
|
|
|
|
.then(async ([path, hash, [mime, ext], size]) => {
|
2017-11-14 03:47:42 +09:00
|
|
|
log(`hash: ${hash}, mime: ${mime}, ext: ${ext}, size: ${size}`);
|
2017-11-14 03:46:30 +09:00
|
|
|
|
|
|
|
// detect name
|
|
|
|
const detectedName: string = name || (ext ? `untitled.${ext}` : 'untitled');
|
|
|
|
|
|
|
|
if (!force) {
|
|
|
|
// Check if there is a file with the same hash
|
|
|
|
const much = await DriveFile.findOne({
|
|
|
|
md5: hash,
|
|
|
|
'metadata.user_id': user._id
|
|
|
|
});
|
|
|
|
|
|
|
|
if (much !== null) {
|
|
|
|
log('file with same hash is found');
|
|
|
|
return resolve(much);
|
|
|
|
} else {
|
|
|
|
log('file with same hash is not found');
|
|
|
|
}
|
|
|
|
}
|
2016-12-29 07:49:51 +09:00
|
|
|
|
2017-11-14 03:46:30 +09:00
|
|
|
const [properties, folder] = await Promise.all([
|
|
|
|
// properties
|
|
|
|
(async () => {
|
|
|
|
if (!/^image\/.*$/.test(mime)) {
|
2017-11-14 03:47:42 +09:00
|
|
|
return null;
|
2017-11-14 03:46:30 +09:00
|
|
|
}
|
|
|
|
// If the file is an image, calculate width and height to save in property
|
2017-11-14 03:56:39 +09:00
|
|
|
const g = gm(fs.createReadStream(path), name);
|
2017-11-14 03:46:30 +09:00
|
|
|
const size = await prominence(g).size();
|
|
|
|
const properties = {
|
|
|
|
width: size.width,
|
|
|
|
height: size.height
|
|
|
|
};
|
|
|
|
log('image width and height is calculated');
|
2017-11-14 03:47:42 +09:00
|
|
|
return properties;
|
2017-11-14 03:46:30 +09:00
|
|
|
})(),
|
|
|
|
// folder
|
|
|
|
(async () => {
|
|
|
|
if (!folderId) {
|
2017-11-14 03:47:42 +09:00
|
|
|
return null;
|
2017-11-14 03:46:30 +09:00
|
|
|
}
|
|
|
|
const driveFolder = await DriveFolder.findOne({
|
|
|
|
_id: folderId,
|
|
|
|
user_id: user._id
|
2017-11-14 03:47:42 +09:00
|
|
|
});
|
2017-11-14 03:46:30 +09:00
|
|
|
if (!driveFolder) {
|
2017-11-14 03:47:42 +09:00
|
|
|
throw 'folder-not-found';
|
2017-11-14 03:46:30 +09:00
|
|
|
}
|
2017-11-14 03:47:42 +09:00
|
|
|
return driveFolder;
|
2017-11-14 03:46:30 +09:00
|
|
|
})(),
|
|
|
|
// usage checker
|
|
|
|
(async () => {
|
|
|
|
// Calculate drive usage
|
|
|
|
const usage = await DriveFile
|
|
|
|
.aggregate([
|
|
|
|
{ $match: { 'metadata.user_id': user._id } },
|
|
|
|
{
|
|
|
|
$project: {
|
|
|
|
length: true
|
|
|
|
}
|
|
|
|
},
|
|
|
|
{
|
|
|
|
$group: {
|
|
|
|
_id: null,
|
|
|
|
usage: { $sum: '$length' }
|
|
|
|
}
|
|
|
|
}
|
|
|
|
])
|
|
|
|
.then((aggregates: any[]) => {
|
|
|
|
if (aggregates.length > 0) {
|
2017-11-14 03:47:42 +09:00
|
|
|
return aggregates[0].usage;
|
2017-11-14 03:46:30 +09:00
|
|
|
}
|
2017-11-14 03:47:42 +09:00
|
|
|
return 0;
|
2017-11-14 03:46:30 +09:00
|
|
|
});
|
|
|
|
|
|
|
|
log(`drive usage is ${usage}`);
|
|
|
|
|
|
|
|
// If usage limit exceeded
|
|
|
|
if (usage + size > user.drive_capacity) {
|
|
|
|
throw 'no-free-space';
|
|
|
|
}
|
|
|
|
})()
|
2017-11-14 03:47:42 +09:00
|
|
|
]);
|
2017-11-14 03:46:30 +09:00
|
|
|
|
2017-11-14 03:47:42 +09:00
|
|
|
const readable = fs.createReadStream(path);
|
2017-11-14 03:46:30 +09:00
|
|
|
|
2017-11-14 04:28:51 +09:00
|
|
|
return addToGridFS(detectedName, readable, mime, {
|
2017-11-14 03:46:30 +09:00
|
|
|
user_id: user._id,
|
|
|
|
folder_id: folder !== null ? folder._id : null,
|
|
|
|
comment: comment,
|
|
|
|
properties: properties
|
2017-11-14 03:47:42 +09:00
|
|
|
});
|
2017-11-14 03:46:30 +09:00
|
|
|
})
|
|
|
|
.then(file => {
|
|
|
|
log(`drive file has been created ${file._id}`);
|
2017-11-14 03:47:42 +09:00
|
|
|
resolve(file);
|
2017-11-14 03:46:30 +09:00
|
|
|
|
2017-11-14 04:28:51 +09:00
|
|
|
serialize(file)
|
|
|
|
.then(serializedFile => {
|
|
|
|
// Publish drive_file_created event
|
|
|
|
event(user._id, 'drive_file_created', serializedFile);
|
|
|
|
|
|
|
|
// Register to search database
|
|
|
|
if (config.elasticsearch.enable) {
|
|
|
|
const es = require('../../db/elasticsearch');
|
|
|
|
es.index({
|
|
|
|
index: 'misskey',
|
|
|
|
type: 'drive_file',
|
|
|
|
id: file._id.toString(),
|
|
|
|
body: {
|
|
|
|
name: file.name,
|
|
|
|
user_id: user._id.toString()
|
|
|
|
}
|
|
|
|
});
|
2017-11-14 03:46:30 +09:00
|
|
|
}
|
|
|
|
});
|
|
|
|
})
|
2017-11-14 03:47:42 +09:00
|
|
|
.catch(reject);
|
2016-12-29 07:49:51 +09:00
|
|
|
});