Merge pull request #11018 from geoffreak/s3-upload-stream-typing

s3-upload-stream typing [types-2.0]
This commit is contained in:
Andy 2016-09-20 08:02:18 -07:00 committed by GitHub
commit 8c2d6efe1d
3 changed files with 64 additions and 0 deletions

24
s3-upload-stream/index.d.ts vendored Normal file
View File

@@ -0,0 +1,24 @@
// Type definitions for s3-upload-stream
// Project: https://github.com/nathanpeck/s3-upload-stream
// Definitions by: Joshua DeVinney <https://github.com/geoffreak>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/// <reference types="node" />
/// <reference types="aws-sdk" />
import * as stream from 'stream';
import * as AWS from 'aws-sdk';
declare namespace s3Stream {
/**
 * Uploader produced by calling the module function with a configured
 * AWS.S3 client; creates write streams for individual multipart uploads.
 */
export interface S3StreamUploader {
/**
 * Start a multipart upload to the S3 destination described by
 * `destinationDetails` (bucket, key, ACL, content type, ...).
 *
 * @param destinationDetails S3 PutObject parameters identifying the target object.
 * @param sessionDetails Optional state from a previously started multipart
 *                       session — presumably used to resume it; untyped
 *                       upstream, hence `any`. TODO confirm against library docs.
 * @returns A writable stream; pipe the source data into it.
 */
upload(destinationDetails: AWS.s3.PutObjectRequest, sessionDetails?: any): S3WriteStream;
}
/** Writable stream for one in-progress S3 multipart upload. */
export interface S3WriteStream extends stream.Writable {
/** Set the size of each uploaded part, in bytes. */
maxPartSize(sizeInBytes: number): void;
/** Set how many parts may be uploaded concurrently. */
concurrentParts(numberOfParts: number): void;
}
}
/**
 * Module entry point: bind the uploader factory to an AWS.S3 client instance.
 */
declare function s3Stream(client: AWS.S3): s3Stream.S3StreamUploader;
export = s3Stream;

View File

@@ -0,0 +1,21 @@
/// <reference types="node" />
/// <reference path="../aws-sdk/index.d.ts" />
import * as fs from 'fs';
import * as S3Stream from 's3-upload-stream';
import * as AWS from 'aws-sdk';
// Bind the uploader factory to a configured S3 client.
// `const` instead of `var`: these bindings are never reassigned, and the
// file already uses ES-module syntax (ES6-era code), so block scoping is
// the idiomatic choice.
const s3Stream = S3Stream(new AWS.S3());

// Source: a local file streamed into S3 as a multipart upload.
const read = fs.createReadStream('/path/to/a/file');

// Exercise the typed upload(destinationDetails) overload; the object
// literal must type-check against AWS.s3.PutObjectRequest.
const upload = s3Stream.upload({
    Bucket: "bucket-name",
    Key: "key-name",
    ACL: "public-read",
    StorageClass: "REDUCED_REDUNDANCY",
    ContentType: "binary/octet-stream"
});

// Exercise the S3WriteStream configuration methods.
upload.concurrentParts(5);

// S3WriteStream extends stream.Writable, so it is a valid pipe target.
read.pipe(upload);

View File

@@ -0,0 +1,19 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es6",
"noImplicitAny": true,
"strictNullChecks": false,
"baseUrl": "../",
"typeRoots": [
"../"
],
"types": [],
"noEmit": true,
"forceConsistentCasingInFileNames": true
},
"files": [
"index.d.ts",
"s3-upload-stream-tests.ts"
]
}