initial commit
This commit is contained in:
commit
90eb152860
|
@ -0,0 +1,12 @@
|
|||
/target
|
||||
/dist
|
||||
**/*.rs.bk
|
||||
Cargo.lock
|
||||
bin/
|
||||
pkg/
|
||||
wasm-pack.log
|
||||
worker/
|
||||
node_modules/
|
||||
.cargo-ok
|
||||
wrangler.toml
|
||||
config.json
|
|
@ -0,0 +1 @@
|
|||
// Re-export blob-polyfill's Blob. webpack.config.js aliases this file as
// 'blob-shim' and uses ProvidePlugin to expose it as the global `Blob`.
module.exports = require("blob-polyfill").Blob
|
|
@ -0,0 +1,17 @@
|
|||
// Worker entry point. All request handling lives in the CoffeeScript
// module: `main` registers the `fetch` event listener itself (see
// src/index.coffee), so this file only needs to invoke it once at startup.
// The old commented-out "Hello worker" template code has been removed.
import main from "./src/index.coffee"

main.apply(this)
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,21 @@
|
|||
{
|
||||
"private": true,
|
||||
"name": "paste",
|
||||
"version": "1.0.0",
|
||||
"description": "A template for kick starting a Cloudflare Workers project",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"format": "prettier --write '**/*.{js,css,json,md}'"
|
||||
},
|
||||
"author": "Peter Cai <peter@typeblog.net>",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"aws-sdk": "^2.619.0",
|
||||
"blob-polyfill": "^4.0.20190430",
|
||||
"coffee-loader": "^0.9.0",
|
||||
"coffeescript": "^2.5.1",
|
||||
"json-loader": "^0.5.7",
|
||||
"webpack": "^4.41.6"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,67 @@
|
|||
import * as util from './util'
import * as s3 from './s3'
import _ from './prelude'
# NOTE: `_` is null (see ./prelude); it is passed as a dummy argument so that
# zero-argument CoffeeScript calls like `f _` actually invoke `f`.

# Configure the AWS SDK, then register the worker's fetch handler.
main = ->
  s3.loadAWSConfig _

  addEventListener 'fetch', (event) =>
    event.respondWith handleRequest event

# Build a 400 response; `msg` defaults to "Invalid Request".
buildInvalidResponse = (msg) ->
  if not msg
    msg = "Invalid Request"
  new Response msg,
    status: 400

# Route a request: extract the paste file name, then dispatch by method.
handleRequest = (event) ->
  # Validate file name first, since this is shared logic
  file = util.getFileName event.request.url
  if not file
    return buildInvalidResponse _

  # Handle PUT and GET separately
  if event.request.method == "PUT"
    handlePUT event.request, file
  else if event.request.method == "GET"
    handleGET event.request, file
  else
    buildInvalidResponse _

# Store an uploaded paste in S3 under a freshly generated random path.
handlePUT = (req, file) ->
  if not util.validateLength req
    return buildInvalidResponse "Maximum upload size: " + util.MAX_UPLOAD_SIZE

  # Generate a valid ID first
  # (keep drawing random IDs until the derived S3 prefix has no objects)
  id = null
  path = null
  loop
    id = util.randomID _
    path = util.idToPath id
    files = await s3.listFiles path
    break if !files or files.length == 0

  path = path + "/" + file
  len = req.headers.get "content-length"

  # Upload the file to S3
  try
    await s3.uploadFile
      Key: path
      ContentType: req.headers.get "content-type"
      ContentLength: len
      Body: await util.readToBlob req.body
  catch err
    console.log err
    return buildInvalidResponse err

  # Simply return the path in body
  new Response "/paste/" + id,
    status: 200

# Placeholder GET handler — paste retrieval is not implemented yet.
handleGET = (req, file) ->
  new Response "Hello, Coffee! file: " + file,
    headers:
      "content-type": "text/plain"

export default main
|
|
@ -0,0 +1 @@
|
|||
# `_` is a deliberately-null placeholder argument: CoffeeScript only emits a
# call when an argument list is present, so `f _` means "call f (with no
# meaningful argument)". Imported by the other modules for that purpose.
export default _ = null
|
|
@ -0,0 +1,26 @@
|
|||
import config from '../config.json'
import AWS from 'aws-sdk'
import _ from './prelude'
# NOTE: `_` is null and exists only to force zero-argument calls (`f _`).

# Apply the `aws` section of config.json to the global AWS SDK config.
loadAWSConfig = ->
  AWS.config.update config.aws

# Build an S3 client pointed at the custom endpoint from config.json.
getS3 = ->
  new AWS.S3
    endpoint: new AWS.Endpoint config.s3.endpoint

# PUT an object; mutates `params` to add the configured Bucket and returns
# the AWS request promise.
uploadFile = (params) ->
  params['Bucket'] = config.s3.bucket
  getS3 _
    .putObject params
    .promise _

# List object summaries under `path`; resolves to the `Contents` array.
# May be undefined or empty when nothing matches — callers check for both.
listFiles = (path) ->
  (await getS3 _
    .listObjects
      Bucket: config.s3.bucket
      Prefix: path
    .promise _)
    .Contents

export { loadAWSConfig, uploadFile, listFiles }
|
|
@ -0,0 +1,53 @@
|
|||
# Maximum upload size (in bytes)
MAX_UPLOAD_SIZE = 10 * 1024 * 1024 # 10 MB

# Validate content-length header.
# Returns false when the header is missing or unparseable (NaN compares
# false) as well as when it exceeds MAX_UPLOAD_SIZE.
validateLength = (req) ->
  (Number.parseInt (req.headers.get "content-length"), 10) <= MAX_UPLOAD_SIZE

# Only accept paths like `/paste/:file_name`
# No further slashes are supported; returns null for anything else.
getFileName = (url) ->
  url = new URL url
  if url.pathname[0] isnt '/'
    return null
  parts = url.pathname.split '/'
  if parts.length isnt 3
    return null
  if parts[1] isnt 'paste'
    return null
  return parts[2]

# Generate random file ID
DICTIONARY = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
ID_LENGTH = 6

# Returns an ID of exactly ID_LENGTH characters drawn from DICTIONARY.
# Uses the exclusive range `[0...ID_LENGTH]`: the previous inclusive
# `[0..ID_LENGTH]` was an off-by-one that produced ID_LENGTH + 1 characters.
# NOTE(review): Math.random is not cryptographically secure — acceptable
# only if guessable paste IDs are an accepted tradeoff.
randomID = ->
  [0...ID_LENGTH].map =>
    DICTIONARY[Math.floor Math.random() * DICTIONARY.length]
  .join ''

# Convert a random ID into file path (one directory level per character)
idToPath = (id) ->
  id.split ''
    .join '/'

# Convert a ReadableStream into Blob
# AWS-SDK does not support ReadableStream, unfortunately
readToBlob = (stream) ->
  reader = stream.getReader()
  ret = []
  loop
    { done, value } = await reader.read()
    break if done
    ret.push value
  new Blob ret

export {
  getFileName,
  validateLength,
  MAX_UPLOAD_SIZE,
  randomID,
  idToPath,
  readToBlob
}
|
|
@ -0,0 +1,49 @@
|
|||
const webpack = require('webpack')
const path = require('path')

// Bundles the CoffeeScript worker into a single "webworker"-target script
// for Cloudflare Workers; the plugins below shim aws-sdk for that runtime.
module.exports = {
  target: "webworker",
  entry: "./index.js",
  mode: "production",
  optimization: {
    // We do not want to minimize our code.
    minimize: false
  },
  resolve: {
    extensions: ['.js', '.coffee'],
    alias: {
      // Local Blob polyfill wrapper, exposed globally via ProvidePlugin below.
      'blob-shim': path.resolve(__dirname, './blob-shim.js'),
    }
  },
  plugins: [
    new webpack.NormalModuleReplacementPlugin(
      // Rewritten xhr.js to use Fetch API
      // Mostly from <https://github.com/aws/aws-sdk-js/issues/2807>
      // Modified to fix a few bugs
      /node_modules\/aws-sdk\/lib\/http\/xhr.js/,
      '../../../../xhr-shim.js'
    ),
    new webpack.NormalModuleReplacementPlugin(
      // Force it to use node_parser
      // Because we are not actually in browser
      /node_modules\/aws-sdk\/lib\/xml\/browser_parser.js/,
      './node_parser.js'
    ),
    new webpack.ProvidePlugin({
      'Blob': 'blob-shim'
    })
  ],
  module: {
    rules: [
      {
        test: /\.coffee$/,
        use: [ 'coffee-loader' ]
      },
      {
        type: 'javascript/auto', // Needed for aws-sdk
        test: /\.json$/,
        use: [ 'json-loader' ]
      }
    ]
  }
}
|
|
@ -0,0 +1,97 @@
|
|||
var AWS = require('./node_modules/aws-sdk/lib/core');
var EventEmitter = require('events').EventEmitter;
require('./node_modules/aws-sdk/lib/http');

/**
 * Fetch-API replacement for aws-sdk's XHR transport so the SDK can run
 * where XMLHttpRequest does not exist (e.g. Cloudflare Workers).
 * Mostly from <https://github.com/aws/aws-sdk-js/issues/2807>.
 * @api private
 */
AWS.XHRClient = AWS.util.inherit({
  handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) {
    var self = this;
    var endpoint = httpRequest.endpoint;
    var emitter = new EventEmitter();
    var href = endpoint.protocol + '//' + endpoint.hostname;
    if (endpoint.port !== 80 && endpoint.port !== 443) {
      href += ':' + endpoint.port;
    }
    href += httpRequest.path;

    // Hand the emitter to the SDK before any events can fire.
    callback(emitter);
    var headers = new Headers();

    // fetch() manages these forbidden headers itself; setting them throws.
    AWS.util.each(httpRequest.headers, function (key, value) {
      if (key !== 'Content-Length' && key !== 'User-Agent' && key !== 'Host') {
        headers.set(key, value);
      }
    });

    var credentials = 'omit';
    if (httpOptions.xhrWithCredentials) {
      credentials = 'include';
    }

    var request = new Request(href, {
      method: httpRequest.method,
      headers: headers,
      // Previously computed but never passed — honor xhrWithCredentials.
      credentials: credentials,
      // GET requests must not carry a body.
      body: httpRequest.method == "GET" ? null : httpRequest.body
    });

    fetch(request).then(function(response) {
      if (!response.ok) {
        throw new Error(response.statusText);
      }
      return response;
    }).then(function(response) {
      emitter.statusCode = response.status;
      emitter.headers = self.parseHeaders(response.headers);
      emitter.emit('headers', emitter.statusCode, emitter.headers);
      return response.text();
    }).then(function(res) {
      self.finishRequest(res, emitter);
    }).catch(function(err) {
      // Route every failure (network, non-2xx, body read) to the SDK's
      // error callback. The old code swallowed body-read errors after
      // logging them, which left the request hanging (no 'end' event).
      errCallback(AWS.util.error(new Error('Network Failure' + err), {
        code: 'NetworkingError'
      }));
    });

    return emitter;
  },

  /**
   * Convert a fetch Headers object into a plain name -> value map.
   * @api private
   */
  parseHeaders: function parseHeaders(rawHeaders) {
    var headers = {};
    if (!rawHeaders) return headers;
    for (var pair of rawHeaders.entries()) {
      headers[pair[0]] = pair[1];
    }
    return headers;
  },

  /**
   * Emit the response body as a Buffer (when constructible) and then 'end',
   * matching the streamsApiVersion = 1 contract below.
   * @api private
   */
  finishRequest: function finishRequest(res, emitter) {
    var buffer;
    try {
      buffer = new AWS.util.Buffer(res);
    } catch (e) {
      // Buffer may be unavailable in this runtime; fall through and just end.
    }
    if (buffer) emitter.emit('data', buffer);
    emitter.emit('end');
  }
});

/**
 * @api private
 */
AWS.HttpClient.prototype = AWS.XHRClient.prototype;

/**
 * @api private
 */
AWS.HttpClient.streamsApiVersion = 1;
|
Loading…
Reference in New Issue