Compare commits

..

5 Commits

Author SHA1 Message Date
John Crepezzi 32df3370e2 Fix header name 8 years ago
John Crepezzi d81195856a Log ip 8 years ago
John Crepezzi 6ed427658e Remove npm 8 years ago
John Crepezzi 08eddc7e80 Added pg 8 years ago
John Crepezzi d040dedc6e Production uses postgres 8 years ago
  1. 8
      .dockerignore
  2. 2
      .eslintignore
  3. 25
      .eslintrc.json
  4. 1
      .github/CODEOWNERS
  5. 30
      .github/workflows/close-inactive.yaml
  6. 1
      .gitignore
  7. 68
      Dockerfile
  8. 196
      README.md
  9. 4
      about.md
  10. 3
      config.js
  11. 12
      docker-compose.yaml
  12. 108
      docker-entrypoint.js
  13. 9
      docker-entrypoint.sh
  14. 41
      lib/document_handler.js
  15. 56
      lib/document_stores/amazon-s3.js
  16. 89
      lib/document_stores/google-datastore.js
  17. 93
      lib/document_stores/memcached.js
  18. 88
      lib/document_stores/mongo.js
  19. 21
      lib/document_stores/postgres.js
  20. 11
      lib/document_stores/redis.js
  21. 46
      lib/document_stores/rethinkdb.js
  22. 32
      lib/key_generators/dictionary.js
  23. 46
      lib/key_generators/phonetic.js
  24. 31
      lib/key_generators/random.js
  25. 1652
      package-lock.json
  26. 22
      package.json
  27. 57
      server.js
  28. 8
      spec/document_handler_spec.js
  29. 16
      spec/redis_document_store_spec.js
  30. 5
      static/application.css
  31. 21
      static/application.js
  32. 2
      static/application.min.js
  33. 8
      static/highlight.min.js
  34. 34
      test/key_generators/dictionary_spec.js
  35. 35
      test/key_generators/phonetic_spec.js
  36. 24
      test/key_generators/random_spec.js

@ -1,8 +0,0 @@
Dockerfile
.git
npm-debug.log
node_modules
*.swp
*.swo
data
*.DS_Store

@ -1,2 +0,0 @@
**/*.min.js
config.js

@ -1,25 +0,0 @@
{
"env": {
"es6": true,
"node": true
},
"extends": "eslint:recommended",
"rules": {
"indent": [
"error",
2
],
"linebreak-style": [
"error",
"unix"
],
"quotes": [
"error",
"single"
],
"semi": [
"error",
"always"
]
}
}

@ -1 +0,0 @@
* @toptal/site-acquisition-eng

@ -1,30 +0,0 @@
name: Close inactive issues and PRs
on:
workflow_dispatch:
schedule:
- cron: "30 1 * * *"
jobs:
close-stale:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v3
with:
days-before-stale: 30
days-before-close: 14
stale-issue-label: "stale"
stale-pr-label: "stale"
exempt-issue-labels: backlog,triage,nostale
exempt-pr-labels: backlog,triage,nostale
stale-pr-message: "This PR is stale because it has been open for 30 days with no activity."
close-pr-message: "This PR was closed because it has been inactive for 14 days since being marked as stale."
stale-issue-message: "This issue is stale because it has been open for 30 days with no activity."
close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
repo-token: ${{ secrets.GITHUB_TOKEN }}

1
.gitignore vendored

@ -4,4 +4,3 @@ node_modules
*.swo *.swo
data data
*.DS_Store *.DS_Store
docker-compose.override.yml

@ -1,68 +0,0 @@
FROM node:14.8.0-stretch
RUN mkdir -p /usr/src/app && \
chown node:node /usr/src/app
USER node:node
WORKDIR /usr/src/app
COPY --chown=node:node . .
RUN npm install && \
npm install redis@0.8.1 && \
npm install pg@4.1.1 && \
npm install memcached@2.2.2 && \
npm install aws-sdk@2.738.0 && \
npm install rethinkdbdash@2.3.31
ENV STORAGE_TYPE=memcached \
STORAGE_HOST=127.0.0.1 \
STORAGE_PORT=11211\
STORAGE_EXPIRE_SECONDS=2592000\
STORAGE_DB=2 \
STORAGE_AWS_BUCKET= \
STORAGE_AWS_REGION= \
STORAGE_USENAME= \
STORAGE_PASSWORD= \
STORAGE_FILEPATH=
ENV LOGGING_LEVEL=verbose \
LOGGING_TYPE=Console \
LOGGING_COLORIZE=true
ENV HOST=0.0.0.0\
PORT=7777\
KEY_LENGTH=10\
MAX_LENGTH=400000\
STATIC_MAX_AGE=86400\
RECOMPRESS_STATIC_ASSETS=true
ENV KEYGENERATOR_TYPE=phonetic \
KEYGENERATOR_KEYSPACE=
ENV RATELIMITS_NORMAL_TOTAL_REQUESTS=500\
RATELIMITS_NORMAL_EVERY_MILLISECONDS=60000 \
RATELIMITS_WHITELIST_TOTAL_REQUESTS= \
RATELIMITS_WHITELIST_EVERY_MILLISECONDS= \
# comma separated list for the whitelisted \
RATELIMITS_WHITELIST=example1.whitelist,example2.whitelist \
\
RATELIMITS_BLACKLIST_TOTAL_REQUESTS= \
RATELIMITS_BLACKLIST_EVERY_MILLISECONDS= \
# comma separated list for the blacklisted \
RATELIMITS_BLACKLIST=example1.blacklist,example2.blacklist
ENV DOCUMENTS=about=./about.md
EXPOSE ${PORT}
STOPSIGNAL SIGINT
ENTRYPOINT [ "bash", "docker-entrypoint.sh" ]
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s \
--retries=3 CMD [ "sh", "-c", "echo -n 'curl localhost:7777... '; \
(\
curl -sf localhost:7777 > /dev/null\
) && echo OK || (\
echo Fail && exit 2\
)"]
CMD ["npm", "start"]

@ -1,6 +1,6 @@
# Haste # Haste
haste is an open-source pastebin software written in node.js, which is easily Haste is an open-source pastebin software written in node.js, which is easily
installable in any network. It can be backed by either redis or filesystem, installable in any network. It can be backed by either redis or filesystem,
and has a very easy adapter interface for other stores. A publicly available and has a very easy adapter interface for other stores. A publicly available
version can be found at [hastebin.com](http://hastebin.com) version can be found at [hastebin.com](http://hastebin.com)
@ -31,16 +31,16 @@ STDOUT. Check the README there for more details and usages.
1. Download the package, and expand it 1. Download the package, and expand it
2. Explore the settings inside of config.js, but the defaults should be good 2. Explore the settings inside of config.js, but the defaults should be good
3. `npm install` 3. `npm install`
4. `npm start` (you may specify an optional `<config-path>` as well) 4. `npm start`
## Settings ## Settings
* `host` - the host the server runs on (default localhost) * `host` - the host the server runs on (default localhost)
* `port` - the port the server runs on (default 7777) * `port` - the port the server runs on (default 7777)
* `keyLength` - the length of the keys to user (default 10) * `keyLength` - the length of the keys to user (default 10)
* `maxLength` - maximum length of a paste (default 400000) * `maxLength` - maximum length of a paste (default none)
* `staticMaxAge` - max age for static assets (86400) * `staticMaxAge` - max age for static assets (86400)
* `recompressStaticAssets` - whether or not to compile static js assets (true) * `recompressStatisAssets` - whether or not to compile static js assets (true)
* `documents` - static documents to serve (ex: http://hastebin.com/about.com) * `documents` - static documents to serve (ex: http://hastebin.com/about.com)
in addition to static assets. These will never expire. in addition to static assets. These will never expire.
* `storage` - storage options (see below) * `storage` - storage options (see below)
@ -52,7 +52,7 @@ STDOUT. Check the README there for more details and usages.
When present, the `rateLimits` option enables built-in rate limiting courtesy When present, the `rateLimits` option enables built-in rate limiting courtesy
of `connect-ratelimit`. Any of the options supported by that library can be of `connect-ratelimit`. Any of the options supported by that library can be
used and set in `config.js`. used and set in `config.json`.
See the README for [connect-ratelimit](https://github.com/dharmafly/connect-ratelimit) See the README for [connect-ratelimit](https://github.com/dharmafly/connect-ratelimit)
for more information! for more information!
@ -97,9 +97,7 @@ something like:
} }
``` ```
where `path` represents where you want the files stored. Where `path` represents where you want the files stored
File storage currently does not support paste expiration, you can follow [#191](https://github.com/seejohnrun/haste-server/issues/191) for status updates.
### Redis ### Redis
@ -154,33 +152,11 @@ or post.
All of which are optional except `type` with very logical default values. All of which are optional except `type` with very logical default values.
### MongoDB
To use mongodb storage you must install the 'mongodb' package in npm
`npm install mongodb`
Once you've done that, your config section should look like:
``` json
{
"type": "mongo",
"connectionUrl": "mongodb://localhost:27017/database"
}
```
You can also just set the environment variable for `DATABASE_URL` to your database connection url.
Unlike with postgres you do NOT have to create the table in your mongo database prior to running.
You can also set an `expire` option to the number of seconds to expire keys in.
This is off by default, but will constantly kick back expirations on each view or post.
### Memcached ### Memcached
To use memcache storage you must install the `memcached` package via npm To use memcached storage you must install the `memcache` package via npm
`npm install memcached` `npm install memcache`
Once you've done that, your config section should look like: Once you've done that, your config section should look like:
@ -198,162 +174,6 @@ forward on GETs.
All of which are optional except `type` with very logical default values. All of which are optional except `type` with very logical default values.
### RethinkDB
To use the RethinkDB storage system, you must install the `rethinkdbdash` package via npm
`npm install rethinkdbdash`
Once you've done that, your config section should look like this:
``` json
{
"type": "rethinkdb",
"host": "127.0.0.1",
"port": 28015,
"db": "haste"
}
```
In order for this to work, the database must be pre-created before the script is ran.
Also, you must create an `uploads` table, which will store all the data for uploads.
You can optionally add the `user` and `password` properties to use a user system.
### Google Datastore
To use the Google Datastore storage system, you must install the `@google-cloud/datastore` package via npm
`npm install @google-cloud/datastore`
Once you've done that, your config section should look like this:
``` json
{
"type": "google-datastore"
}
```
Authentication is handled automatically by [Google Cloud service account credentials](https://cloud.google.com/docs/authentication/getting-started), by providing authentication details to the GOOGLE_APPLICATION_CREDENTIALS environmental variable.
### Amazon S3
To use [Amazon S3](https://aws.amazon.com/s3/) as a storage system, you must
install the `aws-sdk` package via npm:
`npm install aws-sdk`
Once you've done that, your config section should look like this:
```json
{
"type": "amazon-s3",
"bucket": "your-bucket-name",
"region": "us-east-1"
}
```
Authentication is handled automatically by the client. Check
[Amazon's documentation](https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/setting-credentials-node.html)
for more information. You will need to grant your role these permissions to
your bucket:
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"s3:GetObject",
"s3:PutObject"
],
"Effect": "Allow",
"Resource": "arn:aws:s3:::your-bucket-name-goes-here/*"
}
]
}
```
## Docker
### Build image
```bash
docker build --tag haste-server .
```
### Run container
For this example we will run haste-server, and connect it to a redis server
```bash
docker run --name haste-server-container --env STORAGE_TYPE=redis --env STORAGE_HOST=redis-server --env STORAGE_PORT=6379 haste-server
```
### Use docker-compose example
There is an example `docker-compose.yml` which runs haste-server together with memcached
```bash
docker-compose up
```
### Configuration
The docker image is configured using environmental variables as you can see in the example above.
Here is a list of all the environment variables
### Storage
| Name | Default value | Description |
| :--------------------: | :-----------: | :-----------------------------------------------------------------------------------------------------------: |
| STORAGE_TYPE | memcached | Type of storage . Accepted values: "memcached","redis","postgres","rethinkdb", "amazon-s3", and "file" |
| STORAGE_HOST | 127.0.0.1 | Storage host. Applicable for types: memcached, redis, postgres, and rethinkdb |
| STORAGE_PORT | 11211 | Port on the storage host. Applicable for types: memcached, redis, postgres, and rethinkdb |
| STORAGE_EXPIRE_SECONDS | 2592000 | Number of seconds to expire keys in. Applicable for types. Redis, postgres, memcached. `expire` option to the |
| STORAGE_DB | 2 | The name of the database. Applicable for redis, postgres, and rethinkdb |
| STORAGE_PASSWORD | | Password for database. Applicable for redis, postges, rethinkdb . |
| STORAGE_USERNAME | | Database username. Applicable for postgres, and rethinkdb |
| STORAGE_AWS_BUCKET | | Applicable for amazon-s3. This is the name of the S3 bucket |
| STORAGE_AWS_REGION | | Applicable for amazon-s3. The region in which the bucket is located |
| STORAGE_FILEPATH | | Path to file to save data to. Applicable for type file |
### Logging
| Name | Default value | Description |
| :---------------: | :-----------: | :---------: |
| LOGGING_LEVEL | verbose | |
| LOGGING_TYPE= | Console |
| LOGGING_COLORIZE= | true |
### Basics
| Name | Default value | Description |
| :----------------------: | :--------------: | :---------------------------------------------------------------------------------------: |
| HOST | 0.0.0.0 | The hostname which the server answers on |
| PORT | 7777 | The port on which the server is running |
| KEY_LENGTH | 10 | the length of the keys to user |
| MAX_LENGTH | 400000 | maximum length of a paste |
| STATIC_MAX_AGE | 86400 | max age for static assets |
| RECOMPRESS_STATIC_ASSETS | true | whether or not to compile static js assets |
| KEYGENERATOR_TYPE | phonetic | Type of key generator. Acceptable values: "phonetic", or "random" |
| KEYGENERATOR_KEYSPACE | | keySpace argument is a string of acceptable characters |
| DOCUMENTS | about=./about.md | Comma separated list of static documents to serve. ex: \n about=./about.md,home=./home.md |
### Rate limits
| Name | Default value | Description |
| :----------------------------------: | :-----------------------------------: | :--------------------------------------------------------------------------------------: |
| RATELIMITS_NORMAL_TOTAL_REQUESTS | 500 | By default anyone uncategorized will be subject to 500 requests in the defined timespan. |
| RATELIMITS_NORMAL_EVERY_MILLISECONDS | 60000 | The timespan to allow the total requests for uncategorized users |
| RATELIMITS_WHITELIST_TOTAL_REQUESTS | | By default client names in the whitelist will not have their requests limited. |
| RATELIMITS_WHITELIST_EVERY_SECONDS | | By default client names in the whitelist will not have their requests limited. |
| RATELIMITS_WHITELIST | example1.whitelist,example2.whitelist | Comma separated list of the clients which are in the whitelist pool |
| RATELIMITS_BLACKLIST_TOTAL_REQUESTS | | By default client names in the blacklist will be subject to 0 requests per hours. |
| RATELIMITS_BLACKLIST_EVERY_SECONDS | | By default client names in the blacklist will be subject to 0 requests per hours |
| RATELIMITS_BLACKLIST | example1.blacklist,example2.blacklist | Comma separated list of the clients which are in the blacklistpool. |
## Author ## Author
John Crepezzi <john.crepezzi@gmail.com> John Crepezzi <john.crepezzi@gmail.com>

@ -19,13 +19,13 @@ Most of the time I want to show you some text, it's coming from my current
console session. We should make it really easy to take code from the console console session. We should make it really easy to take code from the console
and send it to people. and send it to people.
`cat something | haste` # https://hastebin.com/1238193 `cat something | haste` # http://hastebin.com/1238193
You can even take this a step further, and cut out the last step of copying the You can even take this a step further, and cut out the last step of copying the
URL with: URL with:
* osx: `cat something | haste | pbcopy` * osx: `cat something | haste | pbcopy`
* linux: `cat something | haste | xsel -b` * linux: `cat something | haste | xsel`
* windows: check out [WinHaste](https://github.com/ajryan/WinHaste) * windows: check out [WinHaste](https://github.com/ajryan/WinHaste)
After running that, the STDOUT output of `cat something` will show up at a URL After running that, the STDOUT output of `cat something` will show up at a URL

@ -33,7 +33,8 @@
}, },
"storage": { "storage": {
"type": "file" "type": "postgres",
"expire": 2592000
}, },
"documents": { "documents": {

@ -1,12 +0,0 @@
version: '3.0'
services:
haste-server:
build: .
environment:
- STORAGE_TYPE=memcached
- STORAGE_HOST=memcached
- STORAGE_PORT=11211
ports:
- 7777:7777
memcached:
image: memcached:latest

@ -1,108 +0,0 @@
const {
HOST,
PORT,
KEY_LENGTH,
MAX_LENGTH,
STATIC_MAX_AGE,
RECOMPRESS_STATIC_ASSETS,
STORAGE_TYPE,
STORAGE_HOST,
STORAGE_PORT,
STORAGE_EXPIRE_SECONDS,
STORAGE_DB,
STORAGE_AWS_BUCKET,
STORAGE_AWS_REGION,
STORAGE_PASSWORD,
STORAGE_USERNAME,
STORAGE_FILEPATH,
LOGGING_LEVEL,
LOGGING_TYPE,
LOGGING_COLORIZE,
KEYGENERATOR_TYPE,
KEY_GENERATOR_KEYSPACE,
RATE_LIMITS_NORMAL_TOTAL_REQUESTS,
RATE_LIMITS_NORMAL_EVERY_MILLISECONDS,
RATE_LIMITS_WHITELIST_TOTAL_REQUESTS,
RATE_LIMITS_WHITELIST_EVERY_MILLISECONDS,
RATE_LIMITS_WHITELIST,
RATE_LIMITS_BLACKLIST_TOTAL_REQUESTS,
RATE_LIMITS_BLACKLIST_EVERY_MILLISECONDS,
RATE_LIMITS_BLACKLIST,
DOCUMENTS,
} = process.env;
const config = {
host: HOST,
port: Number(PORT),
keyLength: Number(KEY_LENGTH),
maxLength: Number(MAX_LENGTH),
staticMaxAge: Number(STATIC_MAX_AGE),
recompressStaticAssets: RECOMPRESS_STATIC_ASSETS,
logging: [
{
level: LOGGING_LEVEL,
type: LOGGING_TYPE,
colorize: LOGGING_COLORIZE,
},
],
keyGenerator: {
type: KEYGENERATOR_TYPE,
keyspace: KEY_GENERATOR_KEYSPACE,
},
rateLimits: {
whitelist: RATE_LIMITS_WHITELIST ? RATE_LIMITS_WHITELIST.split(",") : [],
blacklist: RATE_LIMITS_BLACKLIST ? RATE_LIMITS_BLACKLIST.split(",") : [],
categories: {
normal: {
totalRequests: RATE_LIMITS_NORMAL_TOTAL_REQUESTS,
every: RATE_LIMITS_NORMAL_EVERY_MILLISECONDS,
},
whitelist:
RATE_LIMITS_WHITELIST_EVERY_MILLISECONDS ||
RATE_LIMITS_WHITELIST_TOTAL_REQUESTS
? {
totalRequests: RATE_LIMITS_WHITELIST_TOTAL_REQUESTS,
every: RATE_LIMITS_WHITELIST_EVERY_MILLISECONDS,
}
: null,
blacklist:
RATE_LIMITS_BLACKLIST_EVERY_MILLISECONDS ||
RATE_LIMITS_BLACKLIST_TOTAL_REQUESTS
? {
totalRequests: RATE_LIMITS_WHITELIST_TOTAL_REQUESTS,
every: RATE_LIMITS_BLACKLIST_EVERY_MILLISECONDS,
}
: null,
},
},
storage: {
type: STORAGE_TYPE,
host: STORAGE_HOST,
port: Number(STORAGE_PORT),
expire: Number(STORAGE_EXPIRE_SECONDS),
bucket: STORAGE_AWS_BUCKET,
region: STORAGE_AWS_REGION,
connectionUrl: `postgres://${STORAGE_USERNAME}:${STORAGE_PASSWORD}@${STORAGE_HOST}:${STORAGE_PORT}/${STORAGE_DB}`,
db: STORAGE_DB,
user: STORAGE_USERNAME,
password: STORAGE_PASSWORD,
path: STORAGE_FILEPATH,
},
documents: DOCUMENTS
? DOCUMENTS.split(",").reduce((acc, item) => {
const keyAndValueArray = item.replace(/\s/g, "").split("=");
return { ...acc, [keyAndValueArray[0]]: keyAndValueArray[1] };
}, {})
: null,
};
console.log(JSON.stringify(config));

@ -1,9 +0,0 @@
#!/bin/bash
# We use this file to translate environmental variables to .env files used by the application
set -e
node ./docker-entrypoint.js > ./config.js
exec "$@"

@ -16,55 +16,33 @@ var DocumentHandler = function(options) {
DocumentHandler.defaultKeyLength = 10; DocumentHandler.defaultKeyLength = 10;
// Handle retrieving a document // Handle retrieving a document
DocumentHandler.prototype.handleGet = function(request, response, config) { DocumentHandler.prototype.handleGet = function(key, response, skipExpire) {
const key = request.params.id.split('.')[0];
const skipExpire = !!config.documents[key];
this.store.get(key, function(ret) { this.store.get(key, function(ret) {
if (ret) { if (ret) {
winston.verbose('retrieved document', { key: key }); winston.verbose('retrieved document', { key: key });
response.writeHead(200, { 'content-type': 'application/json' }); response.writeHead(200, { 'content-type': 'application/json' });
if (request.method === 'HEAD') { response.end(JSON.stringify({ data: ret, key: key }));
response.end();
} else {
response.end(JSON.stringify({ data: ret, key: key }));
}
} }
else { else {
winston.warn('document not found', { key: key }); winston.warn('document not found', { key: key });
response.writeHead(404, { 'content-type': 'application/json' }); response.writeHead(404, { 'content-type': 'application/json' });
if (request.method === 'HEAD') { response.end(JSON.stringify({ message: 'Document not found.' }));
response.end();
} else {
response.end(JSON.stringify({ message: 'Document not found.' }));
}
} }
}, skipExpire); }, skipExpire);
}; };
// Handle retrieving the raw version of a document // Handle retrieving the raw version of a document
DocumentHandler.prototype.handleRawGet = function(request, response, config) { DocumentHandler.prototype.handleRawGet = function(key, response, skipExpire) {
const key = request.params.id.split('.')[0];
const skipExpire = !!config.documents[key];
this.store.get(key, function(ret) { this.store.get(key, function(ret) {
if (ret) { if (ret) {
winston.verbose('retrieved raw document', { key: key }); winston.verbose('retrieved raw document', { key: key });
response.writeHead(200, { 'content-type': 'text/plain; charset=UTF-8' }); response.writeHead(200, { 'content-type': 'text/plain' });
if (request.method === 'HEAD') { response.end(ret);
response.end();
} else {
response.end(ret);
}
} }
else { else {
winston.warn('raw document not found', { key: key }); winston.warn('raw document not found', { key: key });
response.writeHead(404, { 'content-type': 'application/json' }); response.writeHead(404, { 'content-type': 'application/json' });
if (request.method === 'HEAD') { response.end(JSON.stringify({ message: 'Document not found.' }));
response.end();
} else {
response.end(JSON.stringify({ message: 'Document not found.' }));
}
} }
}, skipExpire); }, skipExpire);
}; };
@ -91,7 +69,8 @@ DocumentHandler.prototype.handlePost = function (request, response) {
_this.chooseKey(function (key) { _this.chooseKey(function (key) {
_this.store.set(key, buffer, function (res) { _this.store.set(key, buffer, function (res) {
if (res) { if (res) {
winston.verbose('added document', { key: key }); var ip = request.headers['x-forwarded-for'] || request.ip;
winston.verbose('added document', { key: key, ip: ip });
response.writeHead(200, { 'content-type': 'application/json' }); response.writeHead(200, { 'content-type': 'application/json' });
response.end(JSON.stringify({ key: key })); response.end(JSON.stringify({ key: key }));
} }
@ -145,7 +124,7 @@ DocumentHandler.prototype.chooseKey = function(callback) {
} else { } else {
callback(key); callback(key);
} }
}, true); // Don't bump expirations when key searching });
}; };
DocumentHandler.prototype.acceptableKey = function() { DocumentHandler.prototype.acceptableKey = function() {

@ -1,56 +0,0 @@
/*global require,module,process*/
var AWS = require('aws-sdk');
var winston = require('winston');
var AmazonS3DocumentStore = function(options) {
this.expire = options.expire;
this.bucket = options.bucket;
this.client = new AWS.S3({region: options.region});
};
AmazonS3DocumentStore.prototype.get = function(key, callback, skipExpire) {
var _this = this;
var req = {
Bucket: _this.bucket,
Key: key
};
_this.client.getObject(req, function(err, data) {
if(err) {
callback(false);
}
else {
callback(data.Body.toString('utf-8'));
if (_this.expire && !skipExpire) {
winston.warn('amazon s3 store cannot set expirations on keys');
}
}
});
}
AmazonS3DocumentStore.prototype.set = function(key, data, callback, skipExpire) {
var _this = this;
var req = {
Bucket: _this.bucket,
Key: key,
Body: data,
ContentType: 'text/plain'
};
_this.client.putObject(req, function(err, data) {
if (err) {
callback(false);
}
else {
callback(true);
if (_this.expire && !skipExpire) {
winston.warn('amazon s3 store cannot set expirations on keys');
}
}
});
}
module.exports = AmazonS3DocumentStore;

@ -1,89 +0,0 @@
/*global require,module,process*/
const Datastore = require('@google-cloud/datastore');
const winston = require('winston');
class GoogleDatastoreDocumentStore {
// Create a new store with options
constructor(options) {
this.kind = "Haste";
this.expire = options.expire;
this.datastore = new Datastore();
}
// Save file in a key
set(key, data, callback, skipExpire) {
var expireTime = (skipExpire || this.expire === undefined) ? null : new Date(Date.now() + this.expire * 1000);
var taskKey = this.datastore.key([this.kind, key])
var task = {
key: taskKey,
data: [
{
name: 'value',
value: data,
excludeFromIndexes: true
},
{
name: 'expiration',
value: expireTime
}
]
};
this.datastore.insert(task).then(() => {
callback(true);
})
.catch(err => {
callback(false);
});
}
// Get a file from a key
get(key, callback, skipExpire) {
var taskKey = this.datastore.key([this.kind, key])
this.datastore.get(taskKey).then((entity) => {
if (skipExpire || entity[0]["expiration"] == null) {
callback(entity[0]["value"]);
}
else {
// check for expiry
if (entity[0]["expiration"] < new Date()) {
winston.info("document expired", {key: key, expiration: entity[0]["expiration"], check: new Date(null)});
callback(false);
}
else {
// update expiry
var task = {
key: taskKey,
data: [
{
name: 'value',
value: entity[0]["value"],
excludeFromIndexes: true
},
{
name: 'expiration',
value: new Date(Date.now() + this.expire * 1000)
}
]
};
this.datastore.update(task).then(() => {
})
.catch(err => {
winston.error("failed to update expiration", {error: err});
});
callback(entity[0]["value"]);
}
}
})
.catch(err => {
winston.error("Error retrieving value from Google Datastore", {error: err});
callback(false);
});
}
}
module.exports = GoogleDatastoreDocumentStore;

@ -1,54 +1,45 @@
const memcached = require('memcached'); var memcached = require('memcache');
const winston = require('winston'); var winston = require('winston');
class MemcachedDocumentStore { // Create a new store with options
var MemcachedDocumentStore = function(options) {
// Create a new store with options this.expire = options.expire;
constructor(options) { if (!MemcachedDocumentStore.client) {
this.expire = options.expire; MemcachedDocumentStore.connect(options);
const host = options.host || '127.0.0.1';
const port = options.port || 11211;
const url = `${host}:${port}`;
this.connect(url);
}
// Create a connection
connect(url) {
this.client = new memcached(url);
winston.info(`connecting to memcached on ${url}`);
this.client.on('failure', function(error) {
winston.info('error connecting to memcached', {error});
});
} }
};
// Save file in a key
set(key, data, callback, skipExpire) { // Create a connection
this.client.set(key, data, skipExpire ? 0 : this.expire || 0, (error) => { MemcachedDocumentStore.connect = function(options) {
callback(!error); var host = options.host || '127.0.0.1';
}); var port = options.port || 11211;
} this.client = new memcached.Client(port, host);
this.client.connect();
// Get a file from a key this.client.on('connect', function() {
get(key, callback, skipExpire) { winston.info('connected to memcached on ' + host + ':' + port);
this.client.get(key, (error, data) => { });
const value = error ? false : data; this.client.on('error', function(e) {
winston.info('error connecting to memcached', { error: e });
callback(value); });
};
// Update the key so that the expiration is pushed forward
if (value && !skipExpire) { // Save file in a key
this.set(key, data, (updateSucceeded) => { MemcachedDocumentStore.prototype.set =
if (!updateSucceeded) { function(key, data, callback, skipExpire) {
winston.error('failed to update expiration on GET', {key}); MemcachedDocumentStore.client.set(key, data, function(err, reply) {
} err ? callback(false) : callback(true);
}, skipExpire); }, skipExpire ? 0 : this.expire);
} };
});
} // Get a file from a key
MemcachedDocumentStore.prototype.get = function(key, callback, skipExpire) {
} var _this = this;
MemcachedDocumentStore.client.get(key, function(err, reply) {
callback(err ? false : reply);
if (_this.expire && !skipExpire) {
winston.warn('store does not currently push forward expirations on GET');
}
});
};
module.exports = MemcachedDocumentStore; module.exports = MemcachedDocumentStore;

@ -1,88 +0,0 @@
var MongoClient = require('mongodb').MongoClient,
winston = require('winston');
var MongoDocumentStore = function (options) {
this.expire = options.expire;
this.connectionUrl = process.env.DATABASE_URl || options.connectionUrl;
};
MongoDocumentStore.prototype.set = function (key, data, callback, skipExpire) {
var now = Math.floor(new Date().getTime() / 1000),
that = this;
this.safeConnect(function (err, db) {
if (err)
return callback(false);
db.collection('entries').update({
'entry_id': key,
$or: [
{ expiration: -1 },
{ expiration: { $gt: now } }
]
}, {
'entry_id': key,
'value': data,
'expiration': that.expire && !skipExpire ? that.expire + now : -1
}, {
upsert: true
}, function (err, existing) {
if (err) {
winston.error('error persisting value to mongodb', { error: err });
return callback(false);
}
callback(true);
});
});
};
MongoDocumentStore.prototype.get = function (key, callback, skipExpire) {
var now = Math.floor(new Date().getTime() / 1000),
that = this;
this.safeConnect(function (err, db) {
if (err)
return callback(false);
db.collection('entries').findOne({
'entry_id': key,
$or: [
{ expiration: -1 },
{ expiration: { $gt: now } }
]
}, function (err, entry) {
if (err) {
winston.error('error persisting value to mongodb', { error: err });
return callback(false);
}
callback(entry === null ? false : entry.value);
if (entry !== null && entry.expiration !== -1 && that.expire && !skipExpire) {
db.collection('entries').update({
'entry_id': key
}, {
$set: {
'expiration': that.expire + now
}
}, function (err, result) { });
}
});
});
};
MongoDocumentStore.prototype.safeConnect = function (callback) {
MongoClient.connect(this.connectionUrl, function (err, db) {
if (err) {
winston.error('error connecting to mongodb', { error: err });
callback(err);
} else {
callback(undefined, db);
}
});
};
module.exports = MongoDocumentStore;

@ -1,16 +1,14 @@
/*global require,module,process*/ /*global require,module,process*/
var postgres = require('pg');
var winston = require('winston'); var winston = require('winston');
const {Pool} = require('pg');
// create table entries (id serial primary key, key varchar(255) not null, value text not null, expiration int, unique(key)); // create table entries (id serial primary key, key varchar(255) not null, value text not null, expiration int, unique(key));
// A postgres document store // A postgres document store
var PostgresDocumentStore = function (options) { var PostgresDocumentStore = function (options) {
this.expireJS = parseInt(options.expire, 10); this.expireJS = options.expire;
this.connectionUrl = process.env.DATABASE_URL || options.connectionUrl;
const connectionString = process.env.DATABASE_URL || options.connectionUrl;
this.pool = new Pool({connectionString});
}; };
PostgresDocumentStore.prototype = { PostgresDocumentStore.prototype = {
@ -25,7 +23,7 @@ PostgresDocumentStore.prototype = {
key, key,
data, data,
that.expireJS && !skipExpire ? that.expireJS + now : null that.expireJS && !skipExpire ? that.expireJS + now : null
], function (err) { ], function (err, result) {
if (err) { if (err) {
winston.error('error persisting value to postgres', { error: err }); winston.error('error persisting value to postgres', { error: err });
return callback(false); return callback(false);
@ -52,7 +50,7 @@ PostgresDocumentStore.prototype = {
client.query('UPDATE entries SET expiration = $1 WHERE ID = $2', [ client.query('UPDATE entries SET expiration = $1 WHERE ID = $2', [
that.expireJS + now, that.expireJS + now,
result.rows[0].id result.rows[0].id
], function (err) { ], function (err, result) {
if (!err) { if (!err) {
done(); done();
} }
@ -66,15 +64,16 @@ PostgresDocumentStore.prototype = {
// A connection wrapper // A connection wrapper
safeConnect: function (callback) { safeConnect: function (callback) {
this.pool.connect((error, client, done) => { postgres.connect(this.connectionUrl, function (err, client, done) {
if (error) { if (err) {
winston.error('error connecting to postgres', {error}); winston.error('error connecting to postgres', { error: err });
callback(error); callback(err);
} else { } else {
callback(undefined, client, done); callback(undefined, client, done);
} }
}); });
} }
}; };
module.exports = PostgresDocumentStore; module.exports = PostgresDocumentStore;

@ -29,12 +29,7 @@ RedisDocumentStore.connect = function(options) {
if (options.password) { if (options.password) {
RedisDocumentStore.client.auth(options.password); RedisDocumentStore.client.auth(options.password);
} }
RedisDocumentStore.client.select(index, function(err, reply) {
RedisDocumentStore.client.on('error', function(err) {
winston.error('redis disconnected', err);
});
RedisDocumentStore.client.select(index, function(err) {
if (err) { if (err) {
winston.error( winston.error(
'error connecting to redis index ' + index, 'error connecting to redis index ' + index,
@ -51,7 +46,7 @@ RedisDocumentStore.connect = function(options) {
// Save file in a key // Save file in a key
RedisDocumentStore.prototype.set = function(key, data, callback, skipExpire) { RedisDocumentStore.prototype.set = function(key, data, callback, skipExpire) {
var _this = this; var _this = this;
RedisDocumentStore.client.set(key, data, function(err) { RedisDocumentStore.client.set(key, data, function(err, reply) {
if (err) { if (err) {
callback(false); callback(false);
} }
@ -67,7 +62,7 @@ RedisDocumentStore.prototype.set = function(key, data, callback, skipExpire) {
// Expire a key in expire time if set // Expire a key in expire time if set
RedisDocumentStore.prototype.setExpiration = function(key) { RedisDocumentStore.prototype.setExpiration = function(key) {
if (this.expire) { if (this.expire) {
RedisDocumentStore.client.expire(key, this.expire, function(err) { RedisDocumentStore.client.expire(key, this.expire, function(err, reply) {
if (err) { if (err) {
winston.error('failed to set expiry on key: ' + key); winston.error('failed to set expiry on key: ' + key);
} }

@ -1,46 +0,0 @@
const crypto = require('crypto');
const rethink = require('rethinkdbdash');
const winston = require('winston');
// Hash a string to its hex-encoded MD5 digest; used to derive stable,
// key-safe primary ids for the RethinkDB `uploads` table.
const md5 = (str) => crypto.createHash('md5').update(str).digest('hex');
// Document store backed by RethinkDB. Keys are hashed with MD5 so arbitrary
// key strings are safe to use as primary ids in the `uploads` table.
class RethinkDBStore {

  // options: { host, port, db, user, password } — all optional, with
  // defaults matching a local out-of-the-box RethinkDB install.
  constructor(options) {
    this.client = rethink({
      silent: true,
      host: options.host || '127.0.0.1',
      port: options.port || 28015,
      db: options.db || 'haste',
      user: options.user || 'admin',
      password: options.password || ''
    });
  }

  // Insert `data` under the hashed key.
  // Invokes callback(true) on success, callback(false) on failure.
  set(key, data, callback) {
    this.client.table('uploads').insert({ id: md5(key), data: data }).run((error) => {
      if (error) {
        callback(false);
        winston.error('failed to insert to table', error);
        return;
      }
      callback(true);
    });
  }

  // Fetch the document stored under `key`.
  // Invokes callback(data) on a hit, callback(false) on a miss or error.
  get(key, callback) {
    this.client.table('uploads').get(md5(key)).run((error, result) => {
      if (error || !result) {
        callback(false);
        // Fix: this previously logged the copy-pasted insert message
        // ('failed to insert to table') for read failures.
        if (error) winston.error('failed to fetch from table', error);
        return;
      }
      callback(result.data);
    });
  }

}
module.exports = RethinkDBStore;

@ -1,32 +0,0 @@
const fs = require('fs');
module.exports = class DictionaryGenerator {
constructor(options, readyCallback) {
// Check options format
if (!options) throw Error('No options passed to generator');
if (!options.path) throw Error('No dictionary path specified in options');
// Load dictionary
fs.readFile(options.path, 'utf8', (err, data) => {
if (err) throw err;
this.dictionary = data.split(/[\n\r]+/);
if (readyCallback) readyCallback();
});
}
// Generates a dictionary-based key, of keyLength words
createKey(keyLength) {
let text = '';
for (let i = 0; i < keyLength; i++) {
const index = Math.floor(Math.random() * this.dictionary.length);
text += this.dictionary[index];
}
return text;
}
};

@ -1,27 +1,33 @@
// Draws inspiration from pwgen and http://tools.arantius.com/password // Draws inspiration from pwgen and http://tools.arantius.com/password
var PhoneticKeyGenerator = function(options) {
const randOf = (collection) => { // No options
return () => {
return collection[Math.floor(Math.random() * collection.length)];
};
}; };
// Helper methods to get an random vowel or consonant // Generate a phonetic key
const randVowel = randOf('aeiou'); PhoneticKeyGenerator.prototype.createKey = function(keyLength) {
const randConsonant = randOf('bcdfghjklmnpqrstvwxyz'); var text = '';
var start = Math.round(Math.random());
module.exports = class PhoneticKeyGenerator { for (var i = 0; i < keyLength; i++) {
text += (i % 2 == start) ? this.randConsonant() : this.randVowel();
// Generate a phonetic key of alternating consonant & vowel }
createKey(keyLength) { return text;
let text = ''; };
const start = Math.round(Math.random());
for (let i = 0; i < keyLength; i++) { PhoneticKeyGenerator.consonants = 'bcdfghjklmnpqrstvwxyz';
text += (i % 2 == start) ? randConsonant() : randVowel(); PhoneticKeyGenerator.vowels = 'aeiou';
}
return text; // Get an random vowel
} PhoneticKeyGenerator.prototype.randVowel = function() {
return PhoneticKeyGenerator.vowels[
Math.floor(Math.random() * PhoneticKeyGenerator.vowels.length)
];
};
// Get an random consonant
PhoneticKeyGenerator.prototype.randConsonant = function() {
return PhoneticKeyGenerator.consonants[
Math.floor(Math.random() * PhoneticKeyGenerator.consonants.length)
];
}; };
module.exports = PhoneticKeyGenerator;

@ -1,20 +1,19 @@
module.exports = class RandomKeyGenerator { var RandomKeyGenerator = function(options) {
if (!options) {
// Initialize a new generator with the given keySpace options = {};
constructor(options = {}) {
this.keyspace = options.keyspace || 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
} }
this.keyspace = options.keyspace || 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
};
// Generate a key of the given length // Generate a random key
createKey(keyLength) { RandomKeyGenerator.prototype.createKey = function(keyLength) {
var text = ''; var text = '';
var index;
for (var i = 0; i < keyLength; i++) { for (var i = 0; i < keyLength; i++) {
const index = Math.floor(Math.random() * this.keyspace.length); index = Math.floor(Math.random() * this.keyspace.length);
text += this.keyspace.charAt(index); text += this.keyspace.charAt(index);
}
return text;
} }
return text;
}; };
module.exports = RandomKeyGenerator;

1652
package-lock.json generated

File diff suppressed because it is too large Load Diff

@ -14,21 +14,23 @@
}, },
"main": "haste", "main": "haste",
"dependencies": { "dependencies": {
"busboy": "0.2.4",
"connect": "^3.7.0",
"connect-ratelimit": "0.0.7", "connect-ratelimit": "0.0.7",
"connect-route": "0.1.5", "connect-route": "0.1.5",
"pg": "^8.0.0", "connect": "3.4.1",
"redis": "0.8.1", "st": "1.1.0",
"redis-url": "0.1.0", "winston": "0.6.2",
"st": "^2.0.0", "uglify-js": "1.3.3",
"uglify-js": "3.1.6", "busboy": "0.2.4",
"winston": "^2.0.0" "pg": "4.1.1"
}, },
"devDependencies": { "devDependencies": {
"mocha": "^8.1.3" "mocha": "*",
"should": "*"
}, },
"bundledDependencies": [], "bundledDependencies": [],
"engines": {
"node": "0.10.35"
},
"bin": { "bin": {
"haste-server": "./server.js" "haste-server": "./server.js"
}, },
@ -42,6 +44,6 @@
}, },
"scripts": { "scripts": {
"start": "node server.js", "start": "node server.js",
"test": "mocha --recursive" "test": "mocha -r should spec/*"
} }
} }

@ -1,7 +1,7 @@
var http = require('http'); var http = require('http');
var url = require('url');
var fs = require('fs'); var fs = require('fs');
var uglify = require('uglify-js');
var winston = require('winston'); var winston = require('winston');
var connect = require('connect'); var connect = require('connect');
var route = require('connect-route'); var route = require('connect-route');
@ -11,8 +11,7 @@ var connect_rate_limit = require('connect-ratelimit');
var DocumentHandler = require('./lib/document_handler'); var DocumentHandler = require('./lib/document_handler');
// Load the configuration and set some defaults // Load the configuration and set some defaults
const configPath = process.argv.length <= 2 ? 'config.js' : process.argv[2]; var config = JSON.parse(fs.readFileSync('./config.js', 'utf8'));
const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
config.port = process.env.PORT || config.port || 7777; config.port = process.env.PORT || config.port || 7777;
config.host = process.env.HOST || config.host || 'localhost'; config.host = process.env.HOST || config.host || 'localhost';
@ -20,10 +19,7 @@ config.host = process.env.HOST || config.host || 'localhost';
if (config.logging) { if (config.logging) {
try { try {
winston.remove(winston.transports.Console); winston.remove(winston.transports.Console);
} catch(e) { } catch(er) { }
/* was not present */
}
var detail, type; var detail, type;
for (var i = 0; i < config.logging.length; i++) { for (var i = 0; i < config.logging.length; i++) {
detail = config.logging[i]; detail = config.logging[i];
@ -56,14 +52,21 @@ else {
// Compress the static javascript assets // Compress the static javascript assets
if (config.recompressStaticAssets) { if (config.recompressStaticAssets) {
var jsp = require("uglify-js").parser;
var pro = require("uglify-js").uglify;
var list = fs.readdirSync('./static'); var list = fs.readdirSync('./static');
for (var j = 0; j < list.length; j++) { for (var i = 0; i < list.length; i++) {
var item = list[j]; var item = list[i];
if ((item.indexOf('.js') === item.length - 3) && (item.indexOf('.min.js') === -1)) { var orig_code, ast;
var dest = item.substring(0, item.length - 3) + '.min' + item.substring(item.length - 3); if ((item.indexOf('.js') === item.length - 3) &&
var orig_code = fs.readFileSync('./static/' + item, 'utf8'); (item.indexOf('.min.js') === -1)) {
dest = item.substring(0, item.length - 3) + '.min' +
fs.writeFileSync('./static/' + dest, uglify.minify(orig_code).code, 'utf8'); item.substring(item.length - 3);
orig_code = fs.readFileSync('./static/' + item, 'utf8');
ast = jsp.parse(orig_code);
ast = pro.ast_mangle(ast);
ast = pro.ast_squeeze(ast);
fs.writeFileSync('./static/' + dest, pro.gen_code(ast), 'utf8');
winston.info('compressed ' + item + ' into ' + dest); winston.info('compressed ' + item + ' into ' + dest);
} }
} }
@ -110,28 +113,20 @@ if (config.rateLimits) {
// first look at API calls // first look at API calls
app.use(route(function(router) { app.use(route(function(router) {
// get raw documents - support getting with extension // get raw documents - support getting with extension
router.get('/raw/:id', function(request, response, next) {
router.get('/raw/:id', function(request, response) { var key = request.params.id.split('.')[0];
return documentHandler.handleRawGet(request, response, config); var skipExpire = !!config.documents[key];
}); return documentHandler.handleRawGet(key, response, skipExpire);
router.head('/raw/:id', function(request, response) {
return documentHandler.handleRawGet(request, response, config);
}); });
// add documents // add documents
router.post('/documents', function(request, response, next) {
router.post('/documents', function(request, response) {
return documentHandler.handlePost(request, response); return documentHandler.handlePost(request, response);
}); });
// get documents // get documents
router.get('/documents/:id', function(request, response) { router.get('/documents/:id', function(request, response, next) {
return documentHandler.handleGet(request, response, config); var key = request.params.id.split('.')[0];
}); var skipExpire = !!config.documents[key];
return documentHandler.handleGet(key, response, skipExpire);
router.head('/documents/:id', function(request, response) {
return documentHandler.handleGet(request, response, config);
}); });
})); }));

@ -1,7 +1,3 @@
/* global describe, it */
var assert = require('assert');
var DocumentHandler = require('../lib/document_handler'); var DocumentHandler = require('../lib/document_handler');
var Generator = require('../lib/key_generators/random'); var Generator = require('../lib/key_generators/random');
@ -12,13 +8,13 @@ describe('document_handler', function() {
it('should choose a key of the proper length', function() { it('should choose a key of the proper length', function() {
var gen = new Generator(); var gen = new Generator();
var dh = new DocumentHandler({ keyLength: 6, keyGenerator: gen }); var dh = new DocumentHandler({ keyLength: 6, keyGenerator: gen });
assert.equal(6, dh.acceptableKey().length); dh.acceptableKey().length.should.equal(6);
}); });
it('should choose a default key length', function() { it('should choose a default key length', function() {
var gen = new Generator(); var gen = new Generator();
var dh = new DocumentHandler({ keyGenerator: gen }); var dh = new DocumentHandler({ keyGenerator: gen });
assert.equal(dh.keyLength, DocumentHandler.defaultKeyLength); dh.keyLength.should.equal(DocumentHandler.defaultKeyLength);
}); });
}); });

@ -1,12 +1,8 @@
/* global it, describe, afterEach */ var RedisDocumentStore = require('../lib/document_stores/redis');
var assert = require('assert');
var winston = require('winston'); var winston = require('winston');
winston.remove(winston.transports.Console); winston.remove(winston.transports.Console);
var RedisDocumentStore = require('../lib/document_stores/redis');
describe('redis_document_store', function() { describe('redis_document_store', function() {
/* reconnect to redis on each test */ /* reconnect to redis on each test */
@ -16,14 +12,14 @@ describe('redis_document_store', function() {
RedisDocumentStore.client = false; RedisDocumentStore.client = false;
} }
}); });
describe('set', function() { describe('set', function() {
it('should be able to set a key and have an expiration set', function(done) { it('should be able to set a key and have an expiration set', function(done) {
var store = new RedisDocumentStore({ expire: 10 }); var store = new RedisDocumentStore({ expire: 10 });
store.set('hello1', 'world', function() { store.set('hello1', 'world', function() {
RedisDocumentStore.client.ttl('hello1', function(err, res) { RedisDocumentStore.client.ttl('hello1', function(err, res) {
assert.ok(res > 1); res.should.be.above(1);
done(); done();
}); });
}); });
@ -33,7 +29,7 @@ describe('redis_document_store', function() {
var store = new RedisDocumentStore({ expire: 10 }); var store = new RedisDocumentStore({ expire: 10 });
store.set('hello2', 'world', function() { store.set('hello2', 'world', function() {
RedisDocumentStore.client.ttl('hello2', function(err, res) { RedisDocumentStore.client.ttl('hello2', function(err, res) {
assert.equal(-1, res); res.should.equal(-1);
done(); done();
}); });
}, true); }, true);
@ -41,9 +37,9 @@ describe('redis_document_store', function() {
it('should not set an expiration when expiration is off', function(done) { it('should not set an expiration when expiration is off', function(done) {
var store = new RedisDocumentStore({ expire: false }); var store = new RedisDocumentStore({ expire: false });
store.set('hello3', 'world', function() { store.set('hello3', 'world', function(worked) {
RedisDocumentStore.client.ttl('hello3', function(err, res) { RedisDocumentStore.client.ttl('hello3', function(err, res) {
assert.equal(-1, res); res.should.equal(-1);
done(); done();
}); });
}); });

@ -17,8 +17,6 @@ textarea {
outline: none; outline: none;
resize: none; resize: none;
font-size: 13px; font-size: 13px;
margin-top: 0;
margin-bottom: 0;
} }
/* the line numbers */ /* the line numbers */
@ -33,7 +31,6 @@ textarea {
font-size: 13px; font-size: 13px;
font-family: monospace; font-family: monospace;
text-align: right; text-align: right;
user-select: none;
} }
/* code box when locked */ /* code box when locked */
@ -45,6 +42,7 @@ textarea {
border: 0px; border: 0px;
outline: none; outline: none;
font-size: 13px; font-size: 13px;
padding-right: 360px;
overflow: inherit; overflow: inherit;
} }
@ -120,7 +118,6 @@ textarea {
font-size: 12px; font-size: 12px;
line-height: 14px; line-height: 14px;
padding: 10px 15px; padding: 10px 15px;
user-select: none;
} }
#box3 .label, #messages li { #box3 .label, #messages li {

@ -1,5 +1,3 @@
/* global $, hljs, window, document */
///// represents a single document ///// represents a single document
var haste_document = function() { var haste_document = function() {
@ -44,10 +42,10 @@ haste_document.prototype.load = function(key, callback, lang) {
value: high.value, value: high.value,
key: key, key: key,
language: high.language || lang, language: high.language || lang,
lineCount: res.data.split('\n').length lineCount: res.data.split("\n").length
}); });
}, },
error: function() { error: function(err) {
callback(false); callback(false);
} }
}); });
@ -64,7 +62,7 @@ haste_document.prototype.save = function(data, callback) {
type: 'post', type: 'post',
data: data, data: data,
dataType: 'json', dataType: 'json',
contentType: 'text/plain; charset=utf-8', contentType: 'application/json; charset=utf-8',
success: function(res) { success: function(res) {
_this.locked = true; _this.locked = true;
_this.key = res.key; _this.key = res.key;
@ -73,7 +71,7 @@ haste_document.prototype.save = function(data, callback) {
value: high.value, value: high.value,
key: res.key, key: res.key,
language: high.language, language: high.language,
lineCount: data.split('\n').length lineCount: data.split("\n").length
}); });
}, },
error: function(res) { error: function(res) {
@ -170,7 +168,8 @@ haste.extensionMap = {
lua: 'lua', pas: 'delphi', java: 'java', cpp: 'cpp', cc: 'cpp', m: 'objectivec', lua: 'lua', pas: 'delphi', java: 'java', cpp: 'cpp', cc: 'cpp', m: 'objectivec',
vala: 'vala', sql: 'sql', sm: 'smalltalk', lisp: 'lisp', ini: 'ini', vala: 'vala', sql: 'sql', sm: 'smalltalk', lisp: 'lisp', ini: 'ini',
diff: 'diff', bash: 'bash', sh: 'bash', tex: 'tex', erl: 'erlang', hs: 'haskell', diff: 'diff', bash: 'bash', sh: 'bash', tex: 'tex', erl: 'erlang', hs: 'haskell',
md: 'markdown', txt: '', coffee: 'coffee', swift: 'swift' md: 'markdown', txt: '', coffee: 'coffee', json: 'javascript',
swift: 'swift'
}; };
// Look up the extension preferred for a type // Look up the extension preferred for a type
@ -277,7 +276,7 @@ haste.prototype.configureButtons = function() {
$where: $('#box2 .new'), $where: $('#box2 .new'),
label: 'New', label: 'New',
shortcut: function(evt) { shortcut: function(evt) {
return evt.ctrlKey && evt.keyCode === 78; return evt.ctrlKey && evt.keyCode === 78
}, },
shortcutDescription: 'control + n', shortcutDescription: 'control + n',
action: function() { action: function() {
@ -332,14 +331,14 @@ haste.prototype.configureButton = function(options) {
} }
}); });
// Show the label // Show the label
options.$where.mouseenter(function() { options.$where.mouseenter(function(evt) {
$('#box3 .label').text(options.label); $('#box3 .label').text(options.label);
$('#box3 .shortcut').text(options.shortcutDescription || ''); $('#box3 .shortcut').text(options.shortcutDescription || '');
$('#box3').show(); $('#box3').show();
$(this).append($('#pointer').remove().show()); $(this).append($('#pointer').remove().show());
}); });
// Hide the label // Hide the label
options.$where.mouseleave(function() { options.$where.mouseleave(function(evt) {
$('#box3').hide(); $('#box3').hide();
$('#pointer').hide(); $('#pointer').hide();
}); });
@ -372,7 +371,7 @@ $(function() {
// For browsers like Internet Explorer // For browsers like Internet Explorer
if (document.selection) { if (document.selection) {
this.focus(); this.focus();
var sel = document.selection.createRange(); sel = document.selection.createRange();
sel.text = myValue; sel.text = myValue;
this.focus(); this.focus();
} }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -1,34 +0,0 @@
/* global describe, it */

const assert = require('assert');
const fs = require('fs');

const Generator = require('../../lib/key_generators/dictionary');

describe('DictionaryGenerator', function() {
  describe('options', function() {
    it('should throw an error if given no options', () => {
      assert.throws(() => {
        new Generator();
      }, Error);
    });

    it('should throw an error if given no path', () => {
      assert.throws(() => {
        new Generator({});
      }, Error);
    });
  });

  describe('generation', function() {
    // Fix: the generator loads its dictionary asynchronously, so this test
    // must use mocha's `done` callback — previously the synchronous `it`
    // finished before the readyCallback (and its assertion) ever ran.
    it('should return a key of the proper number of words from the given dictionary', (done) => {
      const path = '/tmp/haste-server-test-dictionary';
      const words = ['cat'];
      fs.writeFileSync(path, words.join('\n'));

      const gen = new Generator({path}, () => {
        assert.equal('catcatcat', gen.createKey(3));
        done();
      });
    });
  });
});

@ -1,35 +0,0 @@
/* global describe, it */

const assert = require('assert');

const Generator = require('../../lib/key_generators/phonetic');

const vowels = 'aeiou';
const consonants = 'bcdfghjklmnpqrstvwxyz';

describe('PhoneticKeyGenerator', () => {
  describe('generation', () => {
    it('should return a key of the proper length', () => {
      const gen = new Generator();
      assert.equal(6, gen.createKey(6).length);
    });

    it('should alternate consonants and vowels', () => {
      const gen = new Generator();

      // Fix: the starting letter class is random, so a single trial only
      // exercised one of the two patterns; repeat to cover both. Also drops
      // the redundant re-assertion of the branch condition.
      for (let trial = 0; trial < 10; trial++) {
        const key = gen.createKey(3);

        if (consonants.includes(key[0])) {
          // starts with a consonant: expect c-v-c
          assert.ok(vowels.includes(key[1]));
          assert.ok(consonants.includes(key[2]));
        } else {
          // starts with a vowel: expect v-c-v
          assert.ok(vowels.includes(key[0]));
          assert.ok(consonants.includes(key[1]));
          assert.ok(vowels.includes(key[2]));
        }
      }
    });
  });
});

@ -1,24 +0,0 @@
/* global describe, it */

const assert = require('assert');

const Generator = require('../../lib/key_generators/random');

describe('RandomKeyGenerator', () => {
  describe('generation', () => {
    it('should return a key of the proper length', () => {
      const gen = new Generator();
      assert.equal(gen.createKey(6).length, 6);
    });

    it('should use a key from the given keyset if given', () => {
      const gen = new Generator({keyspace: 'A'});
      assert.equal(gen.createKey(6), 'AAAAAA');
    });

    // Fix: the old test constructed a generator WITH keyspace 'A' and
    // asserted no 'B' appeared — trivially true and testing nothing.
    // Instead verify a default-keyspace key only contains alphanumerics.
    it('should only use characters from the default keyspace when none is given', () => {
      const gen = new Generator();
      const key = gen.createKey(20);
      assert.ok(/^[A-Za-z0-9]+$/.test(key));
    });
  });
});
Loading…
Cancel
Save