Compare commits

..

139 Commits

Author SHA1 Message Date
Davo be5c886c97 modified: README.md 3 years ago
Nebojsa Videnov 787d839086
Add CODEOWNERS for github workflows (#421) 3 years ago
NebulaBC 9e921d5909
Update about.md (#419) 3 years ago
Leopere 68f6fe2b96
Don't track docker-compose.override.yml (#416) 3 years ago
Filipe Chagas 7286385833 Add GHA workflow to mark and close inactive PRs and Issues 3 years ago
Linus J 3dcc43578b
Parse the expiration time as an INT (#392) 3 years ago
Bryce 1ea6b6e99d
FIXED: Document post failing because expire parameter is string. (#366) 3 years ago
Leopere 7c613bc788
Delete superfluous network definition (#404) 3 years ago
Bruno Saboia cb4809195b
Improve tests (#407) 3 years ago
Konstantin Rybakov 00d84614c2
Merge pull request #389 from Lockszmith/Fix-Dockerfile 3 years ago
Konstantin Rybakov 52e7cef7ef
Merge pull request #402 from Sueqkjs/master 3 years ago
Sueqkjs fbff1bc201
fixed spell 3 years ago
Denis Berezin 7af15cc32d
Merge pull request #384 from brunosaboia/remove-trailing-spaces 4 years ago
Denis Berezin 7f397ce753
Merge pull request #398 from toptal/npm-audit-issues 4 years ago
Denis Berezin 8f8b039f65 Fix high severity npm issues 4 years ago
Denis Berezin eeaf2d7b18
Merge pull request #393 from jmartin84/391 4 years ago
Justen Martin db0b7d6444
fixed STORAGE_USERNAME typo in dockerfile 4 years ago
lksz db6e7603f9 Fixes #376 + .dockerignore improvement 4 years ago
Bruno Saboia ad5d7549d7 Remove trailing whitespaces 4 years ago
John Crepezzi 5d2965ffc5
Merge pull request #350 from seejohnrun/specify-config-on-boot 5 years ago
John Crepezzi f255928af7 Allow setting config.js alternative on boot 5 years ago
John Crepezzi a108dbadc5
Merge pull request #349 from seejohnrun/add-head-support 5 years ago
John Crepezzi c409aca080 Add support for HEAD requests 5 years ago
John Crepezzi 219424550b Fix local name 5 years ago
John Crepezzi f147acb51c Switch to using pg.Pool 5 years ago
John Crepezzi 9a692ed652 Get the client working as expected with pg 8 5 years ago
John Crepezzi 3a17c86a0f Upgrade pg to the most recent version 5 years ago
John Crepezzi 677a22987a
Merge pull request #122 from Roundaround/mongodb 5 years ago
John Crepezzi 89d912c6ff
Merge pull request #347 from seejohnrun/fix-memcached 5 years ago
John Crepezzi 4cac6713ef Fix memcached client fetch for key not found 5 years ago
John Crepezzi f3b0de745b
Merge pull request #200 from kevinhaendel/master 5 years ago
John Crepezzi cc8a99752f
Merge pull request #271 from mklkj/fix-content-type 5 years ago
John Crepezzi 6853d077e7 Merge branch 'master' into fix-content-type 5 years ago
John Crepezzi 80a2b6f0dd
Merge pull request #241 from meseta/master 5 years ago
John Crepezzi 4f68b3d7d6 Merge branch 'master' into meseta/master 5 years ago
John Crepezzi ef0ca40533 Downgrade pg for now 5 years ago
John Crepezzi f372ef18de
Merge pull request #345 from seejohnrun/fix-json-highlighting 5 years ago
John Crepezzi 181a3a2bfa Use the now-separate json mode for json highlighting 5 years ago
John Crepezzi 61d08afb3b
Merge pull request #344 from seejohnrun/upgrade-highlight-js 5 years ago
John Crepezzi 1ba025328d Update highlight JS to the most recent version 5 years ago
John Crepezzi a79fb39f54 Merge branch 'master' of github.com:seejohnrun/haste-server 5 years ago
John Crepezzi 3a72d74537 Fix security vulnerabilities from outdated packages 5 years ago
John Crepezzi e9ae74b7a9
Merge pull request #322 from sethsmoe/patch-1 5 years ago
John Crepezzi c305e9a83d
Merge pull request #342 from seejohnrun/dependabot/npm_and_yarn/bl-4.0.3 5 years ago
dependabot[bot] 16bce4c83d
Bump bl from 4.0.2 to 4.0.3 5 years ago
John Crepezzi 661997cd73
Merge pull request #334 from ourforks/master 5 years ago
John Crepezzi 159f989d08
Merge pull request #335 from emillen/docker-support 5 years ago
emil-lengman 139df62ec4 add newline to stop github complaining 5 years ago
emil-lengman bae6387bb7 forgot to rename some vars 5 years ago
emil-lengman bb7b9571a7 write some documentation for the Docker solution 5 years ago
emil-lengman a4dc29fb2b its supposed to be milliseconds 5 years ago
emil-lengman 342f56ce1a use same password and username env vars for all types 5 years ago
emil-lengman 05ecc90764 add file path 5 years ago
emil-lengman 69cf505a90 remove pg connect string, add rethink user and password 5 years ago
emil-lengman 9f41993566 also install rethinkdb and aws-sdk 5 years ago
emil-lengman 5c9311fb85 remove unused import 5 years ago
emil-lengman 5a8d52a5e3 add healthcheck, and stopsignal, plus export the correct port 5 years ago
emil-lengman 0f145b4444 pin versions 5 years ago
emil-lengman aef4bb5edb add dockerignore file 5 years ago
emil-lengman 36c854ef1b move creating the config file to a js file 5 years ago
emil-lengman edd428ff37 fix some names for env vars 5 years ago
emil-lengman 0612ba001e basic docker-compose for running the project together with memcached 5 years ago
emil-lengman 064680003d basic dockerfile with default env vars 5 years ago
emil-lengman 655f2af45a script for turning env-vars into config.js 5 years ago
Reece Dunham ce03749c2f Update dependencies to reduce security risk 5 years ago
epdn f6084b4339
remove 1px margin from textarea, fixes useless scrollbar 5 years ago
John Crepezzi 9b0a5ff0a3
Merge pull request #291 from j3parker/s3-document-store 5 years ago
Jacob Parker 1fff48568f Document the IAM permissions 6 years ago
Jacob Parker b4c666fbcf Add an Amazon S3 document store 6 years ago
John Crepezzi b866c33c93
Merge pull request #173 from sebastiansterk/master 6 years ago
Mikołaj Pich 035cf0e91e
Fix content type 6 years ago
John Crepezzi f3838ab4a8
Merge pull request #251 from seejohnrun/handle-redis-disconnect 7 years ago
John Crepezzi bf2b1c957a Handle redis error and re-establish connection 7 years ago
Yuan Gao 86bbc1899d
Update README.md 7 years ago
Yuan Gao d41d7491d4
rename to google-datastore, and use Date.now() 7 years ago
Yuan 5fb43eb67c added condition for this.expire not defined 7 years ago
Yuan 1eeef4ede4 restored using null 7 years ago
Yuan ebc749c5e0 updated readme 7 years ago
Yuan b0bbb72f35 updated to use Date(null) 7 years ago
Yuan 2213c3874a updated readme 7 years ago
Yuan 6ebd72a86c updated readme 7 years ago
Yuan b6814a1445 bugfixes 7 years ago
Yuan e3d18efdc6 added npm package 7 years ago
Yuan 869fb65738 added googledatastore handler 7 years ago
Yuan Gao 56b939124e
Merge pull request #2 from seejohnrun/master 7 years ago
John Crepezzi ee1c1c0856
Merge pull request #231 from seejohnrun/ensure-raw-utf8 7 years ago
John Crepezzi b087ac8dd1 Added charset to raw content type 7 years ago
John Crepezzi d922667f56
Merge pull request #221 from PassTheMayo/patch-1 7 years ago
Jacob Gunther 5f6fefa7a6
Fixed unnecessary logging when document not found 7 years ago
John Crepezzi faa7e679ca
Merge pull request #216 from PassTheMayo/master 7 years ago
Jacob Gunther cd3bf26dbe
Use local method for md5 7 years ago
Jacob Gunther 830dc1bc43 Use uploads table 7 years ago
Jacob Gunther dc0f151a7f Fixed bug in RethinkDB document store and use classes 7 years ago
John Crepezzi 7f625e22f7
Merge pull request #203 from seejohnrun/rewrite_memcached 7 years ago
John Crepezzi 528b7b07a8
Merge pull request #215 from Razzeee/patch-1 7 years ago
Razzeee 2b81e67ce7
Update docs to real defaults 7 years ago
John Crepezzi 827e7b51b5 Rewrite the memcached client 7 years ago
Kevin Händel 16d529e935
Added "user-select" option to line numbers & messages 7 years ago
John Crepezzi ad7702aaf4
Merge pull request #194 from szepeviktor/patch-1 7 years ago
Viktor Szépe f5fbc8d19e
Change to HTTPS in about.md 7 years ago
John Crepezzi 0a8923bf12
Merge pull request #192 from C0rn3j/master 7 years ago
Martin 4d572a2ec0
convert relative path to absolute 7 years ago
Martin d9a53d3e6e
Add note about paste expiration, cosmetic fixes. 7 years ago
John Crepezzi 8da37ea5de
Merge pull request #190 from PassTheMayo/patch-1 8 years ago
Jacob Gunther ff0fccd6c2
Oh noes! I didn't even notice that I had a typo... 8 years ago
John Crepezzi 63c4576633
Merge pull request #189 from PassTheMayo/master 8 years ago
Jacob Gunther b31d143bcd
Revert config.js to previous state 8 years ago
Jacob Gunther 0d8aec8d61
Oops, forgot to fix that file name 8 years ago
Jacob Gunther 1f9fdd205d
Undid changes to server.js 8 years ago
Jacob Gunther cdd0cf3739
Fixed requested changes to RethinkDB handler 8 years ago
PassTheMayo ba5c6b8d16 Added RethinkDB storage option & fixed config to use proper JSON 8 years ago
John Crepezzi cfef588283
Merge pull request #181 from seejohnrun/simplify_uglify 8 years ago
John Crepezzi 318c5f7ba6 Upgrade uglify and simplify usage 8 years ago
John Crepezzi ee03e7cd78
Merge pull request #180 from seejohnrun/es6_generators 8 years ago
John Crepezzi 3b6934e348 Phonetic key generator to es6 and add some tests 8 years ago
John Crepezzi f161cc33b4 Added tests and converted dictionary key generator to es6 8 years ago
John Crepezzi 40f1f2588e Update some es6 8 years ago
John Crepezzi e4e025f67e Convert random generator to es6 and add some specs for it directly 8 years ago
John Crepezzi e12805a8aa
Merge pull request #179 from seejohnrun/upgrade_testing 8 years ago
John Crepezzi e76c845f16 Upgrade testing libraries 8 years ago
John Crepezzi 072418695e
Merge pull request #178 from seejohnrun/upgrade_highlight 8 years ago
John Crepezzi 584b66bc66 Upgrade highlight.js 8 years ago
Sebastian Sterk f8db455f74 removed padding for #box for correct view 8 years ago
John Crepezzi f19c5d1049 Fix typo in README 8 years ago
John Crepezzi c5b859ec98 Bump node engine version & fix asset compression on start 8 years ago
John Crepezzi 2ee93a7409 Merge pull request #160 from seejohnrun/complete_eslint 8 years ago
John Crepezzi bf1dbb68b8 Fix eslint 8 years ago
John Crepezzi cf28e23d8e Merge pull request #159 from seejohnrun/add_eslint 8 years ago
John Crepezzi 5939dec185 Added eslint and fixed an issue from #158 8 years ago
John Crepezzi 3ed1d775ac Merge pull request #158 from KlasafGeijerstam/master 8 years ago
John Crepezzi 87b1c76aaf One more 8 years ago
John Crepezzi 4599203bdf A few style nit-picks 8 years ago
Klas af Geijerstam d66bc9a6c4 Removed unused lines 8 years ago
Klas af Geijerstam 80f0618736 Updated dictionary.js 8 years ago
Klas af Geijerstam ac2bceefbb Added missing ) 8 years ago
Klas af Geijerstam dbf4f6b5dd Removed usage of random-js 8 years ago
Klas af Geijerstam 8e9205cecc Update dictionary.js 8 years ago
Klas af Geijerstam e54a860172 Added dictionary.js 8 years ago
Evan Steinkerchner d3db5e2a5d Added mongodb document store adapter 9 years ago
  1. 8
      .dockerignore
  2. 2
      .eslintignore
  3. 25
      .eslintrc.json
  4. 1
      .github/CODEOWNERS
  5. 30
      .github/workflows/close-inactive.yaml
  6. 1
      .gitignore
  7. 68
      Dockerfile
  8. 196
      README.md
  9. 4
      about.md
  10. 3
      config.js
  11. 12
      docker-compose.yaml
  12. 108
      docker-entrypoint.js
  13. 9
      docker-entrypoint.sh
  14. 41
      lib/document_handler.js
  15. 56
      lib/document_stores/amazon-s3.js
  16. 89
      lib/document_stores/google-datastore.js
  17. 93
      lib/document_stores/memcached.js
  18. 88
      lib/document_stores/mongo.js
  19. 21
      lib/document_stores/postgres.js
  20. 11
      lib/document_stores/redis.js
  21. 46
      lib/document_stores/rethinkdb.js
  22. 32
      lib/key_generators/dictionary.js
  23. 46
      lib/key_generators/phonetic.js
  24. 31
      lib/key_generators/random.js
  25. 1652
      package-lock.json
  26. 22
      package.json
  27. 57
      server.js
  28. 5
      static/application.css
  29. 21
      static/application.js
  30. 2
      static/application.min.js
  31. 8
      static/highlight.min.js
  32. 8
      test/document_handler_spec.js
  33. 34
      test/key_generators/dictionary_spec.js
  34. 35
      test/key_generators/phonetic_spec.js
  35. 24
      test/key_generators/random_spec.js
  36. 16
      test/redis_document_store_spec.js

@ -0,0 +1,8 @@
# Paths excluded from the Docker build context.
Dockerfile
.git
npm-debug.log
node_modules
*.swp
*.swo
data
*.DS_Store

@ -0,0 +1,2 @@
**/*.min.js
config.js

@ -0,0 +1,25 @@
{
  "env": {
    "es6": true,
    "node": true
  },
  "extends": "eslint:recommended",
  "rules": {
    "indent": [
      "error",
      2
    ],
    "linebreak-style": [
      "error",
      "unix"
    ],
    "quotes": [
      "error",
      "single"
    ],
    "semi": [
      "error",
      "always"
    ]
  }
}

@ -0,0 +1 @@
* @toptal/site-acquisition-eng

@ -0,0 +1,30 @@
# Automatically mark inactive issues/PRs as stale and close them after a
# grace period, using the actions/stale action. Runs nightly at 01:30 UTC
# and can also be triggered manually via workflow_dispatch.
name: Close inactive issues and PRs
on:
  workflow_dispatch:
  schedule:
    - cron: "30 1 * * *"

jobs:
  close-stale:
    runs-on: ubuntu-latest
    # Minimal permissions: the action only needs to label/comment/close.
    permissions:
      issues: write
      pull-requests: write
    steps:
      - uses: actions/stale@v3
        with:
          # Mark after 30 idle days; close 14 days after being marked.
          days-before-stale: 30
          days-before-close: 14
          stale-issue-label: "stale"
          stale-pr-label: "stale"
          # Items carrying these labels are never marked stale.
          exempt-issue-labels: backlog,triage,nostale
          exempt-pr-labels: backlog,triage,nostale
          stale-pr-message: "This PR is stale because it has been open for 30 days with no activity."
          close-pr-message: "This PR was closed because it has been inactive for 14 days since being marked as stale."
          stale-issue-message: "This issue is stale because it has been open for 30 days with no activity."
          close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
          repo-token: ${{ secrets.GITHUB_TOKEN }}

1
.gitignore vendored

@ -4,3 +4,4 @@ node_modules
*.swo *.swo
data data
*.DS_Store *.DS_Store
docker-compose.override.yml

@ -0,0 +1,68 @@
FROM node:14.8.0-stretch

# Run the app as the unprivileged "node" user shipped with the base image.
RUN mkdir -p /usr/src/app && \
    chown node:node /usr/src/app

USER node:node
WORKDIR /usr/src/app

COPY --chown=node:node . .

# Install the optional storage drivers so any STORAGE_TYPE works out of the box.
RUN npm install && \
    npm install redis@0.8.1 && \
    npm install pg@4.1.1 && \
    npm install memcached@2.2.2 && \
    npm install aws-sdk@2.738.0 && \
    npm install rethinkdbdash@2.3.31

# Storage defaults, consumed by docker-entrypoint.js when it generates config.js.
# NOTE: fixed STORAGE_USENAME -> STORAGE_USERNAME so the variable name matches
# what docker-entrypoint.js actually reads.
ENV STORAGE_TYPE=memcached \
    STORAGE_HOST=127.0.0.1 \
    STORAGE_PORT=11211 \
    STORAGE_EXPIRE_SECONDS=2592000 \
    STORAGE_DB=2 \
    STORAGE_AWS_BUCKET= \
    STORAGE_AWS_REGION= \
    STORAGE_USERNAME= \
    STORAGE_PASSWORD= \
    STORAGE_FILEPATH=

ENV LOGGING_LEVEL=verbose \
    LOGGING_TYPE=Console \
    LOGGING_COLORIZE=true

ENV HOST=0.0.0.0 \
    PORT=7777 \
    KEY_LENGTH=10 \
    MAX_LENGTH=400000 \
    STATIC_MAX_AGE=86400 \
    RECOMPRESS_STATIC_ASSETS=true

ENV KEYGENERATOR_TYPE=phonetic \
    KEYGENERATOR_KEYSPACE=

ENV RATELIMITS_NORMAL_TOTAL_REQUESTS=500 \
    RATELIMITS_NORMAL_EVERY_MILLISECONDS=60000 \
    RATELIMITS_WHITELIST_TOTAL_REQUESTS= \
    RATELIMITS_WHITELIST_EVERY_MILLISECONDS= \
    # comma separated list for the whitelisted \
    RATELIMITS_WHITELIST=example1.whitelist,example2.whitelist \
    \
    RATELIMITS_BLACKLIST_TOTAL_REQUESTS= \
    RATELIMITS_BLACKLIST_EVERY_MILLISECONDS= \
    # comma separated list for the blacklisted \
    RATELIMITS_BLACKLIST=example1.blacklist,example2.blacklist

ENV DOCUMENTS=about=./about.md

EXPOSE ${PORT}
STOPSIGNAL SIGINT

# Generate config.js from the environment, then exec the CMD.
ENTRYPOINT [ "bash", "docker-entrypoint.sh" ]

HEALTHCHECK --interval=30s --timeout=30s --start-period=5s \
    --retries=3 CMD [ "sh", "-c", "echo -n 'curl localhost:7777... '; \
    (\
      curl -sf localhost:7777 > /dev/null\
    ) && echo OK || (\
      echo Fail && exit 2\
    )"]

CMD ["npm", "start"]

@ -1,6 +1,6 @@
# Haste # Haste
Haste is an open-source pastebin software written in node.js, which is easily haste is an open-source pastebin software written in node.js, which is easily
installable in any network. It can be backed by either redis or filesystem, installable in any network. It can be backed by either redis or filesystem,
and has a very easy adapter interface for other stores. A publicly available and has a very easy adapter interface for other stores. A publicly available
version can be found at [hastebin.com](http://hastebin.com) version can be found at [hastebin.com](http://hastebin.com)
@ -31,16 +31,16 @@ STDOUT. Check the README there for more details and usages.
1. Download the package, and expand it 1. Download the package, and expand it
2. Explore the settings inside of config.js, but the defaults should be good 2. Explore the settings inside of config.js, but the defaults should be good
3. `npm install` 3. `npm install`
4. `npm start` 4. `npm start` (you may specify an optional `<config-path>` as well)
## Settings ## Settings
* `host` - the host the server runs on (default localhost) * `host` - the host the server runs on (default localhost)
* `port` - the port the server runs on (default 7777) * `port` - the port the server runs on (default 7777)
* `keyLength` - the length of the keys to user (default 10) * `keyLength` - the length of the keys to user (default 10)
* `maxLength` - maximum length of a paste (default none) * `maxLength` - maximum length of a paste (default 400000)
* `staticMaxAge` - max age for static assets (86400) * `staticMaxAge` - max age for static assets (86400)
* `recompressStatisAssets` - whether or not to compile static js assets (true) * `recompressStaticAssets` - whether or not to compile static js assets (true)
* `documents` - static documents to serve (ex: http://hastebin.com/about.com) * `documents` - static documents to serve (ex: http://hastebin.com/about.com)
in addition to static assets. These will never expire. in addition to static assets. These will never expire.
* `storage` - storage options (see below) * `storage` - storage options (see below)
@ -52,7 +52,7 @@ STDOUT. Check the README there for more details and usages.
When present, the `rateLimits` option enables built-in rate limiting courtesy When present, the `rateLimits` option enables built-in rate limiting courtesy
of `connect-ratelimit`. Any of the options supported by that library can be of `connect-ratelimit`. Any of the options supported by that library can be
used and set in `config.json`. used and set in `config.js`.
See the README for [connect-ratelimit](https://github.com/dharmafly/connect-ratelimit) See the README for [connect-ratelimit](https://github.com/dharmafly/connect-ratelimit)
for more information! for more information!
@ -97,7 +97,9 @@ something like:
} }
``` ```
Where `path` represents where you want the files stored where `path` represents where you want the files stored.
File storage currently does not support paste expiration, you can follow [#191](https://github.com/seejohnrun/haste-server/issues/191) for status updates.
### Redis ### Redis
@ -152,11 +154,33 @@ or post.
All of which are optional except `type` with very logical default values. All of which are optional except `type` with very logical default values.
### MongoDB
To use mongodb storage you must install the `mongodb` package via npm
`npm install mongodb`
Once you've done that, your config section should look like:
``` json
{
"type": "mongo",
"connectionUrl": "mongodb://localhost:27017/database"
}
```
You can also just set the environment variable for `DATABASE_URL` to your database connection url.
Unlike with postgres you do NOT have to create the table in your mongo database prior to running.
You can also set an `expire` option to the number of seconds to expire keys in.
This is off by default, but will constantly kick back expirations on each view or post.
### Memcached ### Memcached
To use memcached storage you must install the `memcache` package via npm To use memcache storage you must install the `memcached` package via npm
`npm install memcache` `npm install memcached`
Once you've done that, your config section should look like: Once you've done that, your config section should look like:
@ -174,6 +198,162 @@ forward on GETs.
All of which are optional except `type` with very logical default values. All of which are optional except `type` with very logical default values.
### RethinkDB
To use the RethinkDB storage system, you must install the `rethinkdbdash` package via npm
`npm install rethinkdbdash`
Once you've done that, your config section should look like this:
``` json
{
"type": "rethinkdb",
"host": "127.0.0.1",
"port": 28015,
"db": "haste"
}
```
In order for this to work, the database must be pre-created before the script is run.
Also, you must create an `uploads` table, which will store all the data for uploads.
You can optionally add the `user` and `password` properties to use a user system.
### Google Datastore
To use the Google Datastore storage system, you must install the `@google-cloud/datastore` package via npm
`npm install @google-cloud/datastore`
Once you've done that, your config section should look like this:
``` json
{
"type": "google-datastore"
}
```
Authentication is handled automatically by [Google Cloud service account credentials](https://cloud.google.com/docs/authentication/getting-started), by providing authentication details to the GOOGLE_APPLICATION_CREDENTIALS environmental variable.
### Amazon S3
To use [Amazon S3](https://aws.amazon.com/s3/) as a storage system, you must
install the `aws-sdk` package via npm:
`npm install aws-sdk`
Once you've done that, your config section should look like this:
```json
{
"type": "amazon-s3",
"bucket": "your-bucket-name",
"region": "us-east-1"
}
```
Authentication is handled automatically by the client. Check
[Amazon's documentation](https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/setting-credentials-node.html)
for more information. You will need to grant your role these permissions to
your bucket:
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"s3:GetObject",
"s3:PutObject"
],
"Effect": "Allow",
"Resource": "arn:aws:s3:::your-bucket-name-goes-here/*"
}
]
}
```
## Docker
### Build image
```bash
docker build --tag haste-server .
```
### Run container
For this example we will run haste-server, and connect it to a redis server
```bash
docker run --name haste-server-container --env STORAGE_TYPE=redis --env STORAGE_HOST=redis-server --env STORAGE_PORT=6379 haste-server
```
### Use docker-compose example
There is an example `docker-compose.yml` which runs haste-server together with memcached
```bash
docker-compose up
```
### Configuration
The docker image is configured using environmental variables as you can see in the example above.
Here is a list of all the environment variables
### Storage
| Name | Default value | Description |
| :--------------------: | :-----------: | :-----------------------------------------------------------------------------------------------------------: |
| STORAGE_TYPE | memcached | Type of storage. Accepted values: "memcached", "redis", "postgres", "rethinkdb", "amazon-s3", and "file" |
| STORAGE_HOST | 127.0.0.1 | Storage host. Applicable for types: memcached, redis, postgres, and rethinkdb |
| STORAGE_PORT | 11211 | Port on the storage host. Applicable for types: memcached, redis, postgres, and rethinkdb |
| STORAGE_EXPIRE_SECONDS | 2592000 | Number of seconds to expire keys in. Applicable for types: redis, postgres, and memcached (passed through as the `expire` storage option) |
| STORAGE_DB | 2 | The name of the database. Applicable for redis, postgres, and rethinkdb |
| STORAGE_PASSWORD | | Password for database. Applicable for redis, postgres, and rethinkdb. |
| STORAGE_USERNAME | | Database username. Applicable for postgres, and rethinkdb |
| STORAGE_AWS_BUCKET | | Applicable for amazon-s3. This is the name of the S3 bucket |
| STORAGE_AWS_REGION | | Applicable for amazon-s3. The region in which the bucket is located |
| STORAGE_FILEPATH | | Path to file to save data to. Applicable for type file |
### Logging
| Name | Default value | Description |
| :---------------: | :-----------: | :---------: |
| LOGGING_LEVEL | verbose | |
| LOGGING_TYPE | Console | |
| LOGGING_COLORIZE | true | |
### Basics
| Name | Default value | Description |
| :----------------------: | :--------------: | :---------------------------------------------------------------------------------------: |
| HOST | 0.0.0.0 | The hostname which the server answers on |
| PORT | 7777 | The port on which the server is running |
| KEY_LENGTH | 10 | the length of the keys to use |
| MAX_LENGTH | 400000 | maximum length of a paste |
| STATIC_MAX_AGE | 86400 | max age for static assets |
| RECOMPRESS_STATIC_ASSETS | true | whether or not to compile static js assets |
| KEYGENERATOR_TYPE | phonetic | Type of key generator. Acceptable values: "phonetic", or "random" |
| KEYGENERATOR_KEYSPACE | | keySpace argument is a string of acceptable characters |
| DOCUMENTS | about=./about.md | Comma separated list of static documents to serve. ex: \n about=./about.md,home=./home.md |
### Rate limits
| Name | Default value | Description |
| :----------------------------------: | :-----------------------------------: | :--------------------------------------------------------------------------------------: |
| RATELIMITS_NORMAL_TOTAL_REQUESTS | 500 | By default anyone uncategorized will be subject to 500 requests in the defined timespan. |
| RATELIMITS_NORMAL_EVERY_MILLISECONDS | 60000 | The timespan to allow the total requests for uncategorized users |
| RATELIMITS_WHITELIST_TOTAL_REQUESTS | | By default client names in the whitelist will not have their requests limited. |
| RATELIMITS_WHITELIST_EVERY_MILLISECONDS | | The timespan to allow the total requests for whitelisted users |
| RATELIMITS_WHITELIST | example1.whitelist,example2.whitelist | Comma separated list of the clients which are in the whitelist pool |
| RATELIMITS_BLACKLIST_TOTAL_REQUESTS | | By default client names in the blacklist will be subject to 0 requests per hour. |
| RATELIMITS_BLACKLIST_EVERY_MILLISECONDS | | The timespan to allow the total requests for blacklisted users |
| RATELIMITS_BLACKLIST | example1.blacklist,example2.blacklist | Comma separated list of the clients which are in the blacklist pool. |
## Author ## Author
John Crepezzi <john.crepezzi@gmail.com> John Crepezzi <john.crepezzi@gmail.com>

@ -19,13 +19,13 @@ Most of the time I want to show you some text, it's coming from my current
console session. We should make it really easy to take code from the console console session. We should make it really easy to take code from the console
and send it to people. and send it to people.
`cat something | haste` # http://hastebin.com/1238193 `cat something | haste` # https://hastebin.com/1238193
You can even take this a step further, and cut out the last step of copying the You can even take this a step further, and cut out the last step of copying the
URL with: URL with:
* osx: `cat something | haste | pbcopy` * osx: `cat something | haste | pbcopy`
* linux: `cat something | haste | xsel` * linux: `cat something | haste | xsel -b`
* windows: check out [WinHaste](https://github.com/ajryan/WinHaste) * windows: check out [WinHaste](https://github.com/ajryan/WinHaste)
After running that, the STDOUT output of `cat something` will show up at a URL After running that, the STDOUT output of `cat something` will show up at a URL

@ -33,8 +33,7 @@
}, },
"storage": { "storage": {
"type": "postgres", "type": "file"
"expire": 2592000
}, },
"documents": { "documents": {

@ -0,0 +1,12 @@
# Example stack: haste-server backed by memcached. Run with `docker-compose up`.
version: '3.0'

services:
  haste-server:
    build: .
    environment:
      # The service name doubles as the hostname on the compose network.
      - STORAGE_TYPE=memcached
      - STORAGE_HOST=memcached
      - STORAGE_PORT=11211
    ports:
      - 7777:7777

  memcached:
    image: memcached:latest

@ -0,0 +1,108 @@
/**
 * Translates environment variables into the JSON configuration consumed by
 * haste-server. Run as `node docker-entrypoint.js > config.js` (see
 * docker-entrypoint.sh); the resulting JSON is printed on STDOUT.
 *
 * Fixed: the script previously destructured `RATE_LIMITS_*` and
 * `KEY_GENERATOR_KEYSPACE`, which never matched the `RATELIMITS_*` /
 * `KEYGENERATOR_KEYSPACE` variables the Dockerfile exports, so those
 * settings were silently ignored.
 */
const {
  HOST,
  PORT,
  KEY_LENGTH,
  MAX_LENGTH,
  STATIC_MAX_AGE,
  RECOMPRESS_STATIC_ASSETS,
  STORAGE_TYPE,
  STORAGE_HOST,
  STORAGE_PORT,
  STORAGE_EXPIRE_SECONDS,
  STORAGE_DB,
  STORAGE_AWS_BUCKET,
  STORAGE_AWS_REGION,
  STORAGE_PASSWORD,
  STORAGE_USERNAME,
  STORAGE_FILEPATH,
  LOGGING_LEVEL,
  LOGGING_TYPE,
  LOGGING_COLORIZE,
  KEYGENERATOR_TYPE,
  KEYGENERATOR_KEYSPACE,
  RATELIMITS_NORMAL_TOTAL_REQUESTS,
  RATELIMITS_NORMAL_EVERY_MILLISECONDS,
  RATELIMITS_WHITELIST_TOTAL_REQUESTS,
  RATELIMITS_WHITELIST_EVERY_MILLISECONDS,
  RATELIMITS_WHITELIST,
  RATELIMITS_BLACKLIST_TOTAL_REQUESTS,
  RATELIMITS_BLACKLIST_EVERY_MILLISECONDS,
  RATELIMITS_BLACKLIST,
  DOCUMENTS,
} = process.env;

// Coerce an env string to a number; return undefined (so JSON.stringify
// drops the key and haste-server falls back to its own default) when the
// variable is unset, empty, or not numeric — previously `Number(undefined)`
// produced NaN, which serializes to null.
const num = (value) => {
  if (value == null || value === "") return undefined;
  const parsed = Number(value);
  return Number.isFinite(parsed) ? parsed : undefined;
};

// Coerce "true"/"false" env strings to real booleans; undefined when unset.
// Previously the raw string was emitted, so "false" was truthy downstream.
const bool = (value) => (value == null ? undefined : value === "true");

// "a, b ,c" -> ["a", "b", "c"]; unset/empty -> [].
const list = (value) =>
  value
    ? value
        .split(",")
        .map((item) => item.trim())
        .filter(Boolean)
    : [];

const config = {
  host: HOST,
  port: num(PORT),
  keyLength: num(KEY_LENGTH),
  maxLength: num(MAX_LENGTH),
  staticMaxAge: num(STATIC_MAX_AGE),
  recompressStaticAssets: bool(RECOMPRESS_STATIC_ASSETS),
  logging: [
    {
      level: LOGGING_LEVEL,
      type: LOGGING_TYPE,
      colorize: bool(LOGGING_COLORIZE),
    },
  ],
  keyGenerator: {
    type: KEYGENERATOR_TYPE,
    keyspace: KEYGENERATOR_KEYSPACE,
  },
  rateLimits: {
    whitelist: list(RATELIMITS_WHITELIST),
    blacklist: list(RATELIMITS_BLACKLIST),
    categories: {
      normal: {
        totalRequests: num(RATELIMITS_NORMAL_TOTAL_REQUESTS),
        every: num(RATELIMITS_NORMAL_EVERY_MILLISECONDS),
      },
      // Only emit a category when at least one of its limits is configured.
      whitelist:
        RATELIMITS_WHITELIST_EVERY_MILLISECONDS ||
        RATELIMITS_WHITELIST_TOTAL_REQUESTS
          ? {
              totalRequests: num(RATELIMITS_WHITELIST_TOTAL_REQUESTS),
              every: num(RATELIMITS_WHITELIST_EVERY_MILLISECONDS),
            }
          : null,
      blacklist:
        RATELIMITS_BLACKLIST_EVERY_MILLISECONDS ||
        RATELIMITS_BLACKLIST_TOTAL_REQUESTS
          ? {
              // Fixed copy/paste bug: this previously read the WHITELIST total.
              totalRequests: num(RATELIMITS_BLACKLIST_TOTAL_REQUESTS),
              every: num(RATELIMITS_BLACKLIST_EVERY_MILLISECONDS),
            }
          : null,
    },
  },
  storage: {
    type: STORAGE_TYPE,
    host: STORAGE_HOST,
    port: num(STORAGE_PORT),
    expire: num(STORAGE_EXPIRE_SECONDS),
    bucket: STORAGE_AWS_BUCKET,
    region: STORAGE_AWS_REGION,
    // Only the postgres store reads connectionUrl; harmless for other types.
    connectionUrl: `postgres://${STORAGE_USERNAME}:${STORAGE_PASSWORD}@${STORAGE_HOST}:${STORAGE_PORT}/${STORAGE_DB}`,
    db: STORAGE_DB,
    user: STORAGE_USERNAME,
    password: STORAGE_PASSWORD,
    path: STORAGE_FILEPATH,
  },
  // "about=./about.md,home=./home.md" -> { about: "./about.md", home: "./home.md" }
  documents: DOCUMENTS
    ? DOCUMENTS.split(",").reduce((acc, entry) => {
        const [key, value] = entry.replace(/\s/g, "").split("=");
        return { ...acc, [key]: value };
      }, {})
    : null,
};

console.log(JSON.stringify(config));

@ -0,0 +1,9 @@
#!/bin/bash
# Translate environment variables into the config.js consumed by haste-server
# (the generated JSON is written to ./config.js), then hand control to the
# container CMD. Note: despite the old comment, no .env file is produced.
set -e
node ./docker-entrypoint.js > ./config.js
# Replace the shell with the CMD so signals (STOPSIGNAL SIGINT) reach node.
exec "$@"

@ -16,33 +16,55 @@ var DocumentHandler = function(options) {
DocumentHandler.defaultKeyLength = 10; DocumentHandler.defaultKeyLength = 10;
// Handle retrieving a document // Handle retrieving a document
DocumentHandler.prototype.handleGet = function(key, response, skipExpire) { DocumentHandler.prototype.handleGet = function(request, response, config) {
const key = request.params.id.split('.')[0];
const skipExpire = !!config.documents[key];
this.store.get(key, function(ret) { this.store.get(key, function(ret) {
if (ret) { if (ret) {
winston.verbose('retrieved document', { key: key }); winston.verbose('retrieved document', { key: key });
response.writeHead(200, { 'content-type': 'application/json' }); response.writeHead(200, { 'content-type': 'application/json' });
response.end(JSON.stringify({ data: ret, key: key })); if (request.method === 'HEAD') {
response.end();
} else {
response.end(JSON.stringify({ data: ret, key: key }));
}
} }
else { else {
winston.warn('document not found', { key: key }); winston.warn('document not found', { key: key });
response.writeHead(404, { 'content-type': 'application/json' }); response.writeHead(404, { 'content-type': 'application/json' });
response.end(JSON.stringify({ message: 'Document not found.' })); if (request.method === 'HEAD') {
response.end();
} else {
response.end(JSON.stringify({ message: 'Document not found.' }));
}
} }
}, skipExpire); }, skipExpire);
}; };
// Handle retrieving the raw version of a document // Handle retrieving the raw version of a document
DocumentHandler.prototype.handleRawGet = function(key, response, skipExpire) { DocumentHandler.prototype.handleRawGet = function(request, response, config) {
const key = request.params.id.split('.')[0];
const skipExpire = !!config.documents[key];
this.store.get(key, function(ret) { this.store.get(key, function(ret) {
if (ret) { if (ret) {
winston.verbose('retrieved raw document', { key: key }); winston.verbose('retrieved raw document', { key: key });
response.writeHead(200, { 'content-type': 'text/plain' }); response.writeHead(200, { 'content-type': 'text/plain; charset=UTF-8' });
response.end(ret); if (request.method === 'HEAD') {
response.end();
} else {
response.end(ret);
}
} }
else { else {
winston.warn('raw document not found', { key: key }); winston.warn('raw document not found', { key: key });
response.writeHead(404, { 'content-type': 'application/json' }); response.writeHead(404, { 'content-type': 'application/json' });
response.end(JSON.stringify({ message: 'Document not found.' })); if (request.method === 'HEAD') {
response.end();
} else {
response.end(JSON.stringify({ message: 'Document not found.' }));
}
} }
}, skipExpire); }, skipExpire);
}; };
@ -69,8 +91,7 @@ DocumentHandler.prototype.handlePost = function (request, response) {
_this.chooseKey(function (key) { _this.chooseKey(function (key) {
_this.store.set(key, buffer, function (res) { _this.store.set(key, buffer, function (res) {
if (res) { if (res) {
var ip = request.headers['x-forwarded-for'] || request.ip; winston.verbose('added document', { key: key });
winston.verbose('added document', { key: key, ip: ip });
response.writeHead(200, { 'content-type': 'application/json' }); response.writeHead(200, { 'content-type': 'application/json' });
response.end(JSON.stringify({ key: key })); response.end(JSON.stringify({ key: key }));
} }
@ -124,7 +145,7 @@ DocumentHandler.prototype.chooseKey = function(callback) {
} else { } else {
callback(key); callback(key);
} }
}); }, true); // Don't bump expirations when key searching
}; };
DocumentHandler.prototype.acceptableKey = function() { DocumentHandler.prototype.acceptableKey = function() {

@ -0,0 +1,56 @@
/*global require,module,process*/
var AWS = require('aws-sdk');
var winston = require('winston');
// A document store that keeps each paste as an object in an S3 bucket.
// options: { bucket, region, expire } — `expire` is accepted for interface
// parity with the other stores, but S3 cannot enforce per-key expirations
// (get/set only emit warnings when one is requested).
var AmazonS3DocumentStore = function(options) {
  var bucket = options.bucket;
  var region = options.region;
  this.expire = options.expire;
  this.bucket = bucket;
  this.client = new AWS.S3({region: region});
};
// Fetch the document stored under `key`.
// Invokes `callback` with the object body as a utf-8 string on success,
// or false on any S3 error (including a missing key).
AmazonS3DocumentStore.prototype.get = function(key, callback, skipExpire) {
  var _this = this;

  var req = {
    Bucket: _this.bucket,
    Key: key
  };

  _this.client.getObject(req, function(err, data) {
    if (err) {
      callback(false);
    }
    else {
      callback(data.Body.toString('utf-8'));
      // S3 offers no per-key TTL API, so we can only warn when an
      // expiration was configured and not explicitly skipped.
      if (_this.expire && !skipExpire) {
        winston.warn('amazon s3 store cannot set expirations on keys');
      }
    }
  });
};
// Store `data` under `key` as a text/plain object in the bucket.
// Invokes `callback` with true on success, false on any S3 error.
AmazonS3DocumentStore.prototype.set = function(key, data, callback, skipExpire) {
  var _this = this;

  var req = {
    Bucket: _this.bucket,
    Key: key,
    Body: data,
    ContentType: 'text/plain'
  };

  // The response argument is named `result` so it does not shadow the
  // `data` parameter that holds the document body.
  _this.client.putObject(req, function(err, result) {
    if (err) {
      callback(false);
    }
    else {
      callback(true);
      // S3 offers no per-key TTL API, so we can only warn when an
      // expiration was configured and not explicitly skipped.
      if (_this.expire && !skipExpire) {
        winston.warn('amazon s3 store cannot set expirations on keys');
      }
    }
  });
};

module.exports = AmazonS3DocumentStore;

@ -0,0 +1,89 @@
/*global require,module,process*/
const Datastore = require('@google-cloud/datastore');
const winston = require('winston');
// A document store backed by Google Cloud Datastore.
// Documents are entities of kind "Haste" holding a `value` payload and an
// optional `expiration` Date; expired entries are reported as missing.
class GoogleDatastoreDocumentStore {

  // Create a new store with options ({ expire } in seconds, optional)
  constructor(options) {
    this.kind = "Haste";
    this.expire = options.expire;
    this.datastore = new Datastore();
  }

  // Save `data` under `key`; invokes callback(true|false).
  set(key, data, callback, skipExpire) {
    var expireTime = (skipExpire || this.expire === undefined) ? null : new Date(Date.now() + this.expire * 1000);

    var taskKey = this.datastore.key([this.kind, key]);
    var task = {
      key: taskKey,
      data: [
        {
          name: 'value',
          value: data,
          excludeFromIndexes: true
        },
        {
          name: 'expiration',
          value: expireTime
        }
      ]
    };

    this.datastore.insert(task).then(() => {
      callback(true);
    })
    .catch(err => {
      // insert rejects when the key already exists or the API call fails;
      // previously this failure was swallowed silently
      winston.error('failed to insert document into Google Datastore', {error: err});
      callback(false);
    });
  }

  // Get the document stored under `key`; invokes callback(value|false).
  get(key, callback, skipExpire) {
    var taskKey = this.datastore.key([this.kind, key]);
    this.datastore.get(taskKey).then((entity) => {
      if (skipExpire || entity[0]["expiration"] == null) {
        callback(entity[0]["value"]);
      }
      else {
        // check for expiry
        if (entity[0]["expiration"] < new Date()) {
          // BUGFIX: log the actual comparison time; the old code logged
          // new Date(null), i.e. the Unix epoch
          winston.info("document expired", {key: key, expiration: entity[0]["expiration"], check: new Date()});
          callback(false);
        }
        else {
          // push the expiration window forward on read (fire-and-forget;
          // the value is returned without waiting for the update)
          var task = {
            key: taskKey,
            data: [
              {
                name: 'value',
                value: entity[0]["value"],
                excludeFromIndexes: true
              },
              {
                name: 'expiration',
                value: new Date(Date.now() + this.expire * 1000)
              }
            ]
          };

          this.datastore.update(task).then(() => {
          })
          .catch(err => {
            winston.error("failed to update expiration", {error: err});
          });
          callback(entity[0]["value"]);
        }
      }
    })
    .catch(err => {
      // also reached when the key does not exist (entity[0] is undefined)
      winston.error("Error retrieving value from Google Datastore", {error: err});
      callback(false);
    });
  }
}

module.exports = GoogleDatastoreDocumentStore;

@ -1,45 +1,54 @@
var memcached = require('memcache'); const memcached = require('memcached');
var winston = require('winston'); const winston = require('winston');
// Create a new store with options class MemcachedDocumentStore {
var MemcachedDocumentStore = function(options) {
this.expire = options.expire; // Create a new store with options
if (!MemcachedDocumentStore.client) { constructor(options) {
MemcachedDocumentStore.connect(options); this.expire = options.expire;
const host = options.host || '127.0.0.1';
const port = options.port || 11211;
const url = `${host}:${port}`;
this.connect(url);
}
// Create a connection
connect(url) {
this.client = new memcached(url);
winston.info(`connecting to memcached on ${url}`);
this.client.on('failure', function(error) {
winston.info('error connecting to memcached', {error});
});
} }
};
// Save file in a key
// Create a connection set(key, data, callback, skipExpire) {
MemcachedDocumentStore.connect = function(options) { this.client.set(key, data, skipExpire ? 0 : this.expire || 0, (error) => {
var host = options.host || '127.0.0.1'; callback(!error);
var port = options.port || 11211; });
this.client = new memcached.Client(port, host); }
this.client.connect();
this.client.on('connect', function() { // Get a file from a key
winston.info('connected to memcached on ' + host + ':' + port); get(key, callback, skipExpire) {
}); this.client.get(key, (error, data) => {
this.client.on('error', function(e) { const value = error ? false : data;
winston.info('error connecting to memcached', { error: e });
}); callback(value);
};
// Update the key so that the expiration is pushed forward
// Save file in a key if (value && !skipExpire) {
MemcachedDocumentStore.prototype.set = this.set(key, data, (updateSucceeded) => {
function(key, data, callback, skipExpire) { if (!updateSucceeded) {
MemcachedDocumentStore.client.set(key, data, function(err, reply) { winston.error('failed to update expiration on GET', {key});
err ? callback(false) : callback(true); }
}, skipExpire ? 0 : this.expire); }, skipExpire);
}; }
});
// Get a file from a key }
MemcachedDocumentStore.prototype.get = function(key, callback, skipExpire) {
var _this = this; }
MemcachedDocumentStore.client.get(key, function(err, reply) {
callback(err ? false : reply);
if (_this.expire && !skipExpire) {
winston.warn('store does not currently push forward expirations on GET');
}
});
};
module.exports = MemcachedDocumentStore; module.exports = MemcachedDocumentStore;

@ -0,0 +1,88 @@
var MongoClient = require('mongodb').MongoClient,
winston = require('winston');
// A document store backed by MongoDB (documents live in the 'entries'
// collection). The connection URL comes from the DATABASE_URL environment
// variable, falling back to options.connectionUrl.
var MongoDocumentStore = function (options) {
  this.expire = options.expire;
  // BUGFIX: the env var was previously read as 'DATABASE_URl' (lowercase l
  // typo), so a configured DATABASE_URL was silently ignored.
  this.connectionUrl = process.env.DATABASE_URL || options.connectionUrl;
};
// Upsert `data` under `key`, replacing only entries that are unexpired
// (expiration of -1 means "never expires"). Invokes callback(true|false).
MongoDocumentStore.prototype.set = function (key, data, callback, skipExpire) {
  var self = this;
  var nowSeconds = Math.floor(Date.now() / 1000);

  this.safeConnect(function (err, db) {
    if (err) return callback(false);

    var expiration = self.expire && !skipExpire ? self.expire + nowSeconds : -1;

    db.collection('entries').update({
      'entry_id': key,
      $or: [
        { expiration: -1 },
        { expiration: { $gt: nowSeconds } }
      ]
    }, {
      'entry_id': key,
      'value': data,
      'expiration': expiration
    }, {
      upsert: true
    }, function (err, existing) {
      if (err) {
        winston.error('error persisting value to mongodb', { error: err });
        return callback(false);
      }
      callback(true);
    });
  });
};
// Fetch the unexpired document stored under `key`.
// Invokes `callback` with the value, or false when missing/expired/on error.
// After a successful read of an expiring entry, pushes its expiration
// window forward (fire-and-forget).
MongoDocumentStore.prototype.get = function (key, callback, skipExpire) {
  var now = Math.floor(new Date().getTime() / 1000),
      that = this;
  this.safeConnect(function (err, db) {
    if (err)
      return callback(false);

    db.collection('entries').findOne({
      'entry_id': key,
      $or: [
        { expiration: -1 },
        { expiration: { $gt: now } }
      ]
    }, function (err, entry) {
      if (err) {
        // BUGFIX: message said 'persisting' — copy-paste from set()
        winston.error('error retrieving value from mongodb', { error: err });
        return callback(false);
      }

      callback(entry === null ? false : entry.value);

      // bump expiration on read; result intentionally ignored
      if (entry !== null && entry.expiration !== -1 && that.expire && !skipExpire) {
        db.collection('entries').update({
          'entry_id': key
        }, {
          $set: {
            'expiration': that.expire + now
          }
        }, function (err, result) { });
      }
    });
  });
};
// Open a connection to MongoDB and hand the db handle to `callback` as
// callback(undefined, db); on failure, logs and invokes callback(err).
MongoDocumentStore.prototype.safeConnect = function (callback) {
  MongoClient.connect(this.connectionUrl, function (err, db) {
    if (!err) {
      return callback(undefined, db);
    }
    winston.error('error connecting to mongodb', { error: err });
    callback(err);
  });
};

module.exports = MongoDocumentStore;

@ -1,14 +1,16 @@
/*global require,module,process*/ /*global require,module,process*/
var postgres = require('pg');
var winston = require('winston'); var winston = require('winston');
const {Pool} = require('pg');
// create table entries (id serial primary key, key varchar(255) not null, value text not null, expiration int, unique(key)); // create table entries (id serial primary key, key varchar(255) not null, value text not null, expiration int, unique(key));
// A postgres document store // A postgres document store
var PostgresDocumentStore = function (options) { var PostgresDocumentStore = function (options) {
this.expireJS = options.expire; this.expireJS = parseInt(options.expire, 10);
this.connectionUrl = process.env.DATABASE_URL || options.connectionUrl;
const connectionString = process.env.DATABASE_URL || options.connectionUrl;
this.pool = new Pool({connectionString});
}; };
PostgresDocumentStore.prototype = { PostgresDocumentStore.prototype = {
@ -23,7 +25,7 @@ PostgresDocumentStore.prototype = {
key, key,
data, data,
that.expireJS && !skipExpire ? that.expireJS + now : null that.expireJS && !skipExpire ? that.expireJS + now : null
], function (err, result) { ], function (err) {
if (err) { if (err) {
winston.error('error persisting value to postgres', { error: err }); winston.error('error persisting value to postgres', { error: err });
return callback(false); return callback(false);
@ -50,7 +52,7 @@ PostgresDocumentStore.prototype = {
client.query('UPDATE entries SET expiration = $1 WHERE ID = $2', [ client.query('UPDATE entries SET expiration = $1 WHERE ID = $2', [
that.expireJS + now, that.expireJS + now,
result.rows[0].id result.rows[0].id
], function (err, result) { ], function (err) {
if (!err) { if (!err) {
done(); done();
} }
@ -64,16 +66,15 @@ PostgresDocumentStore.prototype = {
// A connection wrapper // A connection wrapper
safeConnect: function (callback) { safeConnect: function (callback) {
postgres.connect(this.connectionUrl, function (err, client, done) { this.pool.connect((error, client, done) => {
if (err) { if (error) {
winston.error('error connecting to postgres', { error: err }); winston.error('error connecting to postgres', {error});
callback(err); callback(error);
} else { } else {
callback(undefined, client, done); callback(undefined, client, done);
} }
}); });
} }
}; };
module.exports = PostgresDocumentStore; module.exports = PostgresDocumentStore;

@ -29,7 +29,12 @@ RedisDocumentStore.connect = function(options) {
if (options.password) { if (options.password) {
RedisDocumentStore.client.auth(options.password); RedisDocumentStore.client.auth(options.password);
} }
RedisDocumentStore.client.select(index, function(err, reply) {
RedisDocumentStore.client.on('error', function(err) {
winston.error('redis disconnected', err);
});
RedisDocumentStore.client.select(index, function(err) {
if (err) { if (err) {
winston.error( winston.error(
'error connecting to redis index ' + index, 'error connecting to redis index ' + index,
@ -46,7 +51,7 @@ RedisDocumentStore.connect = function(options) {
// Save file in a key // Save file in a key
RedisDocumentStore.prototype.set = function(key, data, callback, skipExpire) { RedisDocumentStore.prototype.set = function(key, data, callback, skipExpire) {
var _this = this; var _this = this;
RedisDocumentStore.client.set(key, data, function(err, reply) { RedisDocumentStore.client.set(key, data, function(err) {
if (err) { if (err) {
callback(false); callback(false);
} }
@ -62,7 +67,7 @@ RedisDocumentStore.prototype.set = function(key, data, callback, skipExpire) {
// Expire a key in expire time if set // Expire a key in expire time if set
RedisDocumentStore.prototype.setExpiration = function(key) { RedisDocumentStore.prototype.setExpiration = function(key) {
if (this.expire) { if (this.expire) {
RedisDocumentStore.client.expire(key, this.expire, function(err, reply) { RedisDocumentStore.client.expire(key, this.expire, function(err) {
if (err) { if (err) {
winston.error('failed to set expiry on key: ' + key); winston.error('failed to set expiry on key: ' + key);
} }

@ -0,0 +1,46 @@
const crypto = require('crypto');
const rethink = require('rethinkdbdash');
const winston = require('winston');
// Hex MD5 digest of `str`; RethinkDB document ids are md5(key).
const md5 = (str) => crypto.createHash('md5').update(str).digest('hex');
// A document store backed by RethinkDB ('uploads' table; ids are md5(key)).
// This store does not support expirations.
class RethinkDBStore {
  constructor(options) {
    this.client = rethink({
      silent: true,
      host: options.host || '127.0.0.1',
      port: options.port || 28015,
      db: options.db || 'haste',
      user: options.user || 'admin',
      password: options.password || ''
    });
  }

  // Save `data` under `key`; invokes callback(true|false).
  set(key, data, callback) {
    this.client.table('uploads').insert({ id: md5(key), data: data }).run((error) => {
      if (error) {
        callback(false);
        winston.error('failed to insert to table', error);
        return;
      }
      callback(true);
    });
  }

  // Get the document stored under `key`; invokes callback(value|false).
  get(key, callback) {
    this.client.table('uploads').get(md5(key)).run((error, result) => {
      if (error || !result) {
        callback(false);
        // BUGFIX: message said 'insert' — copy-paste from set()
        if (error) winston.error('failed to read from table', error);
        return;
      }
      callback(result.data);
    });
  }
}

module.exports = RethinkDBStore;

@ -0,0 +1,32 @@
const fs = require('fs');
module.exports = class DictionaryGenerator {
constructor(options, readyCallback) {
// Check options format
if (!options) throw Error('No options passed to generator');
if (!options.path) throw Error('No dictionary path specified in options');
// Load dictionary
fs.readFile(options.path, 'utf8', (err, data) => {
if (err) throw err;
this.dictionary = data.split(/[\n\r]+/);
if (readyCallback) readyCallback();
});
}
// Generates a dictionary-based key, of keyLength words
createKey(keyLength) {
let text = '';
for (let i = 0; i < keyLength; i++) {
const index = Math.floor(Math.random() * this.dictionary.length);
text += this.dictionary[index];
}
return text;
}
};

@ -1,33 +1,27 @@
// Draws inspiration from pwgen and http://tools.arantius.com/password // Draws inspiration from pwgen and http://tools.arantius.com/password
var PhoneticKeyGenerator = function(options) {
// No options
};
// Generate a phonetic key const randOf = (collection) => {
PhoneticKeyGenerator.prototype.createKey = function(keyLength) { return () => {
var text = ''; return collection[Math.floor(Math.random() * collection.length)];
var start = Math.round(Math.random()); };
for (var i = 0; i < keyLength; i++) {
text += (i % 2 == start) ? this.randConsonant() : this.randVowel();
}
return text;
}; };
PhoneticKeyGenerator.consonants = 'bcdfghjklmnpqrstvwxyz'; // Helper methods to get an random vowel or consonant
PhoneticKeyGenerator.vowels = 'aeiou'; const randVowel = randOf('aeiou');
const randConsonant = randOf('bcdfghjklmnpqrstvwxyz');
// Get an random vowel module.exports = class PhoneticKeyGenerator {
PhoneticKeyGenerator.prototype.randVowel = function() {
return PhoneticKeyGenerator.vowels[
Math.floor(Math.random() * PhoneticKeyGenerator.vowels.length)
];
};
// Get an random consonant // Generate a phonetic key of alternating consonant & vowel
PhoneticKeyGenerator.prototype.randConsonant = function() { createKey(keyLength) {
return PhoneticKeyGenerator.consonants[ let text = '';
Math.floor(Math.random() * PhoneticKeyGenerator.consonants.length) const start = Math.round(Math.random());
];
};
module.exports = PhoneticKeyGenerator; for (let i = 0; i < keyLength; i++) {
text += (i % 2 == start) ? randConsonant() : randVowel();
}
return text;
}
};

@ -1,19 +1,20 @@
var RandomKeyGenerator = function(options) { module.exports = class RandomKeyGenerator {
if (!options) {
options = {}; // Initialize a new generator with the given keySpace
constructor(options = {}) {
this.keyspace = options.keyspace || 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
} }
this.keyspace = options.keyspace || 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
};
// Generate a random key // Generate a key of the given length
RandomKeyGenerator.prototype.createKey = function(keyLength) { createKey(keyLength) {
var text = ''; var text = '';
var index;
for (var i = 0; i < keyLength; i++) { for (var i = 0; i < keyLength; i++) {
index = Math.floor(Math.random() * this.keyspace.length); const index = Math.floor(Math.random() * this.keyspace.length);
text += this.keyspace.charAt(index); text += this.keyspace.charAt(index);
}
return text;
} }
return text;
};
module.exports = RandomKeyGenerator; };

1652
package-lock.json generated

File diff suppressed because it is too large Load Diff

@ -14,23 +14,21 @@
}, },
"main": "haste", "main": "haste",
"dependencies": { "dependencies": {
"busboy": "0.2.4",
"connect": "^3.7.0",
"connect-ratelimit": "0.0.7", "connect-ratelimit": "0.0.7",
"connect-route": "0.1.5", "connect-route": "0.1.5",
"connect": "3.4.1", "pg": "^8.0.0",
"st": "1.1.0", "redis": "0.8.1",
"winston": "0.6.2", "redis-url": "0.1.0",
"uglify-js": "1.3.3", "st": "^2.0.0",
"busboy": "0.2.4", "uglify-js": "3.1.6",
"pg": "4.1.1" "winston": "^2.0.0"
}, },
"devDependencies": { "devDependencies": {
"mocha": "*", "mocha": "^8.1.3"
"should": "*"
}, },
"bundledDependencies": [], "bundledDependencies": [],
"engines": {
"node": "0.10.35"
},
"bin": { "bin": {
"haste-server": "./server.js" "haste-server": "./server.js"
}, },
@ -44,6 +42,6 @@
}, },
"scripts": { "scripts": {
"start": "node server.js", "start": "node server.js",
"test": "mocha -r should spec/*" "test": "mocha --recursive"
} }
} }

@ -1,7 +1,7 @@
var http = require('http'); var http = require('http');
var url = require('url');
var fs = require('fs'); var fs = require('fs');
var uglify = require('uglify-js');
var winston = require('winston'); var winston = require('winston');
var connect = require('connect'); var connect = require('connect');
var route = require('connect-route'); var route = require('connect-route');
@ -11,7 +11,8 @@ var connect_rate_limit = require('connect-ratelimit');
var DocumentHandler = require('./lib/document_handler'); var DocumentHandler = require('./lib/document_handler');
// Load the configuration and set some defaults // Load the configuration and set some defaults
var config = JSON.parse(fs.readFileSync('./config.js', 'utf8')); const configPath = process.argv.length <= 2 ? 'config.js' : process.argv[2];
const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
config.port = process.env.PORT || config.port || 7777; config.port = process.env.PORT || config.port || 7777;
config.host = process.env.HOST || config.host || 'localhost'; config.host = process.env.HOST || config.host || 'localhost';
@ -19,7 +20,10 @@ config.host = process.env.HOST || config.host || 'localhost';
if (config.logging) { if (config.logging) {
try { try {
winston.remove(winston.transports.Console); winston.remove(winston.transports.Console);
} catch(er) { } } catch(e) {
/* was not present */
}
var detail, type; var detail, type;
for (var i = 0; i < config.logging.length; i++) { for (var i = 0; i < config.logging.length; i++) {
detail = config.logging[i]; detail = config.logging[i];
@ -52,21 +56,14 @@ else {
// Compress the static javascript assets // Compress the static javascript assets
if (config.recompressStaticAssets) { if (config.recompressStaticAssets) {
var jsp = require("uglify-js").parser;
var pro = require("uglify-js").uglify;
var list = fs.readdirSync('./static'); var list = fs.readdirSync('./static');
for (var i = 0; i < list.length; i++) { for (var j = 0; j < list.length; j++) {
var item = list[i]; var item = list[j];
var orig_code, ast; if ((item.indexOf('.js') === item.length - 3) && (item.indexOf('.min.js') === -1)) {
if ((item.indexOf('.js') === item.length - 3) && var dest = item.substring(0, item.length - 3) + '.min' + item.substring(item.length - 3);
(item.indexOf('.min.js') === -1)) { var orig_code = fs.readFileSync('./static/' + item, 'utf8');
dest = item.substring(0, item.length - 3) + '.min' +
item.substring(item.length - 3); fs.writeFileSync('./static/' + dest, uglify.minify(orig_code).code, 'utf8');
orig_code = fs.readFileSync('./static/' + item, 'utf8');
ast = jsp.parse(orig_code);
ast = pro.ast_mangle(ast);
ast = pro.ast_squeeze(ast);
fs.writeFileSync('./static/' + dest, pro.gen_code(ast), 'utf8');
winston.info('compressed ' + item + ' into ' + dest); winston.info('compressed ' + item + ' into ' + dest);
} }
} }
@ -113,20 +110,28 @@ if (config.rateLimits) {
// first look at API calls // first look at API calls
app.use(route(function(router) { app.use(route(function(router) {
// get raw documents - support getting with extension // get raw documents - support getting with extension
router.get('/raw/:id', function(request, response, next) {
var key = request.params.id.split('.')[0]; router.get('/raw/:id', function(request, response) {
var skipExpire = !!config.documents[key]; return documentHandler.handleRawGet(request, response, config);
return documentHandler.handleRawGet(key, response, skipExpire); });
router.head('/raw/:id', function(request, response) {
return documentHandler.handleRawGet(request, response, config);
}); });
// add documents // add documents
router.post('/documents', function(request, response, next) {
router.post('/documents', function(request, response) {
return documentHandler.handlePost(request, response); return documentHandler.handlePost(request, response);
}); });
// get documents // get documents
router.get('/documents/:id', function(request, response, next) { router.get('/documents/:id', function(request, response) {
var key = request.params.id.split('.')[0]; return documentHandler.handleGet(request, response, config);
var skipExpire = !!config.documents[key]; });
return documentHandler.handleGet(key, response, skipExpire);
router.head('/documents/:id', function(request, response) {
return documentHandler.handleGet(request, response, config);
}); });
})); }));

@ -17,6 +17,8 @@ textarea {
outline: none; outline: none;
resize: none; resize: none;
font-size: 13px; font-size: 13px;
margin-top: 0;
margin-bottom: 0;
} }
/* the line numbers */ /* the line numbers */
@ -31,6 +33,7 @@ textarea {
font-size: 13px; font-size: 13px;
font-family: monospace; font-family: monospace;
text-align: right; text-align: right;
user-select: none;
} }
/* code box when locked */ /* code box when locked */
@ -42,7 +45,6 @@ textarea {
border: 0px; border: 0px;
outline: none; outline: none;
font-size: 13px; font-size: 13px;
padding-right: 360px;
overflow: inherit; overflow: inherit;
} }
@ -118,6 +120,7 @@ textarea {
font-size: 12px; font-size: 12px;
line-height: 14px; line-height: 14px;
padding: 10px 15px; padding: 10px 15px;
user-select: none;
} }
#box3 .label, #messages li { #box3 .label, #messages li {

@ -1,3 +1,5 @@
/* global $, hljs, window, document */
///// represents a single document ///// represents a single document
var haste_document = function() { var haste_document = function() {
@ -42,10 +44,10 @@ haste_document.prototype.load = function(key, callback, lang) {
value: high.value, value: high.value,
key: key, key: key,
language: high.language || lang, language: high.language || lang,
lineCount: res.data.split("\n").length lineCount: res.data.split('\n').length
}); });
}, },
error: function(err) { error: function() {
callback(false); callback(false);
} }
}); });
@ -62,7 +64,7 @@ haste_document.prototype.save = function(data, callback) {
type: 'post', type: 'post',
data: data, data: data,
dataType: 'json', dataType: 'json',
contentType: 'application/json; charset=utf-8', contentType: 'text/plain; charset=utf-8',
success: function(res) { success: function(res) {
_this.locked = true; _this.locked = true;
_this.key = res.key; _this.key = res.key;
@ -71,7 +73,7 @@ haste_document.prototype.save = function(data, callback) {
value: high.value, value: high.value,
key: res.key, key: res.key,
language: high.language, language: high.language,
lineCount: data.split("\n").length lineCount: data.split('\n').length
}); });
}, },
error: function(res) { error: function(res) {
@ -168,8 +170,7 @@ haste.extensionMap = {
lua: 'lua', pas: 'delphi', java: 'java', cpp: 'cpp', cc: 'cpp', m: 'objectivec', lua: 'lua', pas: 'delphi', java: 'java', cpp: 'cpp', cc: 'cpp', m: 'objectivec',
vala: 'vala', sql: 'sql', sm: 'smalltalk', lisp: 'lisp', ini: 'ini', vala: 'vala', sql: 'sql', sm: 'smalltalk', lisp: 'lisp', ini: 'ini',
diff: 'diff', bash: 'bash', sh: 'bash', tex: 'tex', erl: 'erlang', hs: 'haskell', diff: 'diff', bash: 'bash', sh: 'bash', tex: 'tex', erl: 'erlang', hs: 'haskell',
md: 'markdown', txt: '', coffee: 'coffee', json: 'javascript', md: 'markdown', txt: '', coffee: 'coffee', swift: 'swift'
swift: 'swift'
}; };
// Look up the extension preferred for a type // Look up the extension preferred for a type
@ -276,7 +277,7 @@ haste.prototype.configureButtons = function() {
$where: $('#box2 .new'), $where: $('#box2 .new'),
label: 'New', label: 'New',
shortcut: function(evt) { shortcut: function(evt) {
return evt.ctrlKey && evt.keyCode === 78 return evt.ctrlKey && evt.keyCode === 78;
}, },
shortcutDescription: 'control + n', shortcutDescription: 'control + n',
action: function() { action: function() {
@ -331,14 +332,14 @@ haste.prototype.configureButton = function(options) {
} }
}); });
// Show the label // Show the label
options.$where.mouseenter(function(evt) { options.$where.mouseenter(function() {
$('#box3 .label').text(options.label); $('#box3 .label').text(options.label);
$('#box3 .shortcut').text(options.shortcutDescription || ''); $('#box3 .shortcut').text(options.shortcutDescription || '');
$('#box3').show(); $('#box3').show();
$(this).append($('#pointer').remove().show()); $(this).append($('#pointer').remove().show());
}); });
// Hide the label // Hide the label
options.$where.mouseleave(function(evt) { options.$where.mouseleave(function() {
$('#box3').hide(); $('#box3').hide();
$('#pointer').hide(); $('#pointer').hide();
}); });
@ -371,7 +372,7 @@ $(function() {
// For browsers like Internet Explorer // For browsers like Internet Explorer
if (document.selection) { if (document.selection) {
this.focus(); this.focus();
sel = document.selection.createRange(); var sel = document.selection.createRange();
sel.text = myValue; sel.text = myValue;
this.focus(); this.focus();
} }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -1,3 +1,7 @@
/* global describe, it */
var assert = require('assert');
var DocumentHandler = require('../lib/document_handler'); var DocumentHandler = require('../lib/document_handler');
var Generator = require('../lib/key_generators/random'); var Generator = require('../lib/key_generators/random');
@ -8,13 +12,13 @@ describe('document_handler', function() {
it('should choose a key of the proper length', function() { it('should choose a key of the proper length', function() {
var gen = new Generator(); var gen = new Generator();
var dh = new DocumentHandler({ keyLength: 6, keyGenerator: gen }); var dh = new DocumentHandler({ keyLength: 6, keyGenerator: gen });
dh.acceptableKey().length.should.equal(6); assert.equal(6, dh.acceptableKey().length);
}); });
it('should choose a default key length', function() { it('should choose a default key length', function() {
var gen = new Generator(); var gen = new Generator();
var dh = new DocumentHandler({ keyGenerator: gen }); var dh = new DocumentHandler({ keyGenerator: gen });
dh.keyLength.should.equal(DocumentHandler.defaultKeyLength); assert.equal(dh.keyLength, DocumentHandler.defaultKeyLength);
}); });
}); });

@ -0,0 +1,34 @@
/* global describe, it */

const assert = require('assert');
const fs = require('fs');

const Generator = require('../../lib/key_generators/dictionary');

describe('DictionaryGenerator', function() {
  describe('options', function() {
    it('should throw an error if given no options', () => {
      assert.throws(() => {
        new Generator();
      }, Error);
    });

    it('should throw an error if given no path', () => {
      assert.throws(() => {
        new Generator({});
      }, Error);
    });
  });

  describe('generation', function() {
    // BUGFIX: the assertion runs inside the generator's async ready
    // callback; without taking `done`, mocha ended the test before the
    // callback fired and the test passed without asserting anything.
    it('should return a key of the proper number of words from the given dictionary', (done) => {
      const path = '/tmp/haste-server-test-dictionary';
      const words = ['cat'];
      fs.writeFileSync(path, words.join('\n'));

      const gen = new Generator({path}, () => {
        assert.equal('catcatcat', gen.createKey(3));
        done();
      });
    });
  });
});

@ -0,0 +1,35 @@
/* global describe, it */

const assert = require('assert');

const Generator = require('../../lib/key_generators/phonetic');

const vowels = 'aeiou';
const consonants = 'bcdfghjklmnpqrstvwxyz';

describe('PhoneticKeyGenerator', () => {
  describe('generation', () => {
    it('should return a key of the proper length', () => {
      const gen = new Generator();
      assert.equal(6, gen.createKey(6).length);
    });

    it('should alternate consonants and vowels', () => {
      const gen = new Generator();
      const key = gen.createKey(3);
      const isConsonant = (ch) => consonants.includes(ch);
      const isVowel = (ch) => vowels.includes(ch);

      // keys alternate letter classes from a random starting class:
      // consonant start => c-v-c, vowel start => v-c-v
      if (isConsonant(key[0])) {
        assert.ok(isConsonant(key[2]));
        assert.ok(isVowel(key[1]));
      } else {
        assert.ok(isVowel(key[0]));
        assert.ok(isVowel(key[2]));
        assert.ok(isConsonant(key[1]));
      }
    });
  });
});

@ -0,0 +1,24 @@
/* global describe, it */

const assert = require('assert');

const Generator = require('../../lib/key_generators/random');

describe('RandomKeyGenerator', () => {
  describe('generation', () => {
    it('should return a key of the proper length', () => {
      const gen = new Generator();
      assert.equal(gen.createKey(6).length, 6);
    });

    it('should use a key from the given keyset if given', () => {
      const gen = new Generator({keyspace: 'A'});
      assert.equal(gen.createKey(6), 'AAAAAA');
    });

    // BUGFIX: this test's old title claimed to cover behaviour when no
    // keyspace is given, but it passes one; it actually verifies that
    // only characters from the supplied keyspace appear in keys.
    it('should only use characters from the given keyspace', () => {
      const gen = new Generator({keyspace: 'A'});
      assert.ok(!gen.createKey(6).includes('B'));
    });
  });
});

@ -1,8 +1,12 @@
var RedisDocumentStore = require('../lib/document_stores/redis'); /* global it, describe, afterEach */
var assert = require('assert');
var winston = require('winston'); var winston = require('winston');
winston.remove(winston.transports.Console); winston.remove(winston.transports.Console);
var RedisDocumentStore = require('../lib/document_stores/redis');
describe('redis_document_store', function() { describe('redis_document_store', function() {
/* reconnect to redis on each test */ /* reconnect to redis on each test */
@ -12,14 +16,14 @@ describe('redis_document_store', function() {
RedisDocumentStore.client = false; RedisDocumentStore.client = false;
} }
}); });
describe('set', function() { describe('set', function() {
it('should be able to set a key and have an expiration set', function(done) { it('should be able to set a key and have an expiration set', function(done) {
var store = new RedisDocumentStore({ expire: 10 }); var store = new RedisDocumentStore({ expire: 10 });
store.set('hello1', 'world', function() { store.set('hello1', 'world', function() {
RedisDocumentStore.client.ttl('hello1', function(err, res) { RedisDocumentStore.client.ttl('hello1', function(err, res) {
res.should.be.above(1); assert.ok(res > 1);
done(); done();
}); });
}); });
@ -29,7 +33,7 @@ describe('redis_document_store', function() {
var store = new RedisDocumentStore({ expire: 10 }); var store = new RedisDocumentStore({ expire: 10 });
store.set('hello2', 'world', function() { store.set('hello2', 'world', function() {
RedisDocumentStore.client.ttl('hello2', function(err, res) { RedisDocumentStore.client.ttl('hello2', function(err, res) {
res.should.equal(-1); assert.equal(-1, res);
done(); done();
}); });
}, true); }, true);
@ -37,9 +41,9 @@ describe('redis_document_store', function() {
it('should not set an expiration when expiration is off', function(done) { it('should not set an expiration when expiration is off', function(done) {
var store = new RedisDocumentStore({ expire: false }); var store = new RedisDocumentStore({ expire: false });
store.set('hello3', 'world', function(worked) { store.set('hello3', 'world', function() {
RedisDocumentStore.client.ttl('hello3', function(err, res) { RedisDocumentStore.client.ttl('hello3', function(err, res) {
res.should.equal(-1); assert.equal(-1, res);
done(); done();
}); });
}); });
Loading…
Cancel
Save