Switch to Hugo

Move the website over to Hugo with the Gokarna theme. The first actual update to the website in eight years.

This commit is contained in:
parent fa7b8eba40
commit 4bc93904a7
.editorconfig (deleted, 13 lines)
@@ -1,13 +0,0 @@
# http://editorconfig.org
root = true

[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.md]
trim_trailing_whitespace = false
.gitignore (new file, 1 line)
@@ -0,0 +1 @@
public
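
Ignoring public makes sense for a Hugo site: by default Hugo writes the rendered pages there, so only the sources stay tracked. A minimal build under that default (command assumed, not part of this commit) is just:

    # render the site into ./public, which is ignored above
    hugo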
.gitmodules (new file, 3 lines)
@@ -0,0 +1,3 @@
[submodule "themes/gokarna"]
	path = themes/gokarna
	url = https://github.com/526avijitgupta/gokarna
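
Because the theme arrives as a git submodule, a fresh clone needs the submodule initialized before Hugo can find themes/gokarna. The usual commands (standard git, not part of this commit; <repo-url> is a placeholder) are:

    # clone the site and the theme together
    git clone --recurse-submodules <repo-url>
    # or, inside an existing checkout
    git submodule update --init --recursive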
.jshintrc (deleted, 17 lines)
@@ -1,17 +0,0 @@
{
  "node": true,
  "esnext": true,
  "bitwise": true,
  "camelcase": true,
  "curly": true,
  "eqeqeq": true,
  "immed": true,
  "indent": 2,
  "latedef": true,
  "newcap": true,
  "noarg": true,
  "quotmark": "single",
  "undef": true,
  "unused": true,
  "strict": true
}
README.md (2 lines changed)
@@ -1,2 +1,2 @@
 # Vertinext
-Source of my Personal Website. Currently this uses React and React-Router.
+Source of my Personal Website. Now using Hugo with the Gokarna theme.
archetypes/default.md (new file, 5 lines)
@@ -0,0 +1,5 @@
+++
title = '{{ replace .File.ContentBaseName "-" " " | title }}'
date = {{ .Date }}
draft = true
+++
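
This archetype is the template Hugo copies into the front matter of any newly created content file. Assuming a hypothetical post path (not something added in this commit), scaffolding a page looks like:

    # creates content/posts/hello-hugo.md with title, date and draft = true
    # pre-filled from archetypes/default.md
    hugo new posts/hello-hugo.md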
content/pages/_index.md (new file, 17 lines)
@@ -0,0 +1,17 @@
+++
date = 2024-02-26T01:11:55-05:00
type = 'page'
image = 'Test'
+++

# Projects
A list of projects I've worked on that I like to showcase.

## [PyMoe](https://github.com/ccubed/PyMoe)
PyMoe started out as a library I used in my AngelBot project to interact with the APIs available at various websites focusing on Anime, Manga, Light Novels, Web Novels, and Visual Novels. It has since become its own independent project supporting the latest versions of Python 3, and it was recently updated to include several new services. It has over eighty stars and is currently used in over six hundred projects on GitHub.

## [Earl](https://github.com/ccubed/Earl)
Earl also started out as a library for AngelBot, but it later grew into a more general-purpose project. Earl is a C++ extension for Python, built against CPython, that allows native packing and unpacking of the External Term Format (ETF) used by Erlang. At the time, it was created to make parsing the ETF data received from Discord faster: bots written in other languages had a slight speed advantage at parsing ETF, and this extension leveled the playing field by using C++ to serialize ETF into and out of Python objects very quickly.

## [Downloop](https://github.com/ccubed/Downloop)
Downloop was a hobby project to create a Python image host with a built-in web interface and API that let anyone programmatically create and distribute a link to a hosted image. Downloop focused on privacy and kept no data on who uploaded what. Images were not stored by name, making them completely anonymous, and all images were returned as binary data. It could shard itself automatically to distribute load, direct requests to the correct shard, and kept each shard's image data in a separate root database table. No single shard had a copy of all the images, and shards didn't know what existed outside themselves; it also meant there was no way to recover an image link if someone lost the link handed out at upload.
hugo.toml (new file, 53 lines)
@@ -0,0 +1,53 @@
baseURL = 'https://www.vertinext.com'
defaultContentLanguage = 'en-us'
languageCode = 'en-us'
title = 'Charles Click'
theme = 'gokarna'
enableRobotsTXT = true

[menu]
  [[menu.main]]
    identifier = 'home'
    url = '/'
    pre = '<span data-feather="home"></span>'
    post = ''
    name = 'Home'
    weight = 1
  [[menu.main]]
    identifier = 'projects'
    url = '/pages/'
    pre = '<span data-feather="folder"></span>'
    post = ''
    name = 'Projects'
    weight = 2
  [[menu.main]]
    identifier = 'github'
    url = 'https://www.github.com/ccubed'
    weight = 3
    pre = '<span data-feather="github"></span>'
    name = 'Github'
  [[menu.main]]
    identifier = 'keybase'
    url = 'https://keybase.io/cclick'
    weight = 4
    pre = '<span data-feather="key"></span>'
    name = 'Keybase'
  [[menu.main]]
    identifier = 'resume'
    url = 'https://drive.google.com/file/d/1xIWfZk8b3xa9ygUxurgAyUwHblPE_CrB/view?usp=sharing'
    weight = 5
    pre = '<span data-feather="briefcase"></span>'
    name = 'Resume'

[params]
  avatarURL = '/images/Avatar.jpg'
  avatarSize = 'size-m'
  description = 'Sometimes I ask the computer to do things'
  footer = 'Charles Click @ vertinext.com'
  socialIcons = [
    {name = 'linkedin', url = 'https://www.linkedin.com/in/charles-click-b078a959/'},
    {name = 'mastodon', url = 'https://mastodon.social/@RoryEjinn'},
    {name = 'twitch', url = 'https://www.twitch.tv/teshiko'},
    {name = 'email', url = 'ccubed.techno@gmail.com'}
  ]
  metaKeywords = ['vertinext', 'charles click', 'portfolio', 'cooper']
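
With the theme submodule checked out and this hugo.toml at the repository root, previewing and publishing follow the standard Hugo workflow (commands assumed, not part of the commit):

    # live preview, drafts included, at http://localhost:1313
    hugo server -D
    # production build; writes the static site into public/
    hugo --minify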
Deleted: BlogPost React component (14 lines)
@@ -1,14 +0,0 @@
import React from 'react';
import Spinner from 'react-spinkit';

class BlogPost extends React.Component {

  render(){

    return(<h3>Specific Post</h3>);

  }

}

module.exports = BlogPost;
Deleted: GitItem React component (36 lines)
@@ -1,36 +0,0 @@
import React from 'react';
import Spinner from 'react-spinkit';

class Gititem extends React.Component {

  constructor(props){

    super();

  }

  render() {

    var repoNodes = [];

    if( this.props.data.length > 0 ){

      this.props.data.forEach(function(repo){

        repoNodes.push(<li className="collection-item avatar">
          <img className="circle" src={repo.owner.avatar_url} alt={repo.owner.login}></img>
          <span className="title"><a href={repo.html_url}>{repo.name}</a></span>
          <p>{repo.description ? repo.description : "No description given."}</p>
          <a href={repo.forks_url} className="secondary-content"><i className="fa fa-code-fork fa-2x"></i></a>
        </li>);

      });

    }

    return repoNodes.length > 0 ? (<ul className="collection">{repoNodes}</ul>) : (<Spinner spinnerName='three-bounce' />);

  }
}

module.exports = Gititem;
Deleted: GitListing React component (42 lines)
@@ -1,42 +0,0 @@
import React from 'react';
import GitItem from './GitItem';

class GitListing extends React.Component {

  constructor(props){

    super(props);
    this.state = {data: []};

  }

  loadGitRepos(){

    $.ajax({
      url: "https://api.github.com/users/ccubed/repos",
      dataType: 'json',
      success: (data) =>{
        this.setState({data: data});
      },
      error: (xhr, status, err) => {
        console.error("api.github.com", status, err.toString());
      }
    });

  }

  componentDidMount(){

    this.loadGitRepos();

  }

  render(){

    return (<div className="container"><div className="row"><div className="col s6 m12"><GitItem data={this.state.data} /></div></div></div>)

  }

}

module.exports = GitListing;
Deleted: HostLists React component (49 lines)
@@ -1,49 +0,0 @@
import React from 'react';

class HostLists extends React.Component {

  render(){

    return(

      <table className="highlight centered">
        <thead>
          <tr>
            <th data-field="url">URL</th>
            <th data-field="desc">Description</th>
          </tr>
        </thead>
        <tbody>
          <tr>
            <td><a href="https://www.vertinext.com">www.vertinext.com</a></td>
            <td>This Website</td>
          </tr>
          <tr>
            <td><a href="https://kingsmouth.vertinext.com">kingsmouth.vertinext.com</a></td>
            <td>Website built for a now defunct text game. Acted as an admin interface.</td>
          </tr>
          <tr>
            <td><a href="https://safehaven.vertinext.com">safehaven.vertinext.com</a></td>
            <td>Wiki for a text game</td>
          </tr>
          <tr>
            <td><a href="https://welltrackr.vertinext.com">welltrackr.vertinext.com</a></td>
            <td>Mock health tracking website made for family that never got used</td>
          </tr>
          <tr>
            <td><a href="http://git.vertinext.com">git.vertinext.com</a></td>
            <td>Personal gitlab server</td>
          </tr>
          <tr>
            <td><a href="http://radialblur.vertinext.com">radialblur.vertinext.com</a></td>
            <td>Django website for a text based game</td>
          </tr>
        </tbody>
      </table>
    )

  }

}

module.exports = HostLists;
Deleted: Menu React component (21 lines)
@@ -1,21 +0,0 @@
import React from 'react';
import MenuItem from './MenuItem';

class Menu extends React.Component {
  render() {
    return (
      <nav>
        <div class="nav-wrapper">
          <ul class="left">
            <MenuItem route="app" text="Home" />
            <MenuItem route="gitrepos" text="Github Repos" />
            <MenuItem route="hosting" text="Hosting" />
            <MenuItem route="blog" text="Blog" />
          </ul>
        </div>
      </nav>
    );
  }
}

module.exports = Menu;
Deleted: MenuItem React component (16 lines)
@@ -1,16 +0,0 @@
import React from 'react';
import {Link} from 'react-router';

class MenuItem extends React.Component {
  render() {
    return (
      <li>
        <Link to={this.props.route}>
          {this.props.text}
        </Link>
      </li>
    );
  }
}

module.exports = MenuItem;
Deleted: RecentPosts React component (14 lines)
@@ -1,14 +0,0 @@
import React from 'react';
import Spinner from 'react-spinkit';

class RecentPosts extends React.Component {

  render(){

    return(<h3>Recent Posts</h3>);

  }

}

module.exports = RecentPosts;
Deleted: React Router bootstrap (5 lines)
@@ -1,5 +0,0 @@
import React from 'react';
import Router from 'react-router';
import routes from './Routes';

Router.run(routes, (Handler) => React.render(<Handler /> , document.body));
Deleted: app layout component (18 lines)
@@ -1,18 +0,0 @@
import React from 'react';
import Router from 'react-router';
var {RouteHandler} = Router;

import Menu from '../Components/Menu';

class App extends React.Component {
  render() {
    return (
      <div>
        <Menu />
        <RouteHandler/>
      </div>
    );
  }
}

module.exports = App;
Deleted: route definitions (20 lines)
@@ -1,20 +0,0 @@
import React from 'react';
import Router from 'react-router';
var {DefaultRoute, Route} = Router;

import Layout from './Layout';
import Home from '../Views/Home';
import Gitrepos from '../Views/Gitrepos';
import Hosts from '../Views/Hosts';
import Blog from '../Views/Blog';

var routes = (
  <Route name="app" path="/" handler={Layout}>
    <Route name="gitrepos" path="gitrepos" handler={Gitrepos}/>
    <Route name="hosting" path="hosting" handler={Hosts}/>
    <Route name="blog" path="blog" handler={Blog} />
    <DefaultRoute handler={Home}/>
  </Route>
);

module.exports = routes;
Deleted: Blog view (13 lines)
@@ -1,13 +0,0 @@
import React from 'react';
import BlogPost from '../Components/BlogPost';
import RecentPosts from '../Components/RecentPosts';

class Blog extends React.Component {
  render() {
    return (
      <RecentPosts />
    );
  }
}

module.exports = Blog;
Deleted: Gitrepos view (12 lines)
@@ -1,12 +0,0 @@
import React from 'react';
import GitListing from '../Components/GitListing';

class Gitrepos extends React.Component {
  render() {
    return (
      <GitListing />
    );
  }
}

module.exports = Gitrepos;
Deleted: Home view (30 lines)
@@ -1,30 +0,0 @@
import React from 'react';

class Home extends React.Component {
  render() {
    return (
      <div className="container">
        <div className="row">
          <div className="col s12">
            <div className="card hoverable">
              <div className="card-content red-text">
                <span className="card-title red-text">Charles C Click</span><br />
                Email: <a href="mailto:CharlesClick@vertinext.com">CharlesClick@vertinext.com</a><br />
                Skillset: Web Development<br />
                This Website: <a href="https://facebook.github.io/react/">React</a> and <a href="http://materializecss.com/">Materialize</a><br />
                <blockquote>
                  I am a coder and web developer in Knoxville, TN, who also hosts websites, games and other services through a personal server. There is a list of my Github repositories and currently hosted games and services available on other pages.
                </blockquote>
              </div>
              <div className="card-action red-text">
                <a href="https://docs.google.com/document/d/1ykS2_34-GQd0SbrjpG9NbBvq40L62qWxGJc43KAjOD8/edit?usp=sharing">View Resume</a>
              </div>
            </div>
          </div>
        </div>
      </div>
    );
  }
}

module.exports = Home;
Deleted: Hosts view (18 lines)
@@ -1,18 +0,0 @@
import React from 'react';
import HostLists from '../Components/HostLists';

class Hosts extends React.Component {
  render() {
    return (
      <div className="container">
        <div className="row">
          <div className="col s12">
            <HostLists />
          </div>
        </div>
      </div>
    );
  }
}

module.exports = Hosts;
Deleted: app entry import (1 line)
@@ -1 +0,0 @@
import './Core/App';
Deleted: HTML shell for the React app (21 lines)
@@ -1,21 +0,0 @@
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <meta name="google-site-verification" content="FW6N6eQoOr6Tc6m7knz7Ls4w9J4-7Y_kVXSTpiU_rQ8" />
  <meta name="description" content="Personal site of Charles Click Cataloging coding exploits" />
  <link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
  <link type="text/css" rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.1/css/materialize.min.css" media="screen,projection"/>
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/font-awesome/4.5.0/css/font-awesome.min.css">
  <title>Charles Click</title>
</head>

<body>
  <script src="app.js"></script>
  <script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
  <script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.1/js/materialize.min.js"></script>
</body>

</html>
src/build/app.js (deleted, 24120 lines): diff suppressed because the file is too large.
Deleted: built copy of the HTML shell (21 lines)
@@ -1,21 +0,0 @@
(contents identical to the HTML shell above; presumably the copy the gulp 'index' task placed in the build folder)
Deleted: gulp build script (43 lines)
@@ -1,43 +0,0 @@
var browserify = require('browserify');
var gulp = require('gulp');
var source = require('vinyl-source-stream');
var browserSync = require('browser-sync');
var rename = require('gulp-rename');
var babelify = require("babelify");

var opts = {
  mainJsInput: './App/app.js',
  mainJsOutput: 'app.js',
  buildFolder: './build/',
  indexHtml: './App/index.html',
  watchedFiles: [
    './App/**/*'
  ]
};

gulp.task('index', function() {
  gulp.src(opts.indexHtml)
    .pipe(gulp.dest(opts.buildFolder));
});

gulp.task('compile', function() {
  var b = browserify();
  b.transform(babelify);
  b.add(opts.mainJsInput);
  return b.bundle()
    .pipe(source(opts.mainJsInput))
    .pipe(rename(opts.mainJsOutput))
    .pipe(gulp.dest(opts.buildFolder));
});

gulp.task('browser-sync', function() {
  browserSync({
    server: {
      baseDir: opts.buildFolder
    }
  });
});

gulp.task('default', ['browser-sync', 'compile', 'index'], function() {
  gulp.watch(opts.watchedFiles, ['compile', 'index', browserSync.reload]);
});
Deleted: src/node_modules/.bin/* (generated, vendored; one-line symlink stubs). Each file contained only the path of the script it pointed at:

JSONStream -> ../JSONStream/index.js
acorn -> ../acorn/bin/acorn
browser-pack -> ../browser-pack/bin/cmd.js
browser-sync -> ../browser-sync/bin/browser-sync.js
browserify -> ../browserify/bin/cmd.js
commonize -> ../commoner/bin/commonize
cssesc -> ../cssesc/bin/cssesc
dateformat -> ../dateformat/bin/cli.js
defs -> ../defs/build/es5/defs
deps-sort -> ../deps-sort/bin/cmd.js
detect-indent -> ../detect-indent/cli.js
dev-ip -> ../dev-ip/lib/dev-ip.js
envify -> ../envify/bin/envify
esparse -> ../esprima-fb/bin/esparse.js
esvalidate -> ../esprima-fb/bin/esvalidate.js
express -> ../express/bin/express
foxy -> ../foxy/cli.js
gulp -> ../gulp/bin/gulp.js
har-validator -> ../har-validator/bin/har-validator
indent-string -> ../indent-string/cli.js
insert-module-globals -> ../insert-module-globals/bin/cmd.js
jsesc -> ../jsesc/bin/jsesc
json5 -> ../json5/lib/cli.js
leven -> ../leven/cli.js
lt -> ../localtunnel/bin/client
miller-rabin -> ../miller-rabin/bin/miller-rabin
mkdirp -> ../mkdirp/bin/cmd.js
module-deps -> ../module-deps/bin/cmd.js
nopt -> ../nopt/bin/nopt.js
regenerator -> ../regenerator/bin/regenerator
regexpu -> ../regexpu/bin/regexpu
regjsparser -> ../regjsparser/bin/parser
repeating -> ../repeating/cli.js
rimraf -> ../rimraf/bin.js
semver -> ../semver/bin/semver
sha.js -> ../sha.js/bin.js
strip-bom -> ../strip-bom/cli.js
throttleproxy -> ../stream-throttle/bin/throttleproxy.js
umd -> ../umd/bin/cli.js
user-home -> ../user-home/cli.js
uuid -> ../node-uuid/bin/uuid
weinre -> ../weinre/weinre
window-size -> ../window-size/cli.js
Deleted: src/node_modules/Base64/ (generated, vendored), the stock Base64 0.2.1 npm package that had been committed to the repository: .npmignore (2 lines), .travis.yml (7), LICENSE (14), Makefile (42), README.md (34), base64.js (60), base64.min.js (1), package.json (82), test/base64.coffee (52).
2
src/node_modules/JSONStream/.npmignore
generated
vendored
2
src/node_modules/JSONStream/.npmignore
generated
vendored
@ -1,2 +0,0 @@
|
|||||||
node_modules/*
|
|
||||||
node_modules
|
|
3
src/node_modules/JSONStream/.travis.yml
generated
vendored
3
src/node_modules/JSONStream/.travis.yml
generated
vendored
@ -1,3 +0,0 @@
|
|||||||
language: node_js
|
|
||||||
node_js:
|
|
||||||
- "0.10"
|
|
15
src/node_modules/JSONStream/LICENSE.APACHE2
generated
vendored
15
src/node_modules/JSONStream/LICENSE.APACHE2
generated
vendored
@ -1,15 +0,0 @@
|
|||||||
Apache License, Version 2.0
|
|
||||||
|
|
||||||
Copyright (c) 2011 Dominic Tarr
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
24
src/node_modules/JSONStream/LICENSE.MIT
generated
vendored
24
src/node_modules/JSONStream/LICENSE.MIT
generated
vendored
@ -1,24 +0,0 @@
|
|||||||
The MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2011 Dominic Tarr
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge,
|
|
||||||
to any person obtaining a copy of this software and
|
|
||||||
associated documentation files (the "Software"), to
|
|
||||||
deal in the Software without restriction, including
|
|
||||||
without limitation the rights to use, copy, modify,
|
|
||||||
merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom
|
|
||||||
the Software is furnished to do so,
|
|
||||||
subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice
|
|
||||||
shall be included in all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
|
||||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
|
|
||||||
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
|
||||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
|
||||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
13
src/node_modules/JSONStream/examples/all_docs.js
generated
vendored
13
src/node_modules/JSONStream/examples/all_docs.js
generated
vendored
@ -1,13 +0,0 @@
|
|||||||
var request = require('request')
|
|
||||||
, JSONStream = require('JSONStream')
|
|
||||||
, es = require('event-stream')
|
|
||||||
|
|
||||||
var parser = JSONStream.parse(['rows', true]) //emit parts that match this path (any element of the rows array)
|
|
||||||
, req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
|
|
||||||
, logger = es.mapSync(function (data) { //create a stream that logs to stderr,
|
|
||||||
console.error(data)
|
|
||||||
return data
|
|
||||||
})
|
|
||||||
|
|
||||||
req.pipe(parser)
|
|
||||||
parser.pipe(logger)
|
|
203
src/node_modules/JSONStream/index.js
generated
vendored
203
src/node_modules/JSONStream/index.js
generated
vendored
@ -1,203 +0,0 @@
|
|||||||
#! /usr/bin/env node
|
|
||||||
|
|
||||||
'use strict'
|
|
||||||
|
|
||||||
var Parser = require('jsonparse')
|
|
||||||
, through = require('through')
|
|
||||||
|
|
||||||
/*
|
|
||||||
|
|
||||||
the value of this.stack that creationix's jsonparse has is weird.
|
|
||||||
|
|
||||||
it makes this code ugly, but his problem is way harder that mine,
|
|
||||||
so i'll forgive him.
|
|
||||||
|
|
||||||
*/
|
|
||||||
|
|
||||||
exports.parse = function (path, map) {
|
|
||||||
|
|
||||||
var parser = new Parser()
|
|
||||||
var stream = through(function (chunk) {
|
|
||||||
if('string' === typeof chunk)
|
|
||||||
chunk = new Buffer(chunk)
|
|
||||||
parser.write(chunk)
|
|
||||||
},
|
|
||||||
function (data) {
|
|
||||||
if(data)
|
|
||||||
stream.write(data)
|
|
||||||
stream.queue(null)
|
|
||||||
})
|
|
||||||
|
|
||||||
if('string' === typeof path)
|
|
||||||
path = path.split('.').map(function (e) {
|
|
||||||
if (e === '*')
|
|
||||||
return true
|
|
||||||
else if (e === '') // '..'.split('.') returns an empty string
|
|
||||||
return {recurse: true}
|
|
||||||
else
|
|
||||||
return e
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
var count = 0, _key
|
|
||||||
if(!path || !path.length)
|
|
||||||
path = null
|
|
||||||
|
|
||||||
parser.onValue = function (value) {
|
|
||||||
if (!this.root)
|
|
||||||
stream.root = value
|
|
||||||
|
|
||||||
if(! path) return
|
|
||||||
|
|
||||||
var i = 0 // iterates on path
|
|
||||||
var j = 0 // iterates on stack
|
|
||||||
while (i < path.length) {
|
|
||||||
var key = path[i]
|
|
||||||
var c
|
|
||||||
j++
|
|
||||||
|
|
||||||
if (key && !key.recurse) {
|
|
||||||
c = (j === this.stack.length) ? this : this.stack[j]
|
|
||||||
if (!c) return
|
|
||||||
if (! check(key, c.key)) return
|
|
||||||
i++
|
|
||||||
} else {
|
|
||||||
i++
|
|
||||||
var nextKey = path[i]
|
|
||||||
if (! nextKey) return
|
|
||||||
while (true) {
|
|
||||||
c = (j === this.stack.length) ? this : this.stack[j]
|
|
||||||
if (!c) return
|
|
||||||
if (check(nextKey, c.key)) {
|
|
||||||
i++;
|
|
||||||
this.stack[j].value = null
|
|
||||||
break
|
|
||||||
}
|
|
||||||
j++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
if (j !== this.stack.length) return
|
|
||||||
|
|
||||||
count ++
|
|
||||||
var actualPath = this.stack.slice(1).map(function(element) { return element.key }).concat([this.key])
|
|
||||||
var data = this.value[this.key]
|
|
||||||
if(null != data)
|
|
||||||
if(null != (data = map ? map(data, actualPath) : data))
|
|
||||||
stream.queue(data)
|
|
||||||
delete this.value[this.key]
|
|
||||||
for(var k in this.stack)
|
|
||||||
this.stack[k].value = null
|
|
||||||
}
|
|
||||||
parser._onToken = parser.onToken;
|
|
||||||
|
|
||||||
parser.onToken = function (token, value) {
|
|
||||||
parser._onToken(token, value);
|
|
||||||
if (this.stack.length === 0) {
|
|
||||||
if (stream.root) {
|
|
||||||
if(!path)
|
|
||||||
stream.queue(stream.root)
|
|
||||||
count = 0;
|
|
||||||
stream.root = null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
parser.onError = function (err) {
|
|
||||||
if(err.message.indexOf("at position") > -1)
|
|
||||||
err.message = "Invalid JSON (" + err.message + ")";
|
|
||||||
stream.emit('error', err)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
return stream
|
|
||||||
}
|
|
||||||
|
|
||||||
function check (x, y) {
|
|
||||||
if ('string' === typeof x)
|
|
||||||
return y == x
|
|
||||||
else if (x && 'function' === typeof x.exec)
|
|
||||||
return x.exec(y)
|
|
||||||
else if ('boolean' === typeof x)
|
|
||||||
return x
|
|
||||||
else if ('function' === typeof x)
|
|
||||||
return x(y)
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.stringify = function (op, sep, cl, indent) {
|
|
||||||
indent = indent || 0
|
|
||||||
if (op === false){
|
|
||||||
op = ''
|
|
||||||
sep = '\n'
|
|
||||||
cl = ''
|
|
||||||
} else if (op == null) {
|
|
||||||
|
|
||||||
op = '[\n'
|
|
||||||
sep = '\n,\n'
|
|
||||||
cl = '\n]\n'
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
//else, what ever you like
|
|
||||||
|
|
||||||
var stream
|
|
||||||
, first = true
|
|
||||||
, anyData = false
|
|
||||||
stream = through(function (data) {
|
|
||||||
anyData = true
|
|
||||||
var json = JSON.stringify(data, null, indent)
|
|
||||||
if(first) { first = false ; stream.queue(op + json)}
|
|
||||||
else stream.queue(sep + json)
|
|
||||||
},
|
|
||||||
function (data) {
|
|
||||||
if(!anyData)
|
|
||||||
stream.queue(op)
|
|
||||||
stream.queue(cl)
|
|
||||||
stream.queue(null)
|
|
||||||
})
|
|
||||||
|
|
||||||
return stream
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.stringifyObject = function (op, sep, cl, indent) {
|
|
||||||
indent = indent || 0
|
|
||||||
if (op === false){
|
|
||||||
op = ''
|
|
||||||
sep = '\n'
|
|
||||||
cl = ''
|
|
||||||
} else if (op == null) {
|
|
||||||
|
|
||||||
op = '{\n'
|
|
||||||
sep = '\n,\n'
|
|
||||||
cl = '\n}\n'
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
//else, what ever you like
|
|
||||||
|
|
||||||
var first = true
|
|
||||||
var anyData = false
|
|
||||||
var stream = through(function (data) {
|
|
||||||
anyData = true
|
|
||||||
var json = JSON.stringify(data[0]) + ':' + JSON.stringify(data[1], null, indent)
|
|
||||||
if(first) { first = false ; this.queue(op + json)}
|
|
||||||
else this.queue(sep + json)
|
|
||||||
},
|
|
||||||
function (data) {
|
|
||||||
if(!anyData) this.queue(op)
|
|
||||||
this.queue(cl)
|
|
||||||
|
|
||||||
this.queue(null)
|
|
||||||
})
|
|
||||||
|
|
||||||
return stream
|
|
||||||
}
|
|
||||||
|
|
||||||
if(!module.parent && process.title !== 'browser') {
|
|
||||||
process.stdin
|
|
||||||
.pipe(exports.parse(process.argv[2]))
|
|
||||||
.pipe(exports.stringify('[', ',\n', ']\n', 2))
|
|
||||||
.pipe(process.stdout)
|
|
||||||
}
|
|
100
src/node_modules/JSONStream/package.json
generated
vendored
100
src/node_modules/JSONStream/package.json
generated
vendored
@ -1,100 +0,0 @@
|
|||||||
{
|
|
||||||
"_args": [
|
|
||||||
[
|
|
||||||
"JSONStream@^1.0.3",
|
|
||||||
"/media/Github/Vertinext2/src/node_modules/browserify"
|
|
||||||
]
|
|
||||||
],
|
|
||||||
"_from": "JSONStream@>=1.0.3 <2.0.0",
|
|
||||||
"_id": "JSONStream@1.0.7",
|
|
||||||
"_inCache": true,
|
|
||||||
"_installable": true,
|
|
||||||
"_location": "/JSONStream",
|
|
||||||
"_nodeVersion": "4.2.1",
|
|
||||||
"_npmUser": {
|
|
||||||
"email": "dominic.tarr@gmail.com",
|
|
||||||
"name": "dominictarr"
|
|
||||||
},
|
|
||||||
"_npmVersion": "2.14.8",
|
|
||||||
"_phantomChildren": {},
|
|
||||||
"_requested": {
|
|
||||||
"name": "JSONStream",
|
|
||||||
"raw": "JSONStream@^1.0.3",
|
|
||||||
"rawSpec": "^1.0.3",
|
|
||||||
"scope": null,
|
|
||||||
"spec": ">=1.0.3 <2.0.0",
|
|
||||||
"type": "range"
|
|
||||||
},
|
|
||||||
"_requiredBy": [
|
|
||||||
"/browser-pack",
|
|
||||||
"/browserify",
|
|
||||||
"/deps-sort",
|
|
||||||
"/insert-module-globals",
|
|
||||||
"/module-deps"
|
|
||||||
],
|
|
||||||
"_resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.0.7.tgz",
|
|
||||||
"_shasum": "700c8e4711fef1ce421f650bead55235bb21d7de",
|
|
||||||
"_shrinkwrap": null,
|
|
||||||
"_spec": "JSONStream@^1.0.3",
|
|
||||||
"_where": "/media/Github/Vertinext2/src/node_modules/browserify",
|
|
||||||
"author": {
|
|
||||||
"email": "dominic.tarr@gmail.com",
|
|
||||||
"name": "Dominic Tarr",
|
|
||||||
"url": "http://bit.ly/dominictarr"
|
|
||||||
},
|
|
||||||
"bin": {
|
|
||||||
"JSONStream": "./index.js"
|
|
||||||
},
|
|
||||||
"bugs": {
|
|
||||||
"url": "https://github.com/dominictarr/JSONStream/issues"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"jsonparse": "^1.1.0",
|
|
||||||
"through": ">=2.2.7 <3"
|
|
||||||
},
|
|
||||||
"description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)",
|
|
||||||
"devDependencies": {
|
|
||||||
"assertions": "~2.2.2",
|
|
||||||
"event-stream": "~0.7.0",
|
|
||||||
"it-is": "~1",
|
|
||||||
"render": "~0.1.1",
|
|
||||||
"tape": "~2.12.3",
|
|
||||||
"trees": "~0.0.3"
|
|
||||||
},
|
|
||||||
"directories": {},
|
|
||||||
"dist": {
|
|
||||||
"shasum": "700c8e4711fef1ce421f650bead55235bb21d7de",
|
|
||||||
"tarball": "http://registry.npmjs.org/JSONStream/-/JSONStream-1.0.7.tgz"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": "*"
|
|
||||||
},
|
|
||||||
"gitHead": "d02b9d588241f05271190b174c23f0c3846edf9c",
|
|
||||||
"homepage": "http://github.com/dominictarr/JSONStream",
|
|
||||||
"keywords": [
|
|
||||||
"json",
|
|
||||||
"stream",
|
|
||||||
"streaming",
|
|
||||||
"parser",
|
|
||||||
"async",
|
|
||||||
"parsing"
|
|
||||||
],
|
|
||||||
"license": "(MIT OR Apache-2.0)",
|
|
||||||
"maintainers": [
|
|
||||||
{
|
|
||||||
"email": "dominic.tarr@gmail.com",
|
|
||||||
"name": "dominictarr"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"name": "JSONStream",
|
|
||||||
"optionalDependencies": {},
|
|
||||||
"readme": "ERROR: No README data found!",
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "git://github.com/dominictarr/JSONStream.git"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"test": "set -e; for t in test/*.js; do echo '***' $t '***'; node $t; done"
|
|
||||||
},
|
|
||||||
"version": "1.0.7"
|
|
||||||
}
|
|
172
src/node_modules/JSONStream/readme.markdown
generated
vendored
172
src/node_modules/JSONStream/readme.markdown
generated
vendored
@ -1,172 +0,0 @@
|
|||||||
# JSONStream
|
|
||||||
|
|
||||||
streaming JSON.parse and stringify
|
|
||||||
|
|
||||||
![](https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master)
|
|
||||||
|
|
||||||
## example
|
|
||||||
|
|
||||||
``` js
|
|
||||||
|
|
||||||
var request = require('request')
|
|
||||||
, JSONStream = require('JSONStream')
|
|
||||||
, es = require('event-stream')
|
|
||||||
|
|
||||||
request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
|
|
||||||
.pipe(JSONStream.parse('rows.*'))
|
|
||||||
.pipe(es.mapSync(function (data) {
|
|
||||||
console.error(data)
|
|
||||||
return data
|
|
||||||
}))
|
|
||||||
```
|
|
||||||
|
|
||||||
## JSONStream.parse(path)
|
|
||||||
|
|
||||||
parse stream of values that match a path
|
|
||||||
|
|
||||||
``` js
|
|
||||||
JSONStream.parse('rows.*.doc')
|
|
||||||
```
|
|
||||||
|
|
||||||
The `..` operator is the recursive descent operator from [JSONPath](http://goessner.net/articles/JsonPath/), which will match a child at any depth (see examples below).
|
|
||||||
|
|
||||||
If your keys have keys that include `.` or `*` etc, use an array instead.
|
|
||||||
`['row', true, /^doc/]`.
|
|
||||||
|
|
||||||
If you use an array, `RegExp`s, booleans, and/or functions. The `..` operator is also available in array representation, using `{recurse: true}`.
|
|
||||||
any object that matches the path will be emitted as 'data' (and `pipe`d down stream)
|
|
||||||
|
|
||||||
If `path` is empty or null, no 'data' events are emitted.
|
|
||||||
|
|
||||||
### Examples
|
|
||||||
|
|
||||||
query a couchdb view:
|
|
||||||
|
|
||||||
``` bash
|
|
||||||
curl -sS localhost:5984/tests/_all_docs&include_docs=true
|
|
||||||
```
|
|
||||||
you will get something like this:
|
|
||||||
|
|
||||||
``` js
|
|
||||||
{"total_rows":129,"offset":0,"rows":[
|
|
||||||
{ "id":"change1_0.6995461115147918"
|
|
||||||
, "key":"change1_0.6995461115147918"
|
|
||||||
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
|
|
||||||
, "doc":{
|
|
||||||
"_id": "change1_0.6995461115147918"
|
|
||||||
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
|
|
||||||
},
|
|
||||||
{ "id":"change2_0.6995461115147918"
|
|
||||||
, "key":"change2_0.6995461115147918"
|
|
||||||
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
|
|
||||||
, "doc":{
|
|
||||||
"_id":"change2_0.6995461115147918"
|
|
||||||
, "_rev":"1-13677d36b98c0c075145bb8975105153"
|
|
||||||
, "hello":2
|
|
||||||
}
|
|
||||||
},
|
|
||||||
]}
|
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
we are probably most interested in the `rows.*.doc`
|
|
||||||
|
|
||||||
create a `Stream` that parses the documents from the feed like this:
|
|
||||||
|
|
||||||
``` js
|
|
||||||
var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
|
|
||||||
|
|
||||||
stream.on('data', function(data) {
|
|
||||||
console.log('received:', data);
|
|
||||||
});
|
|
||||||
```
|
|
||||||
awesome!
|
|
||||||
|
|
||||||
### recursive patterns (..)
|
|
||||||
|
|
||||||
`JSONStream.parse('docs..value')`
|
|
||||||
(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array)
|
|
||||||
will emit every `value` object that is a child, grand-child, etc. of the
|
|
||||||
`docs` object. In this example, it will match exactly 5 times at various depth
|
|
||||||
levels, emitting 0, 1, 2, 3 and 4 as results.
|
|
||||||
|
|
||||||
```js
|
|
||||||
{
|
|
||||||
"total": 5,
|
|
||||||
"docs": [
|
|
||||||
{
|
|
||||||
"key": {
|
|
||||||
"value": 0,
|
|
||||||
"some": "property"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{"value": 1},
|
|
||||||
{"value": 2},
|
|
||||||
{"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
|
|
||||||
{"value": 4}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## JSONStream.parse(pattern, map)

provide a function that can be used to map or filter
the json output. `map` is passed the value at that node of the pattern;
if `map` returns a non-nullish value (anything but `null` or `undefined`),
that value will be emitted in the stream. If it returns a nullish value,
nothing will be emitted.

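A minimal sketch of the `map` argument (the field names here are illustrative, not from the readme):

``` js
var JSONStream = require('JSONStream')

// keep only docs that have a numeric "hello" field, and emit just that field
var parser = JSONStream.parse('rows.*.doc', function (doc) {
  if (doc && typeof doc.hello === 'number') return doc.hello
  return null // nullish return values are filtered out of the stream
})
```
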
## JSONStream.stringify(open, sep, close)

Create a writable stream.

you may pass in custom `open`, `close`, and `separator` strings.
But, by default, `JSONStream.stringify()` will create an array
(with default options `open='[\n', sep='\n,\n', close='\n]\n'`).

If you call `JSONStream.stringify(false)`
the elements will only be separated by a newline.

If you only write one item this will be valid JSON.

If you write many items,
you can use a `RegExp` to split it into valid chunks.

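As a sketch of the default behaviour (object contents invented for illustration), piping a couple of objects through the array stringifier:

``` js
var JSONStream = require('JSONStream')
var es = require('event-stream')

// emits '[\n', then each object JSON-encoded, joined by '\n,\n', then '\n]\n'
es.readArray([{hello: 1}, {hello: 2}])
  .pipe(JSONStream.stringify())
  .pipe(process.stdout)
```
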
## JSONStream.stringifyObject(open, sep, close)

Very much like `JSONStream.stringify`,
but creates a writable stream for objects instead of arrays.

Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.

When you `.write()` to the stream you must supply an array with `[ key, data ]`
as the first argument.

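A minimal sketch (keys and values made up for illustration):

``` js
var JSONStream = require('JSONStream')

var out = JSONStream.stringifyObject()
out.pipe(process.stdout)

// each write takes [ key, data ]
out.write(['first', {hello: 'world'}])
out.write(['second', 42])
out.end() // roughly: {\n"first":{"hello":"world"}\n,\n"second":42\n}\n
```
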
## unix tool

query npm to see all the modules that browserify has ever depended on.

``` bash
curl https://registry.npmjs.org/browserify | JSONStream 'versions.*.dependencies'
```

## numbers

There are occasional problems parsing and unparsing very precise numbers.

I have opened an issue here:

https://github.com/creationix/jsonparse/issues/2

+1

## Acknowledgements

this module depends on https://github.com/creationix/jsonparse
by Tim Caswell
and also thanks to Florent Jaby for teaching me about parsing with:
https://github.com/Floby/node-json-streams

## license

Dual-licensed under the MIT License or the Apache License, version 2.0
41
src/node_modules/JSONStream/test/bool.js
generated
vendored
@ -1,41 +0,0 @@
var fs = require ('fs')
  , join = require('path').join
  , file = join(__dirname, 'fixtures','all_npm.json')
  , JSONStream = require('../')
  , it = require('it-is').style('colour')

function randomObj () {
  return (
    Math.random () < 0.4
    ? {hello: 'eonuhckmqjk',
       whatever: 236515,
       lies: true,
       nothing: [null],
//       stuff: [Math.random(),Math.random(),Math.random()]
      }
    : ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
  )
}

var expected = []
  , stringify = JSONStream.stringify()
  , es = require('event-stream')
  , stringified = ''
  , called = 0
  , count = 10
  , ended = false

while (count --)
  expected.push(randomObj())

es.connect(
  es.readArray(expected),
  stringify,
  JSONStream.parse([true]),
  es.writeArray(function (err, lines) {

    it(lines).has(expected)
    console.error('PASSED')
  })
)
18
src/node_modules/JSONStream/test/browser.js
generated
vendored
@ -1,18 +0,0 @@
var test = require('tape')
var JSONStream = require('../')
var testData = '{"rows":[{"hello":"world"}, {"foo": "bar"}]}'

test('basic parsing', function (t) {
  t.plan(2)
  var parsed = JSONStream.parse("rows.*")
  var parsedKeys = {}
  parsed.on('data', function(match) {
    parsedKeys[Object.keys(match)[0]] = true
  })
  parsed.on('end', function() {
    t.equal(!!parsedKeys['hello'], true)
    t.equal(!!parsedKeys['foo'], true)
  })
  parsed.write(testData)
  parsed.end()
})
27
src/node_modules/JSONStream/test/destroy_missing.js
generated
vendored
@ -1,27 +0,0 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var JSONStream = require('../');


var server = net.createServer(function(client) {
  var parser = JSONStream.parse([]);
  parser.on('end', function() {
    console.log('close')
    console.error('PASSED');
    server.close();
  });
  client.pipe(parser);
  var n = 4
  client.on('data', function () {
    if(--n) return
    client.end();
  })
});
server.listen(9999);


var client = net.connect({ port : 9999 }, function() {
  fs.createReadStream(file).pipe(client).on('data', console.log) //.resume();
});
29
src/node_modules/JSONStream/test/disabled/doubledot1.js
generated
vendored
@ -1,29 +0,0 @@
var fs = require ('fs')
  , join = require('path').join
  , file = join(__dirname, 'fixtures','all_npm.json')
  , JSONStream = require('../')
  , it = require('it-is')

var expected = JSON.parse(fs.readFileSync(file))
  , parser = JSONStream.parse('rows..rev')
  , called = 0
  , ended = false
  , parsed = []

fs.createReadStream(file).pipe(parser)

parser.on('data', function (data) {
  called ++
  parsed.push(data)
})

parser.on('end', function () {
  ended = true
})

process.on('exit', function () {
  it(called).equal(expected.rows.length)
  for (var i = 0 ; i < expected.rows.length ; i++)
    it(parsed[i]).deepEqual(expected.rows[i].value.rev)
  console.error('PASSED')
})
29
src/node_modules/JSONStream/test/disabled/doubledot2.js
generated
vendored
@ -1,29 +0,0 @@
var fs = require ('fs')
  , join = require('path').join
  , file = join(__dirname, 'fixtures','depth.json')
  , JSONStream = require('../')
  , it = require('it-is')

var expected = JSON.parse(fs.readFileSync(file))
  , parser = JSONStream.parse(['docs', {recurse: true}, 'value'])
  , called = 0
  , ended = false
  , parsed = []

fs.createReadStream(file).pipe(parser)

parser.on('data', function (data) {
  called ++
  parsed.push(data)
})

parser.on('end', function () {
  ended = true
})

process.on('exit', function () {
  it(called).equal(5)
  for (var i = 0 ; i < 5 ; i++)
    it(parsed[i]).deepEqual(i)
  console.error('PASSED')
})
44
src/node_modules/JSONStream/test/empty.js
generated
vendored
@ -1,44 +0,0 @@
var JSONStream = require('../')
  , stream = require('stream')
  , it = require('it-is')

var output = [ [], [] ]

var parser1 = JSONStream.parse(['docs', /./])
parser1.on('data', function(data) {
  output[0].push(data)
})

var parser2 = JSONStream.parse(['docs', /./])
parser2.on('data', function(data) {
  output[1].push(data)
})

var pending = 2
function onend () {
  if (--pending > 0) return
  it(output).deepEqual([
    [], [{hello: 'world'}]
  ])
  console.error('PASSED')
}
parser1.on('end', onend)
parser2.on('end', onend)

function makeReadableStream() {
  var readStream = new stream.Stream()
  readStream.readable = true
  readStream.write = function (data) { this.emit('data', data) }
  readStream.end = function (data) { this.emit('end') }
  return readStream
}

var emptyArray = makeReadableStream()
emptyArray.pipe(parser1)
emptyArray.write('{"docs":[]}')
emptyArray.end()

var objectArray = makeReadableStream()
objectArray.pipe(parser2)
objectArray.write('{"docs":[{"hello":"world"}]}')
objectArray.end()
4030
src/node_modules/JSONStream/test/fixtures/all_npm.json
generated
vendored
File diff suppressed because it is too large
15
src/node_modules/JSONStream/test/fixtures/depth.json
generated
vendored
@ -1,15 +0,0 @@
{
  "total": 5,
  "docs": [
    {
      "key": {
        "value": 0,
        "some": "property"
      }
    },
    {"value": 1},
    {"value": 2},
    {"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
    {"value": 4}
  ]
}
39
src/node_modules/JSONStream/test/fn.js
generated
vendored
@ -1,39 +0,0 @@
var fs = require ('fs')
  , join = require('path').join
  , file = join(__dirname, 'fixtures','all_npm.json')
  , JSONStream = require('../')
  , it = require('it-is')

function fn (s) {
  return !isNaN(parseInt(s, 10))
}

var expected = JSON.parse(fs.readFileSync(file))
  , parser = JSONStream.parse(['rows', fn])
  , called = 0
  , ended = false
  , parsed = []

fs.createReadStream(file).pipe(parser)

parser.on('data', function (data) {
  called ++
  it.has({
    id: it.typeof('string'),
    value: {rev: it.typeof('string')},
    key: it.typeof('string')
  })
  parsed.push(data)
})

parser.on('end', function () {
  ended = true
})

process.on('exit', function () {
  it(called).equal(expected.rows.length)
  it(parsed).deepEqual(expected.rows)
  console.error('PASSED')
})
135
src/node_modules/JSONStream/test/gen.js
generated
vendored
@ -1,135 +0,0 @@
return // dont run this test for now since tape is weird and broken on 0.10

var fs = require('fs')
var JSONStream = require('../')
var file = process.argv[2] || '/tmp/JSONStream-test-large.json'
var size = Number(process.argv[3] || 100000)
var tape = require('tape')
// if (process.title !== 'browser') {
  tape('out of mem', function (t) {
    t.plan(1)
    //////////////////////////////////////////////////////
    // Produces a random number between arg1 and arg2
    //////////////////////////////////////////////////////
    var randomNumber = function (min, max) {
      var number = Math.floor(Math.random() * (max - min + 1) + min);
      return number;
    };

    //////////////////////////////////////////////////////
    // Produces a random string of a length between arg1 and arg2
    //////////////////////////////////////////////////////
    var randomString = function (min, max) {

      // add several spaces to increase chanses of creating 'words'
      var chars = ' 0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
      var result = '';

      var randomLength = randomNumber(min, max);

      for (var i = randomLength; i > 0; --i) {
        result += chars[Math.round(Math.random() * (chars.length - 1))];
      }
      return result;
    };

    //////////////////////////////////////////////////////
    // Produces a random JSON document, as a string
    //////////////////////////////////////////////////////
    var randomJsonDoc = function () {

      var doc = {
        "CrashOccurenceID": randomNumber(10000, 50000),
        "CrashID": randomNumber(1000, 10000),
        "SiteName": randomString(10, 25),
        "MachineName": randomString(10, 25),
        "Date": randomString(26, 26),
        "ProcessDuration": randomString(18, 18),
        "ThreadIdentityName": null,
        "WindowsIdentityName": randomString(15, 40),
        "OperatingSystemName": randomString(35, 65),
        "DetailedExceptionInformation": randomString(100, 800)
      };

      doc = JSON.stringify(doc);
      doc = doc.replace(/\,/g, ',\n'); // add new lines after each attribute
      return doc;
    };

    //////////////////////////////////////////////////////
    // generates test data
    //////////////////////////////////////////////////////
    var generateTestData = function (cb) {

      console.log('generating large data file...');

      var stream = fs.createWriteStream(file, {
        encoding: 'utf8'
      });

      var i = 0;
      var max = size;
      var writing = false
      var split = ',\n';
      var doc = randomJsonDoc();
      stream.write('[');

      function write () {
        if(writing) return
        writing = true
        while(++i < max) {
          if(Math.random() < 0.001)
            console.log('generate..', i + ' / ' + size)
          if(!stream.write(doc + split)) {
            writing = false
            return stream.once('drain', write)
          }
        }
        stream.write(doc + ']')
        stream.end();
        console.log('END')
      }
      write()
      stream.on('close', cb)
    };

    //////////////////////////////////////////////////////
    // Shows that parsing 100000 instances using JSONStream fails
    //
    // After several seconds, you will get this crash
    //     FATAL ERROR: JS Allocation failed - process out of memory
    //////////////////////////////////////////////////////
    var testJSONStreamParse_causesOutOfMem = function (done) {
      var items = 0
      console.log('parsing data files using JSONStream...');

      var parser = JSONStream.parse([true]);
      var stream = fs.createReadStream(file);
      stream.pipe(parser);

      parser.on('data', function (data) {
        items++
        if(Math.random() < 0.01) console.log(items, '...')
      });

      parser.on('end', function () {
        t.equal(items, size)
      });

    };

    //////////////////////////////////////////////////////
    // main
    //////////////////////////////////////////////////////

    fs.stat(file, function (err, stat) {
      console.log(stat)
      if(err)
        generateTestData(testJSONStreamParse_causesOutOfMem);
      else
        testJSONStreamParse_causesOutOfMem()
    })

  })

// }
20
src/node_modules/JSONStream/test/issues.js
generated
vendored
@ -1,20 +0,0 @@
var JSONStream = require('../');
var test = require('tape')

test('#66', function (t) {
  var error = 0;
  var stream = JSONStream
    .parse()
    .on('error', function (err) {
      t.ok(err);
      error++;
    })
    .on('end', function () {
      t.ok(error === 1);
      t.end();
    });

  stream.write('["foo":bar[');
  stream.end();

});
Some files were not shown because too many files have changed in this diff