diff --git a/.babelrc b/.babelrc index d4d8656..9e3d274 100644 --- a/.babelrc +++ b/.babelrc @@ -1,7 +1,6 @@ { "presets": [ "es2015", - "stage-2", - "react" + "stage-2" ] } diff --git a/.eslintrc b/.eslintrc index 693d1aa..193ff15 100644 --- a/.eslintrc +++ b/.eslintrc @@ -1,11 +1,18 @@ { "env": { "node": true, - "mocha": true, + "jest": true }, "extends": "airbnb", "parser": "babel-eslint", "rules": { - "camelcase": 0 + "camelcase": 0, + "arrow-body-style": 0, + "class-methods-use-this": 0, + "import/prefer-default-export": 0, + "import/no-extraneous-dependencies": 0, + "import/imports-first": 0, + "no-use-before-define": 0, + "no-underscore-dangle": 0 } } diff --git a/.gitignore b/.gitignore index e67fcdf..6fded33 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,9 @@ node_modules .DS_Store dev.sqlite3 +test.sqlite3 .env .idea/ +yarn.lock +yarn-error.log +*.tgz \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index b9879cc..698c3cb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,8 @@ language: node_js node_js: - "6" - - "5" - - "4" +notifications: + email: false +script: + - npm run test:setup + - npm test diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..67ad692 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,50 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Launch", + "type": "node", + "request": "launch", + "program": "${workspaceRoot}/api/index.js", + "stopOnEntry": false, + "args": [ + "--watch", + "api", + "--exec", + "node_modules/.bin/babel-node" + ], + "cwd": "${workspaceRoot}", + "preLaunchTask": null, + "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/nodemon", + "runtimeArgs": [ + ], + "env": { + "NODE_ENV": "development" + }, + "console": "internalConsole", + "sourceMaps": true, + "outFiles": [] + }, + { + "name": "Attach", + "type": "node", + "request": "attach", + "port": 5858, + "address": "localhost", + "restart": false, + "sourceMaps": false, + "outFiles": [], + "localRoot": "${workspaceRoot}", + "remoteRoot": null + }, + { + "name": "Attach to Process", + "type": "node", + "request": "attach", + "processId": "${command.PickProcess}", + "port": 5858, + "sourceMaps": false, + "outFiles": [] + } + ] +} \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..651c071 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,82 @@ +# Apollo Contributor Guide + +Excited about Apollo and want to make it better? We’re excited too! + +Apollo is a community of developers just like you, striving to create the best tools and libraries around GraphQL. We welcome anyone who wants to contribute or provide constructive feedback, no matter the age or level of experience. If you want to help but don't know where to start, let us know, and we'll find something for you. + +Oh, and if you haven't already, sign up for the [Apollo Slack](http://www.apollodata.com/#slack). + +Here are some ways to contribute to the project, from easiest to most difficult: + +* [Reporting bugs](#reporting-bugs) +* [Improving the documentation](#improving-the-documentation) +* [Responding to issues](#responding-to-issues) +* [Small bug fixes](#small-bug-fixes) +* [Suggesting features](#suggesting-features) +* [Big pull requests](#big-prs) + +## Issues + +### Reporting bugs + +If you encounter a bug, please file an issue on GitHub via the repository of the sub-project you think contains the bug. 
If an issue you have is already reported, please add additional information or add a 👍 reaction to indicate your agreement. + +While we will try to be as helpful as we can on any issue reported, please include the following to maximize the chances of a quick fix: + +1. **Intended outcome:** What you were trying to accomplish when the bug occurred, and as much code as possible related to the source of the problem. +2. **Actual outcome:** A description of what actually happened, including a screenshot or copy-paste of any related error messages, logs, or other output that might be related. Places to look for information include your browser console, server console, and network logs. Please avoid non-specific phrases like “didn’t work” or “broke”. +3. **How to reproduce the issue:** Instructions for how the issue can be reproduced by a maintainer or contributor. Be as specific as possible, and only mention what is necessary to reproduce the bug. If possible, try to isolate the exact circumstances in which the bug occurs and avoid speculation over what the cause might be. + +Creating a good reproduction really helps contributors investigate and resolve your issue quickly. In many cases, the act of creating a minimal reproduction illuminates that the source of the bug was somewhere outside the library in question, saving time and effort for everyone. + +### Improving the documentation + +Improving the documentation, examples, and other open source content can be the easiest way to contribute to the library. If you see a piece of content that can be better, open a PR with an improvement, no matter how small! If you would like to suggest a big change or major rewrite, we’d love to hear your ideas but please open an issue for discussion before writing the PR. + +### Responding to issues + +In addition to reporting issues, a great way to contribute to Apollo is to respond to other peoples' issues and try to identify the problem or help them work around it. If you’re interested in taking a more active role in this process, please go ahead and respond to issues. And don't forget to say "Hi" on Apollo Slack! + +### Small bug fixes + +For a small bug fix change (less than 20 lines of code changed), feel free to open a pull request. We’ll try to merge it as fast as possible and ideally publish a new release on the same day. The only requirement is, make sure you also add a test that verifies the bug you are trying to fix. + +### Suggesting features + +Most of the features in Apollo came from suggestions by you, the community! We welcome any ideas about how to make Apollo better for your use case. Unless there is overwhelming demand for a feature, it might not get implemented immediately, but please include as much information as possible that will help people have a discussion about your proposal: + +1. **Use case:** What are you trying to accomplish, in specific terms? Often, there might already be a good way to do what you need and a new feature is unnecessary, but it’s hard to know without information about the specific use case. +2. **Could this be a plugin?** In many cases, a feature might be too niche to be included in the core of a library, and is better implemented as a companion package. If there isn’t a way to extend the library to do what you want, could we add additional plugin APIs? It’s important to make the case for why a feature should be part of the core functionality of the library. +3. 
**Is there a workaround?** Is this a more convenient way to do something that is already possible, or is there some blocker that makes a workaround unfeasible? + +Feature requests will be labeled as such, and we encourage using GitHub issues as a place to discuss new features and possible implementation designs. Please refrain from submitting a pull request to implement a proposed feature until there is consensus that it should be included. This way, you can avoid putting in work that can’t be merged in. + +Once there is a consensus on the need for a new feature, proceed as listed below under “Big PRs”. + +## Big PRs + +This includes: + +- Big bug fixes +- New features + +For significant changes to a repository, it’s important to settle on a design before starting on the implementation. This way, we can make sure that major improvements get the care and attention they deserve. Since big changes can be risky and might not always get merged, it’s good to reduce the amount of possible wasted effort by agreeing on an implementation design/plan first. + +1. **Open an issue.** Open an issue about your bug or feature, as described above. +2. **Reach consensus.** Some contributors and community members should reach an agreement that this feature or bug is important, and that someone should work on implementing or fixing it. +3. **Agree on intended behavior.** On the issue, reach an agreement about the desired behavior. In the case of a bug fix, it should be clear what it means for the bug to be fixed, and in the case of a feature, it should be clear what it will be like for developers to use the new feature. +4. **Agree on implementation plan.** Write a plan for how this feature or bug fix should be implemented. What modules need to be added or rewritten? Should this be one pull request or multiple incremental improvements? Who is going to do each part? +5. **Submit PR.** In the case where multiple dependent patches need to be made to implement the change, only submit one at a time. Otherwise, the others might get stale while the first is reviewed and merged. Make sure to avoid “while we’re here” type changes - if something isn’t relevant to the improvement at hand, it should be in a separate PR; this especially includes code style changes of unrelated code. +6. **Review.** At least one core contributor should sign off on the change before it’s merged. Look at the “code review” section below to learn about factors are important in the code review. If you want to expedite the code being merged, try to review your own code first! +7. **Merge and release!** + +### Code review guidelines + +It’s important that every piece of code in Apollo packages is reviewed by at least one core contributor familiar with that codebase. Here are some things we look for: + +1. **Required CI checks pass.** This is a prerequisite for the review, and it is the PR author's responsibility. As long as the tests don’t pass, the PR won't get reviewed. +2. **Simplicity.** Is this the simplest way to achieve the intended goal? If there are too many files, redundant functions, or complex lines of code, suggest a simpler way to do the same thing. In particular, avoid implementing an overly general solution when a simple, small, and pragmatic fix will do. +3. **Testing.** Do the tests ensure this code won’t break when other stuff changes around it? When it does break, will the tests added help us identify which part of the library has the problem? Did we cover an appropriate set of edge cases? 
Look at the test coverage report if there is one. Are all significant code paths in the new code exercised at least once? +4. **No unnecessary or unrelated changes.** PRs shouldn’t come with random formatting changes, especially in unrelated parts of the code. If there is some refactoring that needs to be done, it should be in a separate PR from a bug fix or feature, if possible. +5. **Code has appropriate comments.** Code should be commented, or written in a clear “self-documenting” way. +6. **Idiomatic use of the language.** In TypeScript, make sure the typings are specific and correct. In ES2015, make sure to use imports rather than require and const instead of var, etc. Ideally a linter enforces a lot of this, but use your common sense and follow the style of the surrounding code. diff --git a/README.md b/README.md index 649d869..0a5a727 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,10 @@ The Apollo Server backend shared by all Apollo client example apps. +Interact with the API yourself at [http://api.githunt.com/graphiql](http://api.githunt.com/graphiql). + [![Get on Slack](https://img.shields.io/badge/slack-join-orange.svg)](http://www.apollostack.com/#slack) -[![Build Status](https://travis-ci.org/apollostack/GitHunt-server.svg?branch=master)](https://travis-ci.org/apollostack/GitHunt-server) +[![Build Status](https://travis-ci.org/apollographql/GitHunt-API.svg?branch=master)](https://travis-ci.org/apollographql/GitHunt-API) Demonstrates: @@ -14,59 +16,42 @@ Please submit a pull request if you see anything that can be improved! ## Running the server -### 1. Install Node/npm - -Make sure you have Node.js 4 or newer installed. - -### 2. Clone and install dependencies - -``` -git clone https://github.com/apollostack/GitHunt.git -cd GitHunt -npm install -``` - -### 3. Run Migrations - -Set up the SQLite database and run migrations/seed data with the following commands: - -``` -npm run migrate -npm run seed -``` - -### 4. Get GitHub API keys - -- Go to [OAuth applications > Developer applications](https://github.com/settings/developers) in GitHub settings -- Click 'Register a new application' button -- Register your application like below -- Click 'Register application' button - -![Github OAuth](screenshots/github-oath-setup.png) - -On the following page, grab: +1. **Install Node/npm.** Make sure you have Node.js 4 or newer installed. +2. **Clone and install dependencies.** + Run the following commands: -- Client ID -- Client Secret + ``` + git clone https://github.com/apollostack/GitHunt-API.git + cd GitHunt-API + npm install + ``` -![OAuth Key](screenshots/github-oauth-keys.png) +3. **Run migrations.** Set up the SQLite database and run migrations/seed data with the following commands: -### 5. Add Environment Variables -Set your Client ID and Client Secret Environment variables: + ``` + npm run migrate + npm run seed + ``` -``` -export GITHUB_CLIENT_ID="your Client ID" -export GITHUB_CLIENT_SECRET="your Client Secret" -``` +4. **Get GitHub API keys.** + 1. Go to [OAuth applications > Developer applications](https://github.com/settings/developers) in GitHub settings + 2. Click 'Register a new application' button + 3. Register your application like below + 4. Click 'Register application' button at the bottom. [It should look like this screenshot of the app setup page.](screenshots/github-oath-setup.png) + 5. 
On the following page, grab the **Client ID** and **Client Secret**, as indicated in [this screenshot of the GitHub OAuth keys page.](screenshots/github-oauth-keys.png) -Or you can use `dotenv`. +5. **Add Environment Variables.** Set your Client ID and Client Secret Environment variables in the terminal like this: + ``` + export GITHUB_CLIENT_ID="your Client ID" + export GITHUB_CLIENT_SECRET="your Client Secret" + ``` -`cp .env.default .env` and edit with your Github keys. + Or you can use `dotenv`, to do this `cp .env.default .env` and edit with your Github keys. -### 6. Run the app +6. **Run the app.** -``` -npm start -``` + ``` + npm run dev + ``` -- Open graphiql at http://localhost:3010/graphiql +7. **Open the app.** Open http://localhost:3010/ to see what to do next. diff --git a/__mocks__/request-promise.js b/__mocks__/request-promise.js new file mode 100644 index 0000000..153df48 --- /dev/null +++ b/__mocks__/request-promise.js @@ -0,0 +1,57 @@ +let requestQueue = []; + +export default function rp(requestOptions) { + // Ensure we expected to get more requests + expect(requestQueue.length).not.toBe(0); + + const nextRequest = requestQueue.shift(); + // Ensure this is the request we expected + expect(requestOptions).toEqual(nextRequest.options); + + return new Promise((resolve, reject) => { + if (nextRequest.result) { + resolve(nextRequest.result); + } else if (nextRequest.error) { + reject(nextRequest.error); + } else { + throw new Error('Mocked request must have result or error.'); + } + }); +} + +function pushMockRequest({ options, result, error }) { + const defaultOptions = { + json: true, + headers: { + 'user-agent': 'GitHunt', + }, + resolveWithFullResponse: true, + }; + const { uri, ...rest } = options; + + const url = `https://api.github.com${uri}`; + + requestQueue.push({ + options: { + ...defaultOptions, + ...rest, + uri: url, + }, + result, + error, + }); +} + +function flushRequestQueue() { + requestQueue = []; +} + +function noRequestsLeft() { + expect(requestQueue.length).toBe(0); +} + +rp.__pushMockRequest = pushMockRequest; // eslint-disable-line no-underscore-dangle +rp.__flushRequestQueue = flushRequestQueue; // eslint-disable-line no-underscore-dangle +rp.__noRequestsLeft = noRequestsLeft; // eslint-disable-line no-underscore-dangle + +rp.actual = require.requireActual('request-promise'); diff --git a/__tests__/__snapshots__/basic.js.snap b/__tests__/__snapshots__/basic.js.snap new file mode 100644 index 0000000..0e79da7 --- /dev/null +++ b/__tests__/__snapshots__/basic.js.snap @@ -0,0 +1,65 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`accepts a query 1`] = ` +Object { + "data": Object { + "feed": Array [ + Object { + "postedBy": Object { + "login": "stubailo", + }, + "repository": Object { + "name": "apollo-client", + "owner": Object { + "login": "apollographql", + }, + }, + }, + Object { + "postedBy": Object { + "login": "helfer", + }, + "repository": Object { + "name": "graphql-server", + "owner": Object { + "login": "apollographql", + }, + }, + }, + Object { + "postedBy": Object { + "login": "tmeasday", + }, + "repository": Object { + "name": "meteor", + "owner": Object { + "login": "meteor", + }, + }, + }, + Object { + "postedBy": Object { + "login": "Slava", + }, + "repository": Object { + "name": "bootstrap", + "owner": Object { + "login": "twbs", + }, + }, + }, + Object { + "postedBy": Object { + "login": "Slava", + }, + "repository": Object { + "name": "d3", + "owner": Object { + "login": "d3", + }, + }, + }, + ], + }, +} +`; diff --git 
a/__tests__/basic.js b/__tests__/basic.js new file mode 100644 index 0000000..53a9877 --- /dev/null +++ b/__tests__/basic.js @@ -0,0 +1,92 @@ +import rp from 'request-promise'; +import casual from 'casual'; + +import { run } from '../api/server'; + +const testPort = 6789; +const endpointUrl = `http://localhost:${testPort}/graphql`; + +let server; +beforeAll(() => { + server = run({ PORT: testPort }); +}); + +it('accepts a query', async () => { + casual.seed(123); + + [ + ['apollographql/apollo-client', 'stubailo'], + ['apollographql/graphql-server', 'helfer'], + ['meteor/meteor', 'tmeasday'], + ['twbs/bootstrap', 'Slava'], + ['d3/d3', 'Slava'], + ].forEach(([full_name, postedBy]) => { + // First, it will request the repository; + rp.__pushMockRequest({ + options: { + uri: `/repos/${full_name}`, + }, + result: { + headers: { + etag: casual.string, + }, + body: { + name: full_name.split('/')[1], + full_name, + description: casual.sentence, + html_url: casual.url, + stargazers_count: casual.integer(0), + open_issues_count: casual.integer(0), + owner: { + login: full_name.split('/')[0], + avatar_url: casual.url, + html_url: casual.url, + }, + }, + }, + }); + + // Then the user who posted it + rp.__pushMockRequest({ + options: { + uri: `/users/${postedBy}`, + }, + result: { + headers: { + etag: casual.string, + }, + body: { + login: postedBy, + }, + }, + }); + }); + + const result = await fetchGraphQL(` + { + feed (type: NEW, limit: 5) { + repository { + owner { login } + name + } + + postedBy { login } + } + } + `); + + expect(result).toMatchSnapshot(); +}); + +afterAll(() => { + server.close(); + server = null; +}); + +function fetchGraphQL(query, variables) { + return rp.actual(endpointUrl, { + method: 'post', + body: { query, variables }, + json: true, + }); +} diff --git a/api/config.js b/api/config.js new file mode 100644 index 0000000..1446a51 --- /dev/null +++ b/api/config.js @@ -0,0 +1,10 @@ +export default { + // If set to to true, GitHunt will use `extractgql` in order to + // map query ids received from the client to GraphQL documents. + // + // Note that the same option must be enabled on the client + // and the extracted_queries.json file in both the client and API server + // must be the same. 
+ persistedQueries: false, + sessionStoreSecret: 'your secret', +}; diff --git a/api/github/connector.js b/api/github/connector.js index 1ecc878..ecf58ee 100644 --- a/api/github/connector.js +++ b/api/github/connector.js @@ -39,8 +39,6 @@ export class GitHubConnector { }; } - // TODO: pass GitHub API key - return Promise.all(urls.map((url) => { const cachedRes = eTagCache[url]; diff --git a/api/github/connector.test.js b/api/github/connector.test.js index 60d0dde..6cb1d94 100644 --- a/api/github/connector.test.js +++ b/api/github/connector.test.js @@ -1,69 +1,24 @@ -import { assert } from 'chai'; -import { GitHubConnector } from './connector'; - -let requestQueue = []; - -function mockRequestPromise(requestOptions) { - // Ensure we expected to get more requests - assert.notEqual(requestQueue.length, 0); - - const nextRequest = requestQueue.shift(); - // Ensure this is the request we expected - assert.deepEqual(requestOptions, nextRequest.options); +import rp from 'request-promise'; - return new Promise((resolve, reject) => { - if (nextRequest.result) { - resolve(nextRequest.result); - } else if (nextRequest.error) { - reject(nextRequest.error); - } else { - throw new Error('Mocked request must have result or error.'); - } - }); -} - -function pushMockRequest({ options, result, error }) { - const defaultOptions = { - json: true, - headers: { - 'user-agent': 'GitHunt', - }, - resolveWithFullResponse: true, - }; - const { uri, ...rest } = options; - - const url = `https://api.github.com${uri}`; - - requestQueue.push({ - options: { - ...defaultOptions, - ...rest, - uri: url, - }, - result, - error, - }); -} - -GitHubConnector.mockRequestPromise = mockRequestPromise; +import { GitHubConnector } from './connector'; describe('GitHub connector', () => { beforeEach(() => { - requestQueue = []; + rp.__flushRequestQueue(); }); afterEach(() => { - assert.equal(requestQueue.length, 0); + rp.__noRequestsLeft(); }); it('can be constructed', () => { - assert.isOk(new GitHubConnector()); + expect(new GitHubConnector()).toBeTruthy(); }); it('can load one endpoint', () => { const connector = new GitHubConnector(); - pushMockRequest({ + rp.__pushMockRequest({ options: { uri: '/endpoint' }, result: { headers: {}, @@ -72,14 +27,14 @@ describe('GitHub connector', () => { }); return connector.get('/endpoint').then((result) => { - assert.deepEqual(result, { id: 1 }); + expect(result).toEqual({ id: 1 }); }); }); it('fetches each endpoint only once per instance', () => { const connector = new GitHubConnector(); - pushMockRequest({ + rp.__pushMockRequest({ options: { uri: '/endpoint', }, @@ -91,7 +46,7 @@ describe('GitHub connector', () => { return connector.get('/endpoint') .then((result) => { - assert.deepEqual(result, { id: 1 }); + expect(result).toEqual({ id: 1 }); }) .then(() => ( // This get call doesn't actually call the API - note that we only @@ -99,7 +54,7 @@ describe('GitHub connector', () => { connector.get('/endpoint') )) .then((result) => { - assert.deepEqual(result, { id: 1 }); + expect(result).toEqual({ id: 1 }); }); }); @@ -109,7 +64,7 @@ describe('GitHub connector', () => { clientSecret: 'fake_client_secret', }); - pushMockRequest({ + rp.__pushMockRequest({ options: { uri: '/endpoint', qs: { @@ -126,7 +81,7 @@ describe('GitHub connector', () => { }); return connector.get('/endpoint').then((result) => { - assert.deepEqual(result, { id: 1 }); + expect(result).toEqual({ id: 1 }); }); }); @@ -134,7 +89,7 @@ describe('GitHub connector', () => { const connector = new GitHubConnector(); const etag = 
'etag'; - pushMockRequest({ + rp.__pushMockRequest({ options: { uri: '/endpoint', }, @@ -150,7 +105,7 @@ describe('GitHub connector', () => { const connector2 = new GitHubConnector(); - pushMockRequest({ + rp.__pushMockRequest({ options: { uri: '/endpoint', headers: { @@ -169,7 +124,7 @@ describe('GitHub connector', () => { return connector.get('/endpoint') .then(() => connector2.get('/endpoint')) .then((result) => { - assert.deepEqual(result, { id: 1 }); + expect(result).toEqual({ id: 1 }); }); }); }); diff --git a/api/github/schema.js b/api/github/schema.js index 2865673..a3cc492 100644 --- a/api/github/schema.js +++ b/api/github/schema.js @@ -1,25 +1,40 @@ import { property } from 'lodash'; export const schema = [` -# This uses the exact field names returned by the GitHub API for simplicity +# A repository object from the GitHub API. This uses the exact field names returned by the +# GitHub API for simplicity, even though the convention for GraphQL is usually to camel case. type Repository { + # Just the name of the repository, e.g. GitHunt-API name: String! + + # The full name of the repository with the username, e.g. apollostack/GitHunt-API full_name: String! + + # The description of the repository description: String + + # The link to the repository on GitHub html_url: String! + + # The number of people who have starred this repository on GitHub stargazers_count: Int! - open_issues_count: Int - # We should investigate how best to represent dates - created_at: String! + # The number of open issues on this repository on GitHub + open_issues_count: Int + # The owner of this repository on GitHub, e.g. apollostack owner: User } -# Uses exact field names from GitHub for simplicity +# A user object from the GitHub API. This uses the exact field names returned from the GitHub API. type User { + # The name of the user, e.g. apollostack login: String! + + # The URL to a directly embeddable image for this user's avatar avatar_url: String! + + # The URL of this user's GitHub page html_url: String! } `]; diff --git a/api/githubKeys.js b/api/githubKeys.js new file mode 100644 index 0000000..820eac3 --- /dev/null +++ b/api/githubKeys.js @@ -0,0 +1,8 @@ +import dotenv from 'dotenv'; + +dotenv.config({ silent: true }); + +export const { + GITHUB_CLIENT_ID, + GITHUB_CLIENT_SECRET, +} = process.env; diff --git a/api/githubLogin.js b/api/githubLogin.js new file mode 100644 index 0000000..62281d5 --- /dev/null +++ b/api/githubLogin.js @@ -0,0 +1,63 @@ +import session from 'express-session'; +import passport from 'passport'; +import { Strategy as GitHubStrategy } from 'passport-github'; +import knex from './sql/connector'; +import config from './config'; + +import { + GITHUB_CLIENT_ID, + GITHUB_CLIENT_SECRET, +} from './githubKeys'; + +const KnexSessionStore = require('connect-session-knex')(session); + +const store = new KnexSessionStore({ + knex, +}); + +export function setUpGitHubLogin(app) { + if (!GITHUB_CLIENT_ID) { + console.warn('GitHub client ID not passed; login won\'t work.'); // eslint-disable-line no-console + return null; + } + + const gitHubStrategyOptions = { + clientID: GITHUB_CLIENT_ID, + clientSecret: GITHUB_CLIENT_SECRET, + callbackURL: process.env.NODE_ENV !== 'production' ? 
+ 'http://localhost:3000/login/github/callback' : + 'http://www.githunt.com/login/github/callback', + }; + + passport.use(new GitHubStrategy(gitHubStrategyOptions, + (accessToken, refreshToken, profile, cb) => { + cb(null, profile); + })); + + passport.serializeUser((user, cb) => { cb(null, user); }); + passport.deserializeUser((obj, cb) => { cb(null, obj); }); + + app.use(session({ + secret: config.sessionStoreSecret, + resave: true, + saveUninitialized: true, + store, + })); + + app.use(passport.initialize()); + app.use(passport.session()); + + app.get('/login/github', + passport.authenticate('github')); + + app.get('/login/github/callback', + passport.authenticate('github', { failureRedirect: '/' }), + (req, res) => res.redirect('/')); + + app.get('/logout', (req, res) => { + req.logout(); + res.redirect('/'); + }); + + return store; +} diff --git a/api/index.html b/api/index.html new file mode 100644 index 0000000..4673411 --- /dev/null +++ b/api/index.html @@ -0,0 +1,37 @@ + + + + + + +

+ GitHunt API server
+
+ Thanks for downloading and running our example server app! This server doesn't include any UI code.
+
+ Try one of the following options:
+
+ Have any improvements in mind? File an issue or a PR about this app at
+ apollographql/GitHunt-API.
+ + diff --git a/api/index.js b/api/index.js index b04318e..a1136c9 100644 --- a/api/index.js +++ b/api/index.js @@ -1,125 +1,3 @@ -import express from 'express'; -import session from 'express-session'; -import passport from 'passport'; -import { apolloExpress, graphiqlExpress } from 'apollo-server'; -import { makeExecutableSchema } from 'graphql-tools'; -import { Strategy as GitHubStrategy } from 'passport-github'; -import bodyParser from 'body-parser'; -import dotenv from 'dotenv'; -import knex from './sql/connector'; +import { run } from './server'; -const KnexSessionStore = require('connect-session-knex')(session); -const store = new KnexSessionStore({ - knex, -}); - -import { schema, resolvers } from './schema'; -import { GitHubConnector } from './github/connector'; -import { Repositories, Users } from './github/models'; -import { Entries, Comments } from './sql/models'; - -dotenv.config({ silent: true }); -let PORT = 3010; - -if (process.env.PORT) { - PORT = parseInt(process.env.PORT, 10) + 100; -} - -const { - GITHUB_CLIENT_ID, - GITHUB_CLIENT_SECRET, -} = process.env; - -const app = express(); - -app.use(session({ - secret: 'your secret', - resave: true, - saveUninitialized: true, - store, -})); - -app.use(passport.initialize()); -app.use(passport.session()); - -app.use(bodyParser.urlencoded({ extended: true })); -app.use(bodyParser.json()); - -app.use(express.static('dist')); - -app.get('/login/github', - passport.authenticate('github')); - -app.get('/login/github/callback', - passport.authenticate('github', { failureRedirect: '/' }), - (req, res) => res.redirect('/')); - -app.get('/logout', (req, res) => { - req.logout(); - res.redirect('/'); -}); - -const executableSchema = makeExecutableSchema({ - typeDefs: schema, - resolvers, -}); - -app.use('/graphql', apolloExpress((req) => { - // Get the query, the same way express-graphql does it - // https://github.com/graphql/express-graphql/blob/3fa6e68582d6d933d37fa9e841da5d2aa39261cd/src/index.js#L257 - const query = req.query.query || req.body.query; - if (query && query.length > 2000) { - // None of our app's queries are this long - // Probably indicates someone trying to send an overly expensive query - throw new Error('Query too large.'); - } - - let user; - if (req.user) { - // We get req.user from passport-github with some pretty oddly named fields, - // let's convert that to the fields in our schema, which match the GitHub - // API field names. 
- user = { - login: req.user.username, - html_url: req.user.profileUrl, - avatar_url: req.user.photos[0].value, - }; - } - - const gitHubConnector = new GitHubConnector({ - clientId: GITHUB_CLIENT_ID, - clientSecret: GITHUB_CLIENT_SECRET, - }); - - return { - schema: executableSchema, - context: { - user, - Repositories: new Repositories({ connector: gitHubConnector }), - Users: new Users({ connector: gitHubConnector }), - Entries: new Entries(), - Comments: new Comments(), - }, - }; -})); - -app.use('/graphiql', graphiqlExpress({ - endpointURL: '/graphql', -})); - -app.listen(PORT, () => console.log( // eslint-disable-line no-console - `API Server is now running on http://localhost:${PORT}` -)); - -const gitHubStrategyOptions = { - clientID: GITHUB_CLIENT_ID, - clientSecret: GITHUB_CLIENT_SECRET, - callbackURL: 'http://localhost:3000/login/github/callback', -}; - -passport.use(new GitHubStrategy(gitHubStrategyOptions, (accessToken, refreshToken, profile, cb) => { - cb(null, profile); -})); - -passport.serializeUser((user, cb) => cb(null, user)); -passport.deserializeUser((obj, cb) => cb(null, obj)); +run(process.env); diff --git a/api/schema.js b/api/schema.js index 9546f44..e0096fc 100644 --- a/api/schema.js +++ b/api/schema.js @@ -1,27 +1,49 @@ import { merge } from 'lodash'; +import { makeExecutableSchema } from 'graphql-tools'; +import { withFilter } from 'graphql-subscriptions'; + import { schema as gitHubSchema, resolvers as gitHubResolvers } from './github/schema'; import { schema as sqlSchema, resolvers as sqlResolvers } from './sql/schema'; +import { pubsub } from './subscriptions'; const rootSchema = [` -# To select the sort order of the feed + +# A list of options for the sort order of the feed enum FeedType { + # Sort by a combination of freshness and score, using Reddit's algorithm HOT + + # Newest entries first NEW + + # Highest score entries first TOP } type Query { - # For the home page, the offset arg is optional to get a new page of the feed - feed(type: FeedType!, offset: Int, limit: Int): [Entry] + # A feed of repository submissions + feed( + # The sort order for the feed + type: FeedType!, + + # The number of items to skip, for pagination + offset: Int, - # For the entry page - entry(repoFullName: String!): Entry + # The number of items to fetch starting from the offset, for pagination + limit: Int + ): [Entry] - # To display the current user on the submission page, and the navbar + # A single entry + entry( + # The full repository name from GitHub, e.g. "apollostack/GitHunt-API" + repoFullName: String! + ): Entry + + # Return the currently logged in user, or null if nobody is logged in currentUser: User } -# Type of vote +# The type of vote to record, when submitting a vote enum VoteType { UP DOWN @@ -29,39 +51,64 @@ enum VoteType { } type Mutation { - # Submit a new repository - submitRepository(repoFullName: String!): Entry - - # Vote on a repository - vote(repoFullName: String!, type: VoteType!): Entry + # Submit a new repository, returns the new submission + submitRepository( + # The full repository name from GitHub, e.g. "apollostack/GitHunt-API" + repoFullName: String! + ): Entry + + # Vote on a repository submission, returns the submission that was voted on + vote( + # The full repository name from GitHub, e.g. "apollostack/GitHunt-API" + repoFullName: String!, + + # The type of vote - UP, DOWN, or CANCEL + type: VoteType! + ): Entry + + # Comment on a repository, returns the new comment + submitComment( + # The full repository name from GitHub, e.g. 
"apollostack/GitHunt-API" + repoFullName: String!, + + # The text content for the new comment + commentContent: String! + ): Comment +} - # Comment on a repository - submitComment(repoFullName: String!, commentContent: String!): Comment +type Subscription { + # Subscription fires on every comment added + commentAdded(repoFullName: String!): Comment } schema { query: Query mutation: Mutation + subscription: Subscription } + `]; +const COMMENT_ADDED_TOPIC = 'commentAdded'; + const rootResolvers = { Query: { - feed(_, { type, offset, limit }, context) { - const protectedLimit = (limit < 1 || limit > 10) ? 10 : limit; + feed(root, { type, offset, limit }, context) { + // Ensure API consumer can only fetch 20 items at most + const protectedLimit = (limit < 1 || limit > 20) ? 20 : limit; return context.Entries.getForFeed(type, offset, protectedLimit); }, - entry(_, { repoFullName }, context) { + entry(root, { repoFullName }, context) { return context.Entries.getByRepoFullName(repoFullName); }, - currentUser(_, __, context) { + currentUser(root, args, context) { return context.user || null; }, }, Mutation: { - submitRepository(_, { repoFullName }, context) { - if (! context.user) { + submitRepository(root, { repoFullName }, context) { + if (!context.user) { throw new Error('Must be logged in to submit a repository.'); } @@ -77,7 +124,8 @@ const rootResolvers = { )) .then(() => context.Entries.getByRepoFullName(repoFullName)); }, - submitComment(_, { repoFullName, commentContent }, context) { + + submitComment(root, { repoFullName, commentContent }, context) { if (!context.user) { throw new Error('Must be logged in to submit a comment.'); } @@ -86,16 +134,20 @@ const rootResolvers = { context.Comments.submitComment( repoFullName, context.user.login, - commentContent + commentContent, ) )) - .then(([id]) => ( - context.Comments.getCommentById(id) - )); + .then(([id]) => context.Comments.getCommentById(id)) + .then((comment) => { + // publish subscription notification + pubsub.publish(COMMENT_ADDED_TOPIC, { commentAdded: comment }); + + return comment; + }); }, - vote(_, { repoFullName, type }, context) { - if (! 
context.user) { + vote(root, { repoFullName, type }, context) { + if (!context.user) { throw new Error('Must be logged in to vote.'); } @@ -108,13 +160,29 @@ const rootResolvers = { return context.Entries.voteForEntry( repoFullName, voteValue, - context.user.login + context.user.login, ).then(() => ( context.Entries.getByRepoFullName(repoFullName) )); }, }, + Subscription: { + commentAdded: { + subscribe: withFilter(() => pubsub.asyncIterator(COMMENT_ADDED_TOPIC), (payload, args) => { + return payload.commentAdded.repository_name === args.repoFullName; + }), + }, + }, }; -export const schema = [...rootSchema, ...gitHubSchema, ...sqlSchema]; -export const resolvers = merge(rootResolvers, gitHubResolvers, sqlResolvers); +// Put schema together into one array of schema strings +// and one map of resolvers, like makeExecutableSchema expects +const schema = [...rootSchema, ...gitHubSchema, ...sqlSchema]; +const resolvers = merge(rootResolvers, gitHubResolvers, sqlResolvers); + +const executableSchema = makeExecutableSchema({ + typeDefs: schema, + resolvers, +}); + +export default executableSchema; diff --git a/api/server.js b/api/server.js new file mode 100644 index 0000000..ec698be --- /dev/null +++ b/api/server.js @@ -0,0 +1,246 @@ +import path from 'path'; +import express from 'express'; +import cookie from 'cookie'; +import cookieParser from 'cookie-parser'; +import cors from 'cors'; +import { graphqlExpress, graphiqlExpress } from 'graphql-server-express'; +import OpticsAgent from 'optics-agent'; +import bodyParser from 'body-parser'; +import { invert, isString } from 'lodash'; +import { createServer } from 'http'; +import { SubscriptionServer } from 'subscriptions-transport-ws'; +import { execute, subscribe } from 'graphql'; + +import { + GITHUB_CLIENT_ID, + GITHUB_CLIENT_SECRET, +} from './githubKeys'; + +import { setUpGitHubLogin } from './githubLogin'; +import { GitHubConnector } from './github/connector'; +import { Repositories, Users } from './github/models'; +import { Entries, Comments } from './sql/models'; + +import schema from './schema'; +import queryMap from '../extracted_queries.json'; +import config from './config'; + +const WS_GQL_PATH = '/subscriptions'; + +// Arguments usually come from env vars +export function run({ + OPTICS_API_KEY, + PORT: portFromEnv = 3010, + } = {}) { + if (OPTICS_API_KEY) { + OpticsAgent.instrumentSchema(schema); + } + + let port = portFromEnv; + if (isString(portFromEnv)) { + port = parseInt(portFromEnv, 10); + } + + const wsGqlURL = process.env.NODE_ENV !== 'production' + ? 
`ws://localhost:${port}${WS_GQL_PATH}` + : `ws://api.githunt.com${WS_GQL_PATH}`; + + const app = express(); + + app.use(cors()); + app.use(bodyParser.urlencoded({ extended: true })); + app.use(bodyParser.json()); + + const invertedMap = invert(queryMap); + + app.use( + '/graphql', + (req, resp, next) => { + if (config.persistedQueries) { + // eslint-disable-next-line no-param-reassign + req.body.query = invertedMap[req.body.id]; + } + next(); + }, + ); + + const sessionStore = setUpGitHubLogin(app); + app.use(cookieParser(config.sessionStoreSecret)); + + if (OPTICS_API_KEY) { + app.use('/graphql', OpticsAgent.middleware()); + } + + app.use('/graphql', graphqlExpress((req) => { + if (!config.persistedQueries) { + // Get the query, the same way express-graphql does it + // https://github.com/graphql/express-graphql/blob/3fa6e68582d6d933d37fa9e841da5d2aa39261cd/src/index.js#L257 + const query = req.query.query || req.body.query; + if (query && query.length > 2000) { + // None of our app's queries are this long + // Probably indicates someone trying to send an overly expensive query + throw new Error('Query too large.'); + } + } + + let user; + if (req.user) { + // We get req.user from passport-github with some pretty oddly named fields, + // let's convert that to the fields in our schema, which match the GitHub + // API field names. + user = { + login: req.user.username, + html_url: req.user.profileUrl, + avatar_url: req.user.photos[0].value, + }; + } + + // Initialize a new GitHub connector instance for every GraphQL request, so that API fetches + // are deduplicated per-request only. + const gitHubConnector = new GitHubConnector({ + clientId: GITHUB_CLIENT_ID, + clientSecret: GITHUB_CLIENT_SECRET, + }); + + let opticsContext; + if (OPTICS_API_KEY) { + opticsContext = OpticsAgent.context(req); + } + + return { + schema, + context: { + user, + Repositories: new Repositories({ connector: gitHubConnector }), + Users: new Users({ connector: gitHubConnector }), + Entries: new Entries(), + Comments: new Comments(), + opticsContext, + }, + }; + })); + + app.use('/graphiql', graphiqlExpress({ + endpointURL: '/graphql', + subscriptionsEndpoint: wsGqlURL, + query: `{ + feed (type: NEW, limit: 5) { + repository { + owner { login } + name + } + + postedBy { login } + } + } + `, + })); + + // Serve our helpful static landing page. Not used in production. 
+ app.get('/', (req, res) => { + res.sendFile(path.join(__dirname, 'index.html')); + }); + + const server = createServer(app); + + server.listen(port, () => { + console.log(`API Server is now running on http://localhost:${port}`); // eslint-disable-line no-console + console.log(`API Server over web socket with subscriptions is now running on ws://localhost:${port}${WS_GQL_PATH}`); // eslint-disable-line no-console + }); + + // eslint-disable-next-line + new SubscriptionServer( + { + schema, + execute, + subscribe, + + // the onOperation function is called for every new operation + // and we use it to set the GraphQL context for this operation + onOperation: (msg, params, socket) => { + return new Promise((resolve) => { + if (!config.persistedQueries) { + // Get the query, the same way express-graphql does it + // https://github.com/graphql/express-graphql/blob/3fa6e68582d6d933d37fa9e841da5d2aa39261cd/src/index.js#L257 + const query = params.query; + if (query && query.length > 2000) { + // None of our app's queries are this long + // Probably indicates someone trying to send an overly expensive query + throw new Error('Query too large.'); + } + } + + const gitHubConnector = new GitHubConnector({ + clientId: GITHUB_CLIENT_ID, + clientSecret: GITHUB_CLIENT_SECRET, + }); + + // Support for persistedQueries + if (config.persistedQueries) { + // eslint-disable-next-line no-param-reassign + params.query = invertedMap[msg.payload.id]; + } + + let opticsContext; + if (OPTICS_API_KEY) { + opticsContext = OpticsAgent.context(socket.upgradeReq); + } + + let wsSessionUser = null; + if (socket.upgradeReq) { + const cookies = cookie.parse(socket.upgradeReq.headers.cookie); + const sessionID = cookieParser.signedCookie(cookies['connect.sid'], config.sessionStoreSecret); + + const baseContext = { + context: { + Repositories: new Repositories({ connector: gitHubConnector }), + Users: new Users({ connector: gitHubConnector }), + Entries: new Entries(), + Comments: new Comments(), + opticsContext, + }, + }; + + const paramsWithFulfilledBaseContext = Object.assign({}, params, baseContext); + + if (!sessionID) { + resolve(paramsWithFulfilledBaseContext); + + return; + } + + // get the session object + sessionStore.get(sessionID, (err, session) => { + if (err) { + throw new Error('Failed retrieving sessionID from the sessionStore.'); + } + + if (session && session.passport && session.passport.user) { + const sessionUser = session.passport.user; + wsSessionUser = { + login: sessionUser.username, + html_url: sessionUser.profileUrl, + avatar_url: sessionUser.photos[0].value, + }; + + resolve(Object.assign(paramsWithFulfilledBaseContext, { + context: Object.assign(paramsWithFulfilledBaseContext.context, { + user: wsSessionUser, + }), + })); + } + + resolve(paramsWithFulfilledBaseContext); + }); + } + }); + }, + }, + { + path: WS_GQL_PATH, + server, + }, + ); + + return server; +} diff --git a/api/sql/connector.js b/api/sql/connector.js index 5f954cf..0e836b1 100644 --- a/api/sql/connector.js +++ b/api/sql/connector.js @@ -1,6 +1,6 @@ import knex from 'knex'; -import { development } from '../../knexfile'; +import knexfile from '../../knexfile'; // Eventually we want to wrap Knex to do some batching and caching, but for // now this will do since we know none of our queries need it -export default knex(development); +export default knex(knexfile[process.env.NODE_ENV || 'development']); diff --git a/api/sql/models.js b/api/sql/models.js index 5470a30..ccfe888 100644 --- a/api/sql/models.js +++ b/api/sql/models.js @@ 
-1,24 +1,30 @@ +import RedditScore from 'reddit-score'; + import knex from './connector'; +// A utility function that makes sure we always query the same columns function addSelectToEntryQuery(query) { - query.select('entries.*', knex.raw('SUM(votes.vote_value) as score')) + query.select('entries.*', knex.raw('coalesce(sum(votes.vote_value), 0) as score')) .leftJoin('votes', 'entries.id', 'votes.entry_id') .groupBy('entries.id'); } -function convertNullColsToZero({ score, ...rest }) { +// If we don't have a score, it is NULL by default +// Convert it to 0 on read. +function handleNullScoreInRow({ score, ...rest }) { return { score: score || 0, ...rest, }; } -function mapNullColsToZero(query) { +// Given a Knex query promise, resolve it and then format one or more rows +function formatRows(query) { return query.then((rows) => { if (rows.map) { - return rows.map(convertNullColsToZero); + return rows.map(handleNullScoreInRow); } - return convertNullColsToZero(rows); + return handleNullScoreInRow(rows); }); } @@ -28,38 +34,38 @@ export class Comments { .where({ id }); return query.then(([row]) => row); } - getCommentsByRepoName(name) { + + getCommentsByRepoName(name, limit, offset) { const query = knex('comments') .where({ repository_name: name }) .orderBy('created_at', 'desc'); - return query.then((rows) => ( - rows || [] - )); + + if (limit !== -1) { + query.limit(limit).offset(offset); + } + + return query.then(rows => (rows || [])); } + getCommentCount(name) { const query = knex('comments') .where({ repository_name: name }) .count(); - return query.then((rows) => ( - rows.map((row) => ( - row['count(*)'] || '0' - )) - )); + return query.then(rows => rows.map(row => (row['count(*)'] || row.count || '0'))); } + submitComment(repoFullName, username, content) { - return knex.transaction((trx) => ( - trx('comments') - .insert({ - content, - created_at: Date.now(), - repository_name: repoFullName, - posted_by: username, - }) - )); + return knex.transaction(trx => trx('comments') + .insert({ + content, + created_at: new Date(Date.now()), + repository_name: repoFullName, + posted_by: username, + }) + .returning('id')); } } export class Entries { - getForFeed(type, offset, limit) { const query = knex('entries') .modify(addSelectToEntryQuery); @@ -68,6 +74,8 @@ export class Entries { query.orderBy('created_at', 'desc'); } else if (type === 'TOP') { query.orderBy('score', 'desc'); + } else if (type === 'HOT') { + query.orderBy('hot_score', 'desc'); } else { throw new Error(`Feed type ${type} not implemented.`); } @@ -78,7 +86,7 @@ export class Entries { query.limit(limit); - return mapNullColsToZero(query); + return formatRows(query); } getByRepoFullName(name) { @@ -90,7 +98,7 @@ export class Entries { }) .first(); - return mapNullColsToZero(query); + return formatRows(query); } voteForEntry(repoFullName, voteValue, username) { @@ -127,6 +135,57 @@ export class Entries { username, vote_value: voteValue, }) + )) + // Update hot score + .then(() => this.updateHotScore(repoFullName)); + } + + updateHotScore(repoFullName) { + let entryId; + let createdAt; + + return Promise.resolve() + .then(() => ( + knex('entries') + .where({ + repository_name: repoFullName, + }) + .select(['id', 'created_at']) + .first() + .then(({ id, created_at }) => { + entryId = id; + createdAt = created_at; + }) + )) + .then(() => { + return knex('votes') + .select(['vote_value']) + .where({ + entry_id: entryId, + }); + }) + .then((results) => { + function countVotes(vote) { + return (count, value) => count + (value === vote ? 
1 : 0); + } + + if (results && results.map) { + const votes = results.map(vote => vote.vote_value); + const ups = votes.reduce(countVotes(1), 0); + const downs = votes.reduce(countVotes(-1), 0); + const date = createdAt instanceof Date ? createdAt : new Date(createdAt); + + return (new RedditScore()).hot(ups, downs, date); + } + + return 0; + }) + .then(hotScore => ( + knex('entries') + .where('id', entryId) + .update({ + hot_score: hotScore, + }) )); } @@ -135,7 +194,7 @@ export class Entries { return Promise.resolve() - // First, get the entry_id from repoFullName + // First, get the entry_id from repoFullName .then(() => ( knex('entries') .where({ @@ -158,7 +217,7 @@ export class Entries { .first() )) - .then((vote) => vote || { vote_value: 0 }); + .then(vote => vote || { vote_value: 0 }); } submitRepository(repoFullName, username) { @@ -166,27 +225,26 @@ export class Entries { const rateLimitThresh = 3; // Rate limiting logic - return knex.transaction((trx) => ( - trx('entries') - .count() - .where('posted_by', '=', username) - .where('created_at', '>', Date.now() - rateLimitMs) - .then((obj) => { - // If the user has already submitted too many times, we don't - // post the repo. - const postCount = obj[0]['count(*)']; - if (postCount > rateLimitThresh) { - throw new Error('Too many repos submitted in the last hour!'); - } else { - return trx('entries') - .insert({ - created_at: Date.now(), - updated_at: Date.now(), - repository_name: repoFullName, - posted_by: username, - }); - } - }) - )); + return knex.transaction(trx => trx('entries') + .count() + .where('posted_by', '=', username) + .where('created_at', '>', new Date(Date.now() - rateLimitMs)) + .then((obj) => { + // If the user has already submitted too many times, we don't + // post the repo. + const postCount = obj[0]['count(*)']; + if (postCount > rateLimitThresh) { + throw new Error('Too many repos submitted in the last hour!'); + } else { + return trx('entries') + .insert({ + created_at: new Date(Date.now()), + updated_at: new Date(Date.now()), + repository_name: repoFullName, + posted_by: username, + }); + } + })) + .then(() => this.updateHotScore(repoFullName)); } } diff --git a/api/sql/schema.js b/api/sql/schema.js index 0adda2e..e0d0328 100644 --- a/api/sql/schema.js +++ b/api/sql/schema.js @@ -1,27 +1,60 @@ import { property, constant } from 'lodash'; export const schema = [` + +# A comment about an entry, submitted by a user type Comment { + # The SQL ID of this entry + id: Int! + + # The GitHub user who posted the comment postedBy: User! + + # A timestamp of when the comment was posted createdAt: Float! # Actually a date + + # The text of the comment content: String! + + # The repository which this comment is about repoName: String! } +# XXX to be removed type Vote { vote_value: Int! } +# Information about a GitHub repository submitted to GitHunt type Entry { + # Information about the repository from GitHub repository: Repository! + + # The GitHub user who submitted this entry postedBy: User! + + # A timestamp of when the entry was submitted createdAt: Float! # Actually a date + + # The score of this repository, upvotes - downvotes score: Int! - comments: [Comment]! # Should this be paginated? + + # The hot score of this repository + hotScore: Float! + + # Comments posted about this repository + comments(limit: Int, offset: Int): [Comment]! + + # The number of comments posted about this repository commentCount: Int! + + # The SQL ID of this entry id: Int! + + # XXX to be changed vote: Vote! 
} + `]; export const resolvers = { @@ -32,10 +65,11 @@ export const resolvers = { postedBy({ posted_by }, _, context) { return context.Users.getByLogin(posted_by); }, - comments({ repository_name }, _, context) { - return context.Comments.getCommentsByRepoName(repository_name); + comments({ repository_name }, { limit = -1, offset = 0 }, context) { + return context.Comments.getCommentsByRepoName(repository_name, limit, offset); }, createdAt: property('created_at'), + hotScore: property('hot_score'), commentCount({ repository_name }, _, context) { return context.Comments.getCommentCount(repository_name) || constant(0); }, @@ -44,6 +78,7 @@ export const resolvers = { return context.Entries.haveVotedForEntry(repository_name, context.user.login); }, }, + Comment: { createdAt: property('created_at'), postedBy({ posted_by }, _, context) { diff --git a/api/subscriptions.js b/api/subscriptions.js new file mode 100644 index 0000000..d86d23f --- /dev/null +++ b/api/subscriptions.js @@ -0,0 +1,3 @@ +import { PubSub } from 'graphql-subscriptions'; + +export const pubsub = new PubSub(); diff --git a/extracted_queries.json b/extracted_queries.json new file mode 100644 index 0000000..fbd5dbd --- /dev/null +++ b/extracted_queries.json @@ -0,0 +1 @@ +{"query Comment($repoName: String!) {\n currentUser {\n login\n html_url\n __typename\n }\n entry(repoFullName: $repoName) {\n id\n postedBy {\n login\n html_url\n __typename\n }\n createdAt\n comments {\n ...CommentsPageComment\n __typename\n }\n repository {\n full_name\n html_url\n description\n open_issues_count\n stargazers_count\n __typename\n }\n __typename\n }\n}\n\nfragment CommentsPageComment on Comment {\n id\n postedBy {\n login\n html_url\n __typename\n }\n createdAt\n content\n __typename\n}\n\nfragment CommentsPageComment on Comment {\n id\n postedBy {\n login\n html_url\n __typename\n }\n createdAt\n content\n __typename\n}\n":1,"subscription onCommentAdded($repoFullName: String!) {\n commentAdded(repoFullName: $repoFullName) {\n id\n postedBy {\n login\n html_url\n __typename\n }\n createdAt\n content\n __typename\n }\n}\n":2,"query Feed($type: FeedType!, $offset: Int, $limit: Int) {\n currentUser {\n login\n __typename\n }\n feed(type: $type, offset: $offset, limit: $limit) {\n ...FeedEntry\n __typename\n }\n}\n\nfragment FeedEntry on Entry {\n id\n commentCount\n repository {\n full_name\n html_url\n owner {\n avatar_url\n __typename\n }\n __typename\n }\n ...VoteButtons\n ...RepoInfo\n __typename\n}\n\nfragment VoteButtons on Entry {\n score\n vote {\n vote_value\n __typename\n }\n __typename\n}\n\nfragment RepoInfo on Entry {\n createdAt\n repository {\n description\n stargazers_count\n open_issues_count\n __typename\n }\n postedBy {\n html_url\n login\n __typename\n }\n __typename\n}\n":3,"query CurrentUserForLayout {\n currentUser {\n login\n avatar_url\n __typename\n }\n}\n":4,"mutation submitComment($repoFullName: String!, $commentContent: String!) {\n submitComment(repoFullName: $repoFullName, commentContent: $commentContent) {\n ...CommentsPageComment\n __typename\n }\n}\n\nfragment CommentsPageComment on Comment {\n id\n postedBy {\n login\n html_url\n __typename\n }\n createdAt\n content\n __typename\n}\n\nfragment CommentsPageComment on Comment {\n id\n postedBy {\n login\n html_url\n __typename\n }\n createdAt\n content\n __typename\n}\n":5,"mutation submitRepository($repoFullName: String!) 
{\n submitRepository(repoFullName: $repoFullName) {\n createdAt\n __typename\n }\n}\n":6,"mutation vote($repoFullName: String!, $type: VoteType!) {\n vote(repoFullName: $repoFullName, type: $type) {\n score\n id\n vote {\n vote_value\n __typename\n }\n __typename\n }\n}\n":7} \ No newline at end of file diff --git a/knexfile.js b/knexfile.js index 93e4f2d..eba559c 100644 --- a/knexfile.js +++ b/knexfile.js @@ -1,44 +1,27 @@ -// Update with your config settings. +// Since Knex always runs this file first, all of our seeds and migrations are babelified. +require('babel-register'); -module.exports = { +const parse = require('pg-connection-string').parse; + +const DATABASE_URL = process.env.DATABASE_URL; +module.exports = { development: { client: 'sqlite3', connection: { - filename: './dev.sqlite3' - } + filename: './dev.sqlite3', + }, + useNullAsDefault: true, + }, + test: { + client: 'sqlite3', + connection: { + filename: './test.sqlite3', + }, + useNullAsDefault: true, + }, + production: DATABASE_URL && { + client: 'pg', + connection: Object.assign({}, parse(DATABASE_URL), { ssl: true }), }, - // - // staging: { - // client: 'postgresql', - // connection: { - // database: 'my_db', - // user: 'username', - // password: 'password' - // }, - // pool: { - // min: 2, - // max: 10 - // }, - // migrations: { - // tableName: 'knex_migrations' - // } - // }, - // - // production: { - // client: 'postgresql', - // connection: { - // database: 'my_db', - // user: 'username', - // password: 'password' - // }, - // pool: { - // min: 2, - // max: 10 - // }, - // migrations: { - // tableName: 'knex_migrations' - // } - // } - }; diff --git a/migrations/20160518201950_create_comments_entries_votes.js b/migrations/20160518201950_create_comments_entries_votes.js index 14551d3..0084451 100644 --- a/migrations/20160518201950_create_comments_entries_votes.js +++ b/migrations/20160518201950_create_comments_entries_votes.js @@ -1,6 +1,6 @@ -exports.up = function(knex, Promise) { +export function up(knex, Promise) { return Promise.all([ - knex.schema.createTable('comments', function (table) { + knex.schema.createTable('comments', (table) => { table.increments(); table.timestamps(); table.string('posted_by'); @@ -8,14 +8,15 @@ exports.up = function(knex, Promise) { table.string('repository_name'); }), - knex.schema.createTable('entries', function (table) { + knex.schema.createTable('entries', (table) => { table.increments(); table.timestamps(); table.string('repository_name').unique(); table.string('posted_by'); + table.float('hot_score'); }), - knex.schema.createTable('votes', function (table) { + knex.schema.createTable('votes', (table) => { table.increments(); table.timestamps(); table.integer('entry_id'); @@ -24,12 +25,12 @@ exports.up = function(knex, Promise) { table.unique(['entry_id', 'username']); }), ]); -}; +} -exports.down = function(knex, Promise) { +export function down(knex, Promise) { return Promise.all([ knex.schema.dropTable('comments'), knex.schema.dropTable('entries'), knex.schema.dropTable('votes'), ]); -}; +} diff --git a/package.json b/package.json index 4e91475..ab27bb6 100644 --- a/package.json +++ b/package.json @@ -1,15 +1,18 @@ { - "name": "githunt", + "name": "githunt-api", "version": "1.0.0", "description": "Example app for Apollo", - "main": "index.js", "scripts": { - "start": "nodemon api/index.js --watch api --exec babel-node", - "lint": "eslint api", - "test": "mocha --compilers js:babel-core/register --reporter spec --full-trace 'api/**/*.test.js' && npm run lint", + "start": 
"babel-node api/index.js", + "dev": "nodemon api/index.js --watch api --exec babel-node", + "lint": "eslint api migrations seeds", + "test": "jest && npm run lint", + "test:watch": "jest --watch", "seed": "knex seed:run", - "migrate": "knex migrate:latest" + "migrate": "knex migrate:latest", + "test:setup": "rm test.sqlite3 || true; knex migrate:latest --env test && knex seed:run --env test" }, + "private": true, "repository": { "type": "git", "url": "git+https://github.com/apollostack/GitHunt.git" @@ -21,39 +24,49 @@ }, "homepage": "https://github.com/apollostack/GitHunt#readme", "devDependencies": { - "babel-cli": "^6.8.0", - "babel-core": "^6.8.0", - "babel-eslint": "^6.1.0", - "babel-loader": "^6.2.4", - "babel-preset-es2015": "^6.6.0", - "babel-preset-react": "^6.5.0", - "babel-preset-stage-2": "^6.5.0", - "babel-register": "^6.9.0", - "chai": "^3.5.0", - "eslint": "^2.13.1", - "eslint-config-airbnb": "^9.0.1", - "eslint-plugin-babel": "^3.3.0", - "eslint-plugin-import": "^1.8.1", - "eslint-plugin-jsx-a11y": "^1.2.2", - "eslint-plugin-react": "^5.1.1", - "mocha": "^2.4.5", - "nodemon": "^1.9.2" + "babel-eslint": "7.2.1", + "babel-jest": "^20.0.0", + "babel-register": "6.24.0", + "casual": "^1.5.11", + "eslint": "3.19.0", + "eslint-config-airbnb": "14.1.0", + "eslint-plugin-babel": "4.1.1", + "eslint-plugin-import": "2.2.0", + "eslint-plugin-jsx-a11y": "4.0.0", + "eslint-plugin-react": "6.10.1", + "jest": "^19.0.2", + "nodemon": "1.11.0", + "sqlite3": "3.1.8" }, "dependencies": { - "apollo-server": "^0.2.1", - "body-parser": "^1.15.1", - "connect-session-knex": "^1.0.23", - "dataloader": "^1.2.0", - "dotenv": "^2.0.0", - "express": "^4.13.4", - "express-session": "^1.13.0", - "graphql": "^0.6.1", - "graphql-tools": "^0.6.2", - "knex": "^0.11.3", - "lodash": "^4.12.0", - "passport": "^0.3.2", - "passport-github": "^1.1.0", - "request-promise": "^3.0.0", - "sqlite3": "^3.1.4" + "babel-cli": "6.23.0", + "babel-core": "6.24.1", + "babel-preset-es2015": "6.24.0", + "babel-preset-react": "6.24.1", + "babel-preset-stage-2": "6.22.0", + "body-parser": "1.17.1", + "connect-session-knex": "1.3.4", + "cookie": "^0.3.1", + "cookie-parser": "^1.4.3", + "cors": "^2.8.3", + "dataloader": "1.3.0", + "dotenv": "4.0.0", + "express": "4.15.2", + "express-session": "1.15.2", + "graphql": "^0.10.1", + "graphql-server-express": "^0.8.0", + "graphql-subscriptions": "^0.4.2", + "graphql-tools": "^1.0.0", + "knex": "0.13.0", + "lodash": "4.17.4", + "optics-agent": "^1.0.5", + "passport": "0.3.2", + "passport-github": "1.1.0", + "persistgraphql": "^0.3.0", + "pg": "^6.1.2", + "pg-connection-string": "^0.1.3", + "reddit-score": "0.0.2", + "request-promise": "4.2.1", + "subscriptions-transport-ws": "^0.7.0" } } diff --git a/seeds/dev.js b/seeds/dev.js deleted file mode 100644 index b36de19..0000000 --- a/seeds/dev.js +++ /dev/null @@ -1,2 +0,0 @@ -require('babel-register'); -module.exports = { seed: function () {} } diff --git a/seeds/seed.js b/seeds/seed.js index 16154c7..92d87f6 100644 --- a/seeds/seed.js +++ b/seeds/seed.js @@ -1,12 +1,29 @@ -const _ = require('lodash'); +import _ from 'lodash'; +import RedditScore from 'reddit-score'; + +function countScore(score) { + return (count, value) => count + (value === score ? 1 : 0); +} + +function hot(repoVotes, date) { + const redditScore = new RedditScore(); + + const createdAt = date instanceof Date ? 
date : new Date(date); + + const scores = _.values(repoVotes || {}); + const ups = scores.reduce(countScore(1), 0); + const downs = scores.reduce(countScore(-1), 0); + + return redditScore.hot(ups, downs, createdAt); +} const repos = [ { - repository_name: 'apollostack/apollo-client', + repository_name: 'apollographql/apollo-client', posted_by: 'stubailo', }, { - repository_name: 'apollostack/apollo-server', + repository_name: 'apollographql/graphql-server', posted_by: 'helfer', }, { @@ -61,11 +78,6 @@ const repos = [ const repoIds = {}; -const usenames = [ - 'stubailo', - 'helfer', -]; - const votes = { [repos[0].repository_name]: { stubailo: 1, @@ -88,12 +100,17 @@ export function seed(knex, Promise) { // Insert some entries for the repositories .then(() => { return Promise.all(repos.map(({ repository_name, posted_by }, i) => { + const createdAt = new Date(Date.now() - (i * 10000)); + const repoVotes = votes[repository_name]; + const hotScore = hot(repoVotes, createdAt); + return knex('entries').insert({ - created_at: Date.now() - i * 10000, - updated_at: Date.now() - i * 10000, + created_at: createdAt, + updated_at: createdAt, repository_name, posted_by, - }).then(([id]) => { + hot_score: hotScore, + }).returning('id').then(([id]) => { repoIds[repository_name] = id; }); }));