diff --git a/.circleci/config.yml b/.circleci/config.yml index fedf2b4bd6090..cff7048af7b10 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -130,58 +130,6 @@ commands: - slack/status: channel: eng-react-integration-status - run-benchmark: - parameters: - working_directory: - type: string - NUM_PAGES: - type: string - BENCHMARK_CONTENT_SOURCE: - type: string - BENCHMARK_REPO_NAME: - type: string - default: gatsbyjs/gatsby - BENCHMARK_SITE_TYPE: - type: string - default: BLOG - BENCHMARK_BRANCH: - type: string - default: master - steps: - - checkout - - run: - command: npm install - working_directory: << parameters.working_directory >> - environment: - NUM_PAGES: << parameters.NUM_PAGES >> - - run: - command: npm run build - working_directory: << parameters.working_directory >> - environment: - BENCHMARK_BUILD_TYPE: COLD_START - NUM_PAGES: << parameters.NUM_PAGES >> - BENCHMARK_CONTENT_SOURCE: << parameters.BENCHMARK_CONTENT_SOURCE >> - BENCHMARK_REPO_NAME: << parameters.BENCHMARK_REPO_NAME >> - BENCHMARK_SITE_TYPE: << parameters.BENCHMARK_SITE_TYPE >> - BENCHMARK_BRANCH: << parameters.BENCHMARK_BRANCH >> - CI_NAME: circleci - - run: - command: npm install - working_directory: << parameters.working_directory >> - environment: - NUM_PAGES: << parameters.NUM_PAGES >> - - run: - command: npm run build - working_directory: << parameters.working_directory >> - environment: - BENCHMARK_BUILD_TYPE: WARM_START - NUM_PAGES: << parameters.NUM_PAGES >> - BENCHMARK_CONTENT_SOURCE: << parameters.BENCHMARK_CONTENT_SOURCE >> - BENCHMARK_REPO_NAME: << parameters.BENCHMARK_REPO_NAME >> - BENCHMARK_SITE_TYPE: << parameters.BENCHMARK_SITE_TYPE >> - BENCHMARK_BRANCH: << parameters.BENCHMARK_BRANCH >> - CI_NAME: circleci - e2e-test: parameters: skip_file_change_test: @@ -399,6 +347,19 @@ jobs: command: yarn run update-source working_directory: ~/project/scripts/i18n + sync_translation_repo: + executor: node + steps: + - checkout + - run: git config --global user.name "GatsbyJS Bot" + - run: git config --global user.email "core-team@gatsbyjs.com" + - run: + command: yarn + working_directory: ~/project/scripts/i18n + - run: + command: yarn run-all sync + working_directory: ~/project/scripts/i18n + windows_unit_tests: executor: name: win/vs2019 @@ -439,210 +400,19 @@ jobs: paths: - "*" - benchmark_markdown_id_512: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_id - NUM_PAGES: "512" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_id - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_id_4096: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_id - NUM_PAGES: "4096" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_id - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_id_8192: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_id - NUM_PAGES: "8192" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_id - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_id_32768: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_id - NUM_PAGES: "32768" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_id - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_slug_512: - docker: - - image: "circleci/node:12" - 
steps: - - run-benchmark: - working_directory: benchmarks/markdown_slug - NUM_PAGES: "512" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_slug - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_slug_4096: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_slug - NUM_PAGES: "4096" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_slug - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_slug_8192: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_slug - NUM_PAGES: "8192" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_slug - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_slug_32768: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_slug - NUM_PAGES: "32768" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_slug - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_table_512: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_table - NUM_PAGES: "512" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_table - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_table_4096: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_table - NUM_PAGES: "4096" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_table - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_table_8192: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_table - NUM_PAGES: "8192" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_table - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_table_32768: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_table - NUM_PAGES: "32768" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_table - BENCHMARK_SITE_TYPE: BLOG - - benchmark_mdx_512: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/mdx - NUM_PAGES: "512" - BENCHMARK_CONTENT_SOURCE: MDX - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/mdx - BENCHMARK_SITE_TYPE: BLOG - - benchmark_mdx_4096: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/mdx - NUM_PAGES: "4096" - BENCHMARK_CONTENT_SOURCE: MDX - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/mdx - BENCHMARK_SITE_TYPE: BLOG - - benchmark_mdx_8192: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/mdx - NUM_PAGES: "8192" - BENCHMARK_CONTENT_SOURCE: MDX - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/mdx - BENCHMARK_SITE_TYPE: BLOG - - benchmark_mdx_32768: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/mdx - NUM_PAGES: "32768" - BENCHMARK_CONTENT_SOURCE: MDX - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/mdx - BENCHMARK_SITE_TYPE: BLOG - workflows: version: 2 - benchmark: - jobs: - - benchmark_markdown_id_512 - - benchmark_markdown_id_4096 - - benchmark_markdown_id_8192 - - benchmark_markdown_id_32768 - - 
benchmark_markdown_slug_512 - - benchmark_markdown_slug_4096 - - benchmark_markdown_slug_8192 - - benchmark_markdown_slug_32768 - - benchmark_markdown_table_512 - - benchmark_markdown_table_4096 - - benchmark_markdown_table_8192 - - benchmark_markdown_table_32768 - - benchmark_mdx_512 - - benchmark_mdx_4096 - - benchmark_mdx_8192 - - benchmark_mdx_32768 + weekly-i18n-sync: triggers: - schedule: - cron: 22 16 * * * + cron: "0 1 * * 6" filters: branches: only: - master + jobs: + - sync_translation_repo nightly-react-next: triggers: diff --git a/benchmarks/source-wordpress/gatsby-config.js b/benchmarks/source-wordpress/gatsby-config.js index 525ca36d03827..18756833d2749 100644 --- a/benchmarks/source-wordpress/gatsby-config.js +++ b/benchmarks/source-wordpress/gatsby-config.js @@ -1,5 +1,5 @@ require("dotenv").config({ - path: `.env`, + path: `.env.${process.env.NODE_ENV}`, }) module.exports = { diff --git a/benchmarks/source-wordpress/package.json b/benchmarks/source-wordpress/package.json index 6a539b989e503..b47a5b6fb5fe0 100644 --- a/benchmarks/source-wordpress/package.json +++ b/benchmarks/source-wordpress/package.json @@ -12,18 +12,14 @@ "serve": "gatsby serve", "start": "npm run develop" }, - "resolutions": { - "sharp": "0.25.1" - }, "dependencies": { "dotenv": "^8.2.0", "gatsby": "^2.19.35", "gatsby-image": "^2.2.40", "gatsby-plugin-sharp": "^2.4.5", "gatsby-source-filesystem": "^2.1.48", - "gatsby-source-wordpress-experimental": "^0.0.15", + "gatsby-source-wordpress-experimental": "^0.0.31", "gatsby-transformer-sharp": "^2.3.14", - "lodash.kebabcase": "^4.1.1", "react": "^16.12.0", "react-dom": "^16.12.0" }, diff --git a/docs/blog/2020-03-30-Fast-Secure-Flexible-MagMutual/index.md b/docs/blog/2020-03-30-Fast-Secure-Flexible-MagMutual/index.md index fc003743074e7..1faa18e14a31d 100644 --- a/docs/blog/2020-03-30-Fast-Secure-Flexible-MagMutual/index.md +++ b/docs/blog/2020-03-30-Fast-Secure-Flexible-MagMutual/index.md @@ -9,11 +9,11 @@ tags: - netlify --- -MagMutual, a leading professional healthcare liability insurer, needed to redesign and re-platform the company’s [MagMutual.com](http://www.magmutual.com) website to further its core mission: to serve and protect policyholders. +MagMutual, a leading professional healthcare liability insurer, needed to redesign and re-platform the company’s [MagMutual.com](https://www.magmutual.com) website to further its core mission: to serve and protect policyholders. ![Overview of the new MagMutal website UI](./MagMutual_design_sm.png) -To achieve the company's vision for their new site, MagMutual collaborated with [Mediacurrent](https://www.mediacurrent.com/), the Atlanta-based open source development and digital marketing agency. The new magmutual.com united Drupal and Gatsby to form a fully open source, enterprise-grade system that empowers MagMutual’s web team to closely yet easily manage content and customer experience. +To achieve the company's vision for their new site, MagMutual collaborated with [Mediacurrent](https://www.mediacurrent.com/), the Atlanta-based open source development and digital marketing agency. The new MagMutual.com united Drupal and Gatsby to form a fully open source, enterprise-grade system that empowers MagMutual’s web team to closely yet easily manage content and customer experience. 
## The Challenge @@ -23,7 +23,7 @@ For MagMutual, the best path forward was an enterprise-grade CMS built on an ope ## The Solution -A fully decoupled Drupal 8 system powered by the Gatsbyjs platform lifted the burden from MagMutual’s technology team and put content authors in the driver’s seat. The Drupal 8 backend provides a powerful capacity for content modeling. Gatsby, the presentation layer, adds a robust dimension of UI flexibility and performance. +A fully decoupled Drupal 8 system powered by the Gatsby platform lifted the burden from MagMutual’s technology team and put content authors in the driver’s seat. The Drupal 8 backend provides a powerful capacity for content modeling. Gatsby, the presentation layer, adds a robust dimension of UI flexibility and performance. ![Mission statement page on MagMutual.com](./MagMutual_mission.png) @@ -73,9 +73,9 @@ From a security perspective, he continued, Drupal gave MagMutual a highly secure ### _Why Apollo GraphQL Server?_ -Central data source: Creates a single data source for your website, aggregating data from multiple data sources. -Data integrity: Apollo removes the need to sync data from different sources so it can be displayed on the website. Data is queried and retrieved in real-time using a simple API. -Developer velocity: Self-documented API for developers increases efficiency. +- **Central data source:** Creates a single data source for your website, aggregating data from multiple data sources. +- **Data integrity:** Apollo removes the need to sync data from different sources so it can be displayed on the website. Data is queried and retrieved in real-time using a simple API. +- **Developer velocity:** Self-documented API for developers increases efficiency. ### _Why Netlify?_ @@ -98,5 +98,5 @@ MagMutual serves the intersection of two rapidly evolving industries: insurance ## Resources - See the [complete case study for MagMutual.com](https://www.mediacurrent.com/work/case-study/magmutual-drupal-8-gatsby). -- Learn more about some of the development tools used for this project in our webinar, [Rain + GatsbyJS: Fast-Tracking to Drupal 8](https://www.mediacurrent.com/videos/webinar-recording-rain-gatsbyjs-fast-tracking-drupal-8). +- Learn more about some of the development tools used for this project in our webinar, [Rain + Gatsby: Fast-Tracking to Drupal 8](https://www.mediacurrent.com/videos/webinar-recording-rain-gatsbyjs-fast-tracking-drupal-8). - Planning for a decoupled Drupal/Gatsby project and not sure where to start? [Mediacurrent can help](https://www.mediacurrent.com/contact-us). diff --git a/docs/blog/2020-04-02-LA-2020-Schau/index.md b/docs/blog/2020-04-02-LA-2020-Schau/index.md index f4b39d0e23c30..71b1a2dc2c807 100644 --- a/docs/blog/2020-04-02-LA-2020-Schau/index.md +++ b/docs/blog/2020-04-02-LA-2020-Schau/index.md @@ -14,7 +14,7 @@ tags: _Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. 
Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ -Dustin Shau, Gatsby's Head of Product, is a dedicated open source developer. He was first drawn to the framework by Gatsby’s exceptional performance and outstanding developer experience. Now a Gatsby team member, Dustin is focused on making Gatsby the fastest, most inclusive platform for building websites and web applications. +Dustin Schau, Gatsby's Head of Product, is a dedicated open source developer. He was first drawn to the framework by Gatsby’s exceptional performance and outstanding developer experience. Now a Gatsby team member, Dustin is focused on making Gatsby the fastest, most inclusive platform for building websites and web applications. Watch Dustin demonstrate the impressive build-time performance of Gatsby and Gatsby Cloud by walking through typical developer workflows, including updating a website using Contentful's headless CMS and then rapidly deploying the results through a content delivery network (CDN). And discover how Gatsby enables developers to quickly incorporate accessibility capabilities—such as a SkipNav function—to deliver inclusive web experiences to the widest possible audience. diff --git a/docs/blog/2020-04-05-LA-2020-Gladwell/index.md b/docs/blog/2020-04-05-LA-2020-Gladwell/index.md index ba538952dbb00..667ffa074c8b1 100644 --- a/docs/blog/2020-04-05-LA-2020-Gladwell/index.md +++ b/docs/blog/2020-04-05-LA-2020-Gladwell/index.md @@ -2,7 +2,7 @@ title: "Gatsby Days LA 2020 Video 3: Slash Build Times with Gatsby Builds Best Practices" date: 2020-04-05 author: Greg Thomas -excerpt: "React developer Grant Gladwell tells how he identified best practices for using Gatsby Builds to cut build time for image-heavy websites by a factor of five." +excerpt: "React developer Grant Glidewell tells how he identified best practices for using Gatsby Builds to cut build time for image-heavy websites by a factor of five." tags: - gatsby-days - community @@ -13,8 +13,8 @@ tags: _Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ -Grant Gladwell is a React developer dedicated to employing modern technologies in cutting-edge projects. During his time at the digital experience agency Third and Grove, Grant was part of the team that integrated Drupal and Gatsby to create the agency’s fast, yet content-rich website. He also helped build Gatsby Preview + Drupal—a live preview module for Drupal that can be used with Gatsby Cloud. +Grant Glidewell is a React developer dedicated to employing modern technologies in cutting-edge projects. During his time at the digital experience agency Third and Grove, Grant was part of the team that integrated Drupal and Gatsby to create the agency’s fast, yet content-rich website. He also helped build Gatsby Preview + Drupal—a live preview module for Drupal that can be used with Gatsby Cloud. 
At Gatsby Days LA 2020, Grant focused on performance. After analyzing top Gatsby-based sites, he and his team identified best practices that can help developers achieve blazing fast speeds. Check out this video of Grant’s presentation to discover how Gatsby Builds helped Third and Grove reduce build times for its image-heavy site from about 40 minutes to only 8. And learn how Gatsby’s lazy load components can deliver impressive performance benefits without excessive coding. -[![Gatsby Days LA Video 3: What separates the fastest Gatsby sites from everybody else with Grant Gladwell](https://res.cloudinary.com/marcomontalbano/image/upload/v1585858632/video_to_markdown/images/youtube--xMorT50I0cw-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=xMorT50I0cw "Gatsby Days LA Video 3: What separates the fastest Gatsby sites from everybody else with Grant Gladwell") +[![Gatsby Days LA Video 3: What separates the fastest Gatsby sites from everybody else with Grant Glidewell](https://res.cloudinary.com/marcomontalbano/image/upload/v1585858632/video_to_markdown/images/youtube--xMorT50I0cw-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=xMorT50I0cw "Gatsby Days LA Video 3: What separates the fastest Gatsby sites from everybody else with Grant Glidewell") diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/index.md b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/index.md new file mode 100644 index 0000000000000..faeadf524cef6 --- /dev/null +++ b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/index.md @@ -0,0 +1,286 @@ +--- +title: "Rebuilding the Agility CMS Website with Gatsby (one page at a time)" +date: 2020-04-13 +author: Joel Varty +excerpt: "When the Agility CMS team asked Joel Varty, the company's president, for a home page upgrade that could handle not only content but new integrations with mar-tech apps, he jumped at the chance to build it on Gatsby using JAMstack architecture." +tags: + - netlify + - headless-cms + - jamstack +--- + +I've been preaching about JAMStack for a while now, and lately I've been talking a lot about how you can [move your website to JAMStack without rebuilding everything](https://agilitycms.com/resources/posts/migrate-to-jamstack-now-no-excuses). + +I decided it was time to take my own advice and upgrade my company's website, [agilitycms.com](https://agilitycms.com), starting with the home page, and adding pages and sections over time. Agility CMS is a headless content management system running in the cloud (Microsoft Azure). The current website is built on ASP.Net Core. Our marketing team came to me with a request to build a brand new home page which included not only updating content, but a brand new design, new modules, and new integrations with mar-tech. + +This was just the opportunity I’d been looking for: A chance to practice what I've been preaching! What's also great is the current .net website is already built using a headless CMS, so I don't have to rewrite or migrate any content. + +## Goals + +- Build the new home page using [Gatsby](https://www.gatsbyjs.org/) +- Re-use much of the existing site content from [our headless cms](https://agilitycms.com/) +- Zero downtime + +## tl;dr + +For those of you who just want to see the code for the new site (it only has code for the modules that are on the homepage right now, but it will expand over time), it's all here on GitHub: https://github.com/agility/agility-website-gatsby. 
+ +## Steps + +Here's what I did to get everything up and running, right from coding the new site, to deploying, testing and flipping over the DNS. + +Get it running locally with Gatsby +Implement the Header and Footer +Create a new Home Page +Run it in Gatsby Cloud +Deploy to Netlify +Setup the CDN to do the Edge Routing + +What's really cool is that this workflow isn't just for upgrading Agility websites to JAMstack - you can use it for any website! Now let’s break each step into specific details. + +## Step 1: Get it running locally with Gatsby + +It's really easy to get started creating a Gatsby website with Agility CMS. Just clone the [starter repo from github](https://github.com/agility/agility-gatsby-starter), open up the folder in [VS Code](https://code.visualstudio.com/) and pop in your API Keys. + +```shell +git clone https://github.com/agility/agility-gatsby-starter.git +``` + +Now, find your API keys on the Getting Started page of the [Agility CMS Content Manager](https://manager.agilitycms.com/) + +![Agility CMS Getting Started landing page](post-image-1.png "Agility CMS Screenshot") + +Put your keys into the **.env.development** and **.env.production** files. They look something like this and have instructions about which values go where. + +```text +# Your Instance Id +AGILITY_GUID= + +# Your Preview API Key (recommended) - you can get this from the Getting Started Page in Agility CMS +AGILITY_API_KEY= + +# If using your Preview API Key, set this to true +AGILITY_API_ISPREVIEW=true + +# If you want to enable /__refresh endpoint +ENABLE_GATSBY_REFRESH_ENDPOINT=true +``` + +Now, check out the **gatsby.config** file - it has a section for plugins, and the Agility CMS source plugin is called **@agility/gatsby-source-agilitycms**. Check that the language code and channel name matches what you have in your Agility CMS instance. + +### Modules and Page Templates + +Since this was an existing website, we already had a few Page Templates and Module Definitions set up in the instance. We need to make sure we at least have placeholders in our new Gatsby project for those, and we'll just implement whatever is needed for our new home page. + +![Project Folder Structure](post-image-2.png "Project Folder Structure") + +There are folders for Page Templates and Modules, and you can just put in placeholder React code for these right now. + +Here's an example Page Template component with a single content zone called "Main": + +```jsx +import React from "react" +import ContentZone from "../agility/components/ContentZone" + +const MainTemplate = props => { + return ( +
+    <div>
+      <ContentZone name="Main" {...props} />
+    </div>
+ ) +} +export default MainTemplate +``` + +Here's an example Module component that doesn't do anything except output its name. + +```jsx +import React from "react" + +const LatestResources = ({ item }) => { + return
<div>LatestResources</div>
+} + +export default LatestResources +``` + +When I got all those things in place, I started up Gatsby to see what would happen. + +```shell +gatsby develop +``` + +Gatsby will pull down all the content for our website and put it into GraphQL. This is a _content sync_, so from now on it will only pull down a delta (what's changed) from Agility CMS. +![Alt Text](post-image-3.png "Agility CMS - Gatsby - Terminal Output") + +## Step 2: Implement the Header and Footer + +We need to make our new website look just like the old one, so we need to match the colors, fonts, and other visual styles as much as we can. You may want to pull in the CSS from your old site—or start from scratch if you want to make a clean break. + +Either way, this is a great time to familiarize yourself with the GraphQL data in your website. Point your browser to [http://localhost:8000/\_\_\_graphql](http://localhost:8000/___graphql) to start exploring your data, and you can build the query access and Shared Content or Sitemap data. You can see that all content is available, grouped by content definition name. + +Here's the query that I used to grab a Global Header shared content item, as well as the nested sitemap as JSON. + +```graphql +query GlobalHeaderQuery { + agilityGlobalHeader(properties: { referenceName: { eq: "globalheader" } }) { + customFields { + marketingBanner + logo { + url + label + } + } + preHeaderLinks { + customFields { + title + uRL { + href + target + text + } + } + } + } + agilitynestedsitemap { + internal { + content + } + } +} +``` + +Your query will look different, of course, but I hope you get the idea of how to query your Agility CMS content. + +Now, you can create a component that uses a `` to pull in the data and make it available. Check out the example `GlobalHeader.js` component in your project for an example of that. + +## Step 3: Create a new Home Page + +In Agility CMS, the first page in your sitemap is considered your Home Page. So, I created a new home page and temporarily called it home-2. I didn't publish it, but this meant that I could use this to build out the modules on the new home page. + +![Agility CMS Screenshot - temporary home page](post-image-4.png "Agility CMS Screenshot - Home Page") + +I created a couple of new Module Definitions that I needed for the new page design, so I created new react components in the **modules** folder for those. The amazing thing about the Agility CMS Gatsby implementation is that nearly all the data that you need to render a module on a page is given to you in a property called **item**. + +What I normally do is just `console.log("ModuleName", item)` so I can see exactly what that data looks like. Then run the site locally in your browser http://localhost:8000 and open up your Developer Tools to see what it looks like in the console. + +### Hot Reloading - Code and Content + +One of the best things about React development with Gatsby is that everything can be hot reloaded, including the content! + +If you leave your browser open beside your code, you can just make changes and see them. Additionally, if you open a second terminal window, you can also pull down any changes that you make to the content in Agility CMS without having to run `gatsby develop` again. + +```shell +curl -X POST http://localhost:8000/__refresh +``` + +Here's a side-by-side screenshot of my 2 monitor setup. You can see that I have 2 terminal windows opened in VS Code. 
+ +![two screens side by side showing hot reloading website and the Gatsby code for it](post-image-5.png "Side-by-side Hot Module Reload") + +I really love this workflow! It makes it really easy to tweak things and see the changes instantly. + +## Step 4: Run it in Gatsby Cloud + +To get going, [Gatsby Cloud](https://www.gatsbyjs.com/) is the easiest way to Preview and Build Gatsby sites. The free version is enough to get you started. + +Push your code to a GitHub repo, sign up for Gatsby Cloud, and create a new site. When asked, simply choose "I already have a Gatsby site" and don't add any integrations just now. + +![landing page for Gatsby Cloud Create New Site](post-image-6.png "Gatsby Cloud - Create New Site") + +You can securely add your API Keys in the Environment Variable section of Settings. + +![Gatsby webpage for setting environment variables](post-image-7.png "Gatsby Cloud - Environment Variables") + +Now you can take the Preview link from Gatsby and plug that into Agility CMS in the Domain Configuration area of the Settings section. + +Additionally, Gatsby gives you webhook URLs for Preview and Build. You can go ahead and plug these into the Webhook area in Agility Settings. + +## Step 5: Deploy to Netlify + +Netlify is a really great service to easily host static websites. Even better, it integrates seamlessly so that Gatsby can automatically deploy your website to Netlify when it builds! + +Go ahead and create a free Netlify account and point to it under the Gatsby **Hosting Integrations** settings section. + +Since Gatsby is going to be building the LIVE version of our site, we need to publish our new Homepage in Agility. If you've reviewed everything in Preview and you're ready to go, the first thing you need to do is to disable the Syncing Web Servers for the existing website in Agility CMS. You'll have to coordinate this with your content team, of course. + +When I was testing all this out, I actually built my new site using the Preview API Keys temporarily. That way I could verify everything was working first. + +In the end, you're going to end up with a URL to your new home page in Netlify. + +## Step 6: Setup the CDN to do the Edge Routing + +We can use Edge computing to decide whether to route to the new website or the old one, depending on the page. + +In this example, I decided to use a [Stackpath](https://www.stackpath.com/) Script to do this for us. + +You set up a Stackpath site just like normal, but pointing to your OLD website's unique hostname. It can't be your public DNS name - you need to have another unique way to address that site. For example, since our website is hosted in an Azure App Service, we get an azurewebsites.net URL. + +Now you create a Script in Stackpath to do the routing. In our case, we ONLY want to route requests to the home page, plus any Gatsby-specific stuff, to our new website. + +You can also see that I'm only allowing for 60 seconds on caching in the CDN for all requests. This is because we don't have anything built into this workflow to clear the cache in this CDN, and I don't want my content team to have to wait too long to see their changes. I'll take care of that later. 
+ +```javascript +// sample script +addEventListener("fetch", event => { + event.respondWith(handleRequest(event.request)) +}) + +/** + * Fetch and return the request body + * @param {Request} request + */ +async function handleRequest(request) { + // Wrap your script in a try/catch and return the error stack to view error information + try { + /* The request can be modified here before sending it with fetch */ + + const originalUrl = request.url + const url = new URL(request.url) + // we need get the url in order to figure out where to route them + let path = url.pathname + + //secondary domain... + const secDomain = "https://my-new-website.netlify.com" + + if ( + path == "/" || //redirect the home page... + path.indexOf("/webpack") != -1 || + path.indexOf("/common") != -1 || + path.indexOf("/component") != -1 || + path.indexOf("/page-data") != -1 || + path.indexOf("/styles") != -1 || + path.indexOf("/app-") != -1 + ) { + // we need get the url in order to figure out where to route them + request.url = secDomain + path + } + + const response = await fetch(request) + + response.headers.set("Cache-Control", "public, max-age=60") + + return response + } catch (e) { + return new Response(e.stack || e, { status: 500 }) + } +} +``` + +You can now test this whole thing with the unique Stackpath URL that you get (123xyz.stackpathcdn.com). + +Once you are happy with everything, you simply switch your DNS to point to Stackpath. + +That's it—you’re finished! + +If you have any questions about JAMstack or migrating to this technology, reach out! + +## Next Steps + +I encourage you to go ahead and use this technique as the starting point for one of the pages on your own website! You can use [Agility CMS for free](https://agilitycms.com/v3-free-signup-developers?source=devto) to do it. + +## BONUS CONTENT! + +As a companion to this article, I recorded a video that walks you through the steps I took and the different tools involved. I also highlight some of the really neat features of Agility CMS, Gatsby, Netlify, and Stackpath. 
+ +[![Migrating a website to JAMstack with Gatsby](https://res.cloudinary.com/marcomontalbano/image/upload/v1586464859/video_to_markdown/images/youtube--WSIzYKDgJuE-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/embed/WSIzYKDgJuE "Migrating a website to JAMstack with Gatsby") diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-1.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-1.png new file mode 100644 index 0000000000000..d2fb5577091c0 Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-1.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-2.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-2.png new file mode 100644 index 0000000000000..3e39d4759e004 Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-2.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-3.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-3.png new file mode 100644 index 0000000000000..56acad47f7885 Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-3.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-4.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-4.png new file mode 100644 index 0000000000000..8591c472e3d6a Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-4.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-5.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-5.png new file mode 100644 index 0000000000000..f774a63b65e2d Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-5.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-6.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-6.png new file mode 100644 index 0000000000000..db693894bb03a Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-6.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-7.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-7.png new file mode 100644 index 0000000000000..029071e04469c Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-7.png differ diff --git a/docs/blog/2020-04-14-LA-2020-Kim/index.md b/docs/blog/2020-04-14-LA-2020-Kim/index.md new file mode 100644 index 0000000000000..1717d54988795 --- /dev/null +++ b/docs/blog/2020-04-14-LA-2020-Kim/index.md @@ -0,0 +1,23 @@ +--- +title: 'Gatsby Days LA 2020 Video 10: Teaching Web Development to Beginners with Gatsby' +date: 2020-04-14 +author: Greg Thomas +excerpt: "UC Davis grad student Daniel Kim explains why Gatsby offers +a better framework than React for teaching beginning web development +at Gatsby Days LA 2020." +tags: +- gatsby-days +- community +- themes +- contentful +- markdown +- learning-to-code +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! 
(Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +Daniel Kim is a master’s degree student at the University of California, Davis, and the founder of [Bit Project](https://www.bitproject.org/)—a student organization that strives to make technical education accessible to more people. With over 70 developer members, Bit Project has reached more than 3,100 students. The group is working to expand its community through technical workshops, coding bootcamps, and outreach programs. + +Daniel and his team recently decided to launch a new five-week course to teach beginning web development. They considered using React as the basis for the course but found that the extreme customization available with React made it tough for teaching. Instead, they chose Gatsby. Watch Daniel’s presentation from Gatsby Days LA 2020 and learn why hot reloading, routing, themes, and other capabilities made Gatsby the right choice. Then hear how Daniel used Gatsby in each week of the course to teach key web development concepts. + +[![Teaching Web Development to Beginners with Gatsby.js - Daniel Kim - Gatsby Days LA 2020](https://res.cloudinary.com/marcomontalbano/image/upload/v1586358897/video_to_markdown/images/youtube--XQ1hGhIk1IA-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=XQ1hGhIk1IA "Teaching Web Development to Beginners with Gatsby.js - Daniel Kim - Gatsby Days LA 2020") diff --git a/docs/blog/2020-04-14-virtual-gatsby-days-registration/index.md b/docs/blog/2020-04-14-virtual-gatsby-days-registration/index.md new file mode 100644 index 0000000000000..25e1d7dd5bc88 --- /dev/null +++ b/docs/blog/2020-04-14-virtual-gatsby-days-registration/index.md @@ -0,0 +1,22 @@ +--- +title: "Register Now for Virtual Gatsby Days" +date: 2020-04-14 +author: Laci Texter +excerpt: "Our first virtual Gatsby Days is coming June 2-3, 2020. This free remote conference will be live-streamed on YouTube from 9am to noon PST both days, and you can register now!" +tags: + - gatsby-days + - community + - gatsby-cloud +--- + +Back before the world changed so suddenly, our quarterly Gatsby Days was an in-person gathering: A way to assemble the Gatsby community, guest presenters, and Gatsby team for a day long conference and celebration-slash-deep-dive into all things Gatsby. Obviously we can't do things the same way right now, but that's not going to stop us! + +Join us for Virtual Gatsby Days, June 2nd and 3rd: Registration is now open at https://www.gatsbyjs.com/virtual-gatsby-days. + +![Gatsby logo transposed on binary background receding into distance](./virutalGatsbyDays.jpg "Gatsby logo on futuristic binary background") + +Virtual Gatsby Days will continue to focus on the future of modern website development, use cases, and deep dive into topics such as what’s new in the web technology stack. You’ll have the opportunity to participate in a Q&A with Gatsby creators, Kyle Mathews and Sam Bhagwat, hear other Gatsby core team members speak, and connect with the Gatsby community. + +Register for Virtual Gatsby Days, mark your calendars for June 2nd - 3rd and follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with speaker announcements and other developments. + +In case you missed it, we’re still accepting speaker submissions! 
[Fill out the CFP](https://docs.google.com/forms/d/e/1FAIpQLSfjUpqpmRL18ydo_PmC4jxvPG8xhOlix43KeRHOhUbPp3u7Mw/viewform?usp=sf_link) by April 24th and let us know what you’re thinking about. We’re reviewing submissions as they come in so the earlier you submit, the better. diff --git a/docs/blog/2020-04-14-virtual-gatsby-days-registration/virutalGatsbyDays.jpg b/docs/blog/2020-04-14-virtual-gatsby-days-registration/virutalGatsbyDays.jpg new file mode 100644 index 0000000000000..41447c26fad69 Binary files /dev/null and b/docs/blog/2020-04-14-virtual-gatsby-days-registration/virutalGatsbyDays.jpg differ diff --git a/docs/blog/2020-04-15-LA-2020-estevez/index.md b/docs/blog/2020-04-15-LA-2020-estevez/index.md new file mode 100644 index 0000000000000..92c9230f10388 --- /dev/null +++ b/docs/blog/2020-04-15-LA-2020-estevez/index.md @@ -0,0 +1,20 @@ +--- +title: "Gatsby Days LA 2020 Video 11: Building Accessible Components (Without First Reading Docs for Days)" +date: 2020-04-15 +author: Greg Thomas +excerpt: "New York Times Senior Software Engineer Yuraima Estevez shows how developers can improve the accessibility of websites in three “easy” steps that do not involve days of documentation reading." +tags: + - gatsby-days + - community + - accessibility + - documentation + - diversity-and-inclusion +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +Yuraima Estevez is a senior software engineer tech lead at the New York Times who is passionate about building open source tools and enabling empathetic web development. At Gatsby Days LA 2020, Yuraima focused on accessibility. To realize the power of the web’s universality, developers must build sites that are accessible to people with disabilities. But doing so can be challenging, especially when there is a ton of documentation to sort through. + +Yuraima believes that building accessible components can help streamline progress toward delivering accessible sites. Learn how you can increase accessibility and improve support for assistive technologies as you are building components through three “easy” steps: using semantic HTML whenever possible, employing ARIA (Accessible Rich Internet Applications) attributes, and integrating keyboard navigation capabilities. Adopting this three-step approach also makes much more efficient use of available documentation. 
+ +[![TL;DR for Accessible Components - Yuraima Estevez - Gatsby Days LA 2020](https://res.cloudinary.com/marcomontalbano/image/upload/v1586365251/video_to_markdown/images/youtube--Qu3HuUKLNh8-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=Qu3HuUKLNh8&t=1s "TL;DR for Accessible Components - Yuraima Estevez - Gatsby Days LA 2020") diff --git a/docs/blog/2020-04-15-get-together-by-staying-apart-for-now/index.md b/docs/blog/2020-04-15-get-together-by-staying-apart-for-now/index.md new file mode 100644 index 0000000000000..4f8a21e55ecd0 --- /dev/null +++ b/docs/blog/2020-04-15-get-together-by-staying-apart-for-now/index.md @@ -0,0 +1,35 @@ +--- +title: "Gatsby Community Events and COVID-19" +date: 2020-04-15 +author: Caitlin Cashin +excerpt: "Gatsby is all about our amazing community, but we are also all about safety. So we are asking most sincerely: If there's an in-person Gatsby community meetup on your calendar, please reschedule or figure out a way to hold it remotely. Here are some ways we are doing that ourselves." +tags: + - community + - diversity-and-inclusion + - gatsby-days +--- + +Getting folks together to learn, grow, and connect with each other is the lifeblood of any community, and especially Gatsby’s community. In-person events have always been something that our team has worked to support and encourage. Unfortunately, with the spread of COVID19, those in-person events have become a major health risk. + +We love that you love getting together as much as we do. But in light of the current situation, we ask that you please reschedule all in-person Gatsby community events or find a way to hold them remotely until the World Health Organization and/or your local authorities say it’s safe for people to meet in groups again. + +## What can you do instead? + +The good news is it’s easier than ever to interact with people online and host remote events. Here are some ideas for engaging with the community and sharing your Gatsby expertise while still safely physically, if not socially, distancing: + +1. **Host your own Gatsby webinar.** Invite the folks who usually attend your in-person events to a live stream or online call, and give a Gatsby presentation. +2. **Do some live pair programming.** Get together with one of your Gatsby buddies on your preferred remote interaction tools and stream on Twitch or YouTube. You can also [sign up to pair with someone from the Gatsby team](https://www.gatsbyjs.org/contributing/pair-programming/). You might even make it onto our [Twitch channel](https://www.twitch.tv/gatsbyjs)! +3. **Chat with other community members on the [Gatsby Discord server](https://gatsby.dev/discord).** +4. **Hang out on the [Gatsby Twitch channel](https://www.twitch.tv/gatsbyjs).** Check out our channel for info on our upcoming streams. +5. **Plan to attend [Virtual Gatsby Days](https://www.gatsbyjs.org/blog/2020-04-14-virtual-gatsby-days-registration/)**, June 2nd & 3rd! + +## Some great tools for remote events + +We have the technology! There are lots of free tools and applications you can use to host an awesome remote event. 
Here are a few that we’ve used at Gatsby: + +- [Lightstream Studio](https://golightstream.com/) for streaming on Twitch and YouTube +- [Zoom](https://zoom.us/) for remote calls and meetings +- [Glitch](https://glitch.com/) for building apps, sharing projects, and learning to code +- [FindCollabs](https://findcollabs.com/) for hosting online hackathons + +We’ve got big plans for the Gatsby community and events coming down the road! You can [follow Gatsby on Twitter](https://twitter.com/gatsbyjs) for up-to-the-minute announcements and updates. So stay tuned, stay healthy, and stay home—so we can _all_ be here to gather together when pandemic restrictions are a thing of the past 💜. diff --git a/docs/blog/author.yaml b/docs/blog/author.yaml index ad866b4ec3982..a65839e41bcd8 100644 --- a/docs/blog/author.yaml +++ b/docs/blog/author.yaml @@ -410,3 +410,7 @@ - id: Debra Combs bio: "Product Manager, Writer, Gamer, and lover of laughter." avatar: avatars/debra-combs.png +- id: Joel Varty + bio: "Dad to teens, HS football coach, president of Agility CMS." + avatar: avatars/joel-varty.jpg + twitter: "@joelvarty" diff --git a/docs/blog/avatars/joel-varty.jpg b/docs/blog/avatars/joel-varty.jpg new file mode 100644 index 0000000000000..d140c70568672 Binary files /dev/null and b/docs/blog/avatars/joel-varty.jpg differ diff --git a/docs/contributing/organize-a-gatsby-event.md b/docs/contributing/organize-a-gatsby-event.md index 6ac7030caa45f..849f66ffc22f5 100644 --- a/docs/contributing/organize-a-gatsby-event.md +++ b/docs/contributing/organize-a-gatsby-event.md @@ -2,4 +2,4 @@ title: Organize a Gatsby Event --- -**IMPORATANT NOTE ON COMMUNITY EVENTS: Promotion and support of Gatsby community events is currently suspended due to COVID-19. Stay tuned for updates on when our community events program will resume.** +**IMPORTANT NOTE ON COMMUNITY EVENTS: Promotion and support of Gatsby community events is currently suspended due to COVID-19. Stay tuned for updates on when our community events program will resume.** diff --git a/docs/contributing/translation/sync-guide.md b/docs/contributing/translation/sync-guide.md index 4e652cc8113a9..ff2a4ae44a1b9 100644 --- a/docs/contributing/translation/sync-guide.md +++ b/docs/contributing/translation/sync-guide.md @@ -2,7 +2,11 @@ title: Keeping Translations Up-to-date --- -Periodically, gatsbybot will update your translation repo to be up-to-date with the current English repo. If there is an update to a page that is already translated, gatsbybot will create a pull request listing the conflicts between translation and the new English content. Resolving these conflicts and merging these pull requests is essential to keeping your translation repo up-to-date. +Every Friday at 6:00 PM PST, gatsbybot will run a sync script on every translation repo to bring them up-to-date with the current English repo. If there is an update to a page that is already translated, gatsbybot will create a pull request listing the conflicts between the translation and the new English content. Resolving these conflicts and merging these pull requests is essential to keeping your translation repo up-to-date. + +## Gatsbybot sync behavior + +If there are existing sync pull requests marked by the `sync` label, gatsbybot will skip the repository until the next time it runs. Because of this, it is important to make sure that you merge sync and conflict pull requests promptly, so that stale translations don't build up. 
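For illustration, the skip rule amounts to checking whether the repo has any open pull request still carrying the `sync` label. The sketch below is **not** gatsbybot's actual implementation — it only assumes the `@octokit/rest` client and a `GITHUB_TOKEN` environment variable to show the idea:

```javascript
// Illustrative sketch only — gatsbybot's real sync script may differ.
const { Octokit } = require("@octokit/rest")

async function shouldSkipRepo(owner, repo) {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN })

  // List the repo's open pull requests
  const { data: pulls } = await octokit.pulls.list({ owner, repo, state: "open" })

  // Skip this repo if any open PR still carries the `sync` label
  return pulls.some(pr => pr.labels.some(label => label.name === "sync"))
}

// Example usage with a hypothetical translation repo name:
// shouldSkipRepo("gatsbyjs", "gatsby-es").then(skip => console.log(skip))
```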
## Resolving sync pull requests diff --git a/docs/docs/add-custom-webpack-config.md b/docs/docs/add-custom-webpack-config.md index 60a40ee01668d..00c75eebc2a47 100644 --- a/docs/docs/add-custom-webpack-config.md +++ b/docs/docs/add-custom-webpack-config.md @@ -75,6 +75,19 @@ exports.onCreateWebpackConfig = ({ stage, actions }) => { You can always find more information on _resolve_ and other options in the official [Webpack docs](https://webpack.js.org/concepts/). +### Importing non-webpack tools using `yarn` + +Note that using absolute imports only applies to webpack resolutions and will not work for other tools, e.g. eslint or typescript. +But if you are using yarn, then the best practice is to set up your imports in package.json as shown below: + +```js +{ + "dependencies": { + "hooks": "link:./src/hooks", + } +} +``` + ### Modifying the Babel loader You need this if you want to do things like transpile parts of `node_modules`. diff --git a/docs/docs/building-an-e-commerce-site.md b/docs/docs/building-an-e-commerce-site.md index 5eeecc6381b00..bf4bc0d56b7b1 100644 --- a/docs/docs/building-an-e-commerce-site.md +++ b/docs/docs/building-an-e-commerce-site.md @@ -4,7 +4,7 @@ title: Building an E-commerce Site The speed and performance of sites built with Gatsby make it a great tool for building e-commerce sites. There are existing plugins for connecting services like [Shopify](/packages/gatsby-source-shopify/) and [Snipcart](/packages/gatsby-plugin-snipcart/) to Gatsby, and this section contains reference guides to help get things setup. -To see examples of e-commerce sites built with Gatsby, check out the [showcase](/showcase/?filters%5B0%5D=eCommerce). +To see examples of e-commerce sites built with Gatsby, check out the [showcase](/showcase/?filters%5B0%5D=E-commerce). diff --git a/docs/docs/creating-a-source-plugin.md b/docs/docs/creating-a-source-plugin.md index 0af8ac02d0e47..fa905bcb9711b 100644 --- a/docs/docs/creating-a-source-plugin.md +++ b/docs/docs/creating-a-source-plugin.md @@ -4,11 +4,11 @@ title: Creating a Source Plugin Source plugins are essentially out of the box integrations between Gatsby and various third-party systems. -These systems can be CMSs like Contentful or WordPress, other cloud services like Lever and Strava, or your local filesystem -- literally anything that has an API. Currently, Gatsby has [over 300 source plugins](/plugins/?=gatsby-source). +These systems can be CMSs like Contentful or WordPress, other cloud services like Lever and Strava, or your local filesystem -- literally anything that has an API. Currently, Gatsby has [over 400 source plugins](/plugins/?=gatsby-source). Once a source plugin brings data into Gatsby's system, it can be transformed further with **transformer plugins**. For step-by-step examples of how to create source and transformer plugins, check out the Gatsby [tutorials section](/tutorial/plugin-and-theme-tutorials/). -## What do source plugins do? +## Overview of a source plugin At a high-level, a source plugin: @@ -18,26 +18,59 @@ At a high-level, a source plugin: - Links nodes & creates relationships between them. - Lets Gatsby know when nodes are finished sourcing so it can move on to processing them. -## What does the code look like? +A source plugin is a regular npm package. It has a `package.json` file, with optional dependencies, as well as a [`gatsby-node.js`](/docs/api-files-gatsby-node) file where you implement Gatsby's [Node APIs](/docs/node-apis/). 
Read more about [files Gatsby looks for in a plugin](/docs/files-gatsby-looks-for-in-a-plugin/) or [creating a generic plugin](/docs/creating-a-generic-plugin). -A source plugin is a regular NPM package. It has a `package.json` file with optional -dependencies as well as a [`gatsby-node.js`](/docs/api-files-gatsby-node) file where you implement Gatsby's [Node -APIs](/docs/node-apis/). Read more about [Files Gatsby Looks for in a Plugin](/docs/files-gatsby-looks-for-in-a-plugin/). +## Implementing features for source plugins -Gatsby's minimum supported Node.js version is Node 8 and as it's common to want to use more modern Node.js and JavaScript syntax, many plugins write code in a -source directory and compile the code. All plugins maintained in the Gatsby repo -follow this pattern. +Key features that are often built into source plugins are covered in this guide to help explain Gatsby specific helpers and APIs, independent of the source the data is coming from. -Your `gatsby-node.js` should look something like: +> You can see examples of all the features implemented in this guide (sourcing data, caching, live data synchronization, and remote image optimization) **in the working example repository** for [creating source plugins](https://github.com/gatsbyjs/gatsby/tree/master/examples/creating-source-plugins) which contains a local server you can run to test with an example source plugin. -```javascript:title=gatsby-node.js +### Sourcing data and creating nodes + +All source plugins must fetch data and create nodes from that data. By fetching data and creating nodes at [build time](/docs/glossary#build), Gatsby can make the data available as static assets instead of having to fetch it at [runtime](/docs/glossary#runtime). This happens in the [`sourceNodes` lifecycle](/docs/node-apis/#sourceNodes) with the [`createNode` action](/docs/actions/#createNode). + +This example—taken from [the `sourceNodes` API docs](/docs/node-apis/#sourceNodes)—shows how to create a single node from hardcoded data: + +```javascript:title=source-plugin/gatsby-node.js +exports.sourceNodes = ({ actions, createNodeId, createContentDigest }) => { + const { createNode } = actions + + // Data can come from anywhere, but for now create it manually + const myData = { + key: 123, + foo: `The foo field of my node`, + bar: `Baz`, + } + + const nodeContent = JSON.stringify(myData) + + const nodeMeta = { + id: createNodeId(`my-data-${myData.key}`), + parent: null, + children: [], + internal: { + type: `MyNodeType`, + mediaType: `text/html`, + content: nodeContent, + contentDigest: createContentDigest(myData), + }, + } + + const node = Object.assign({}, myData, nodeMeta) + createNode(node) +} +``` + +Source plugins follow the same pattern, the only difference is that data comes from other sources. Plugins can leverage Node.js built-in functions like `http.get`, libraries like `node-fetch` or `axios`, or even fully-featured GraphQL clients to fetch data. With data being returned from a remote location, the plugin code can loop through and create nodes programmatically: + +```javascript:title=source-plugin/gatsby-node.js exports.sourceNodes = async ({ actions }) => { const { createNode } = actions - // Create nodes here, generally by downloading data - // from a remote API. + // Download data from a remote API. const data = await fetch(REMOTE_API) - // Process data into nodes. 
+ // Process data and create nodes.using a custom processDatum function data.forEach(datum => createNode(processDatum(datum))) // You're done, return. @@ -45,120 +78,409 @@ exports.sourceNodes = async ({ actions }) => { } ``` -Peruse the [`sourceNodes`](/docs/node-apis/#sourceNodes) and -[`createNode`](/docs/actions/#createNode) docs for detailed -documentation on implementing those APIs. +The [`createNode`](/docs/actions/#createNode) function is a Gatsby specific action. `createNode` is used to create the nodes that Gatsby tracks and makes available for querying with GraphQL. -### Transforming data received from remote sources +_Note: **Be aware of asynchronous operations!** Because fetching data is an asynchronous task, you need to make sure you `await` data coming from remote sources, return a Promise, or return the callback (the 3rd parameter available in lifecycle APIs) from `sourceNodes`. If you don't, Gatsby will continue on in the build process, before nodes are finished being created. This can result in your nodes not ending up in the generated schema at compilation time, or the process could hang while waiting for an indication that it's finished. You can read more in the [Debugging Asynchronous Lifecycle APIs guide](/docs/debugging-async-lifecycles/)._ -Each node created by the filesystem source plugin includes the -raw content of the file and its _media type_. +### Caching data between runs -[A **media type**](https://en.wikipedia.org/wiki/Media_type) (also **MIME type** -and **content type**) is an official way to identify the format of -files/content that is transmitted on the internet, e.g. over HTTP or through -email. You might be familiar with other media types such as -`application/javascript`, `application/pdf`, `audio/mpeg`, `text/html`, -`text/plain`, `image/jpeg`, etc. +Some operations like fetching data from an endpoint can be performance heavy or time-intensive. In order to improve the experience of developing with your source plugin, you can leverage the Gatsby cache to store data between runs of `gatsby develop` or `gatsby build`. -Each source plugin is responsible for setting the media type for the nodes they -create. This way, source and transformer plugins can work together easily. +You access the `cache` in Gatsby Node APIs and use the `set` and `get` functions to store and retrieve data as JSON objects. -This is not a required field -- if it's not provided, Gatsby will [infer](/docs/glossary#inference) the type from data that is sent -- but it's the way for source plugins to indicate to -transformers that there is "raw" data that can still be further processed. It -also allows plugins to remain small and focused. Source plugins don't have to have -opinions on how to transform their data: they can set the `mediaType` and -push that responsibility to transformer plugins, instead. +```javascript:title=source-plugin/gatsby-node.js +exports.onPostBuild = async ({ cache }) => { + await cache.set(`key`, `value`) + const cachedValue = await cache.get(`key`) + console.log(cachedValue) // logs `value` +} +``` -For example, it's common for services to allow you to add content in -Markdown format. If you pull that Markdown into Gatsby and create a new node, what -then? How would a user of your source plugin convert that Markdown into HTML -they can use in their site? You would create a -node for the Markdown content and set its `mediaType` as `text/markdown` and the -various Gatsby Markdown transformer plugins would see your node and transform it -into HTML. 
+The above snippet shows a contrived example for the `cache`, but it can be used in more sophisticated cases to reduce the time it takes to run your plugin. For example, by caching a timestamp, you can use it to fetch solely the data that has been updated since the last time data was fetched from the source: -This loose coupling between the data source and the transformer plugins allow Gatsby site builders to assemble complex data transformation pipelines with -little work on their (and your (the source plugin author)) part. +```javascript:title=source-plugin/gatsby-node.js +exports.sourceNodes = async ({ cache }) => { + // get the last timestamp from the cache + const lastFetched = await cache.get(`timestamp`) -## Getting helper functions + // pull data from some remote source using cached data as an option in the request + const data = await fetch( + `https://remotedatasource.com/posts?lastUpdated=${lastFetched}` + ) + // ... +} -[`gatsby-node-helpers`](https://github.com/angeloashmore/gatsby-node-helpers), -a community-made NPM package, can help when writing source plugins. This -package provides a set of helper functions to generate Node objects with the -required fields. This includes automatically generating fields like node IDs -and the `contentDigest` MD5 hash, keeping your code focused on data gathering, -not boilerplate. +exports.onPostBuild = async ({ cache }) => { + // set a timestamp at the end of the build + await cache.set(`timestamp`, Date.now()) +} +``` -## Gotcha: don't forget to return! +> In addition to the cache, plugins can save metadata to the [internal Redux store](/docs/data-storage-redux/) with `setPluginStatus`. -After your plugin is finished sourcing nodes, it should either return a Promise or use the callback (3rd parameter) to report back to Gatsby when `sourceNodes` is fully executed. If a Promise or callback isn't returned, Gatsby will continue on in the build process, before nodes are finished being created. Without the necessary return statement your nodes might not end up in the generated schema at compilation time, or the process will hang while waiting for an indication that it's finished. +This can reduce the time it takes repeated data fetching operations to run if you are pulling in large amounts of data for your plugin. Existing plugins like [`gatsby-source-contentful`](https://github.com/gatsbyjs/gatsby/blob/master/packages/gatsby-source-contentful/src/gatsby-node.js) generate a token that is sent with each request to only return new data. -## Advanced +You can read more about the cache API, other types of plugins that leverage the cache, and example open source plugins that use the cache in the [build caching guide](/docs/build-caching). ### Adding relationships between nodes Gatsby source plugins not only create nodes, they also create relationships between nodes that are exposed to GraphQL queries. -There are two ways of adding node relationships in Gatsby: (1) transformations (parent-child) or (2) foreign-key based. - -#### Option 1: transformation relationships +There are two types of node relationships in Gatsby: (1) foreign-key based and (2) transformations (parent-child). -An example of a transformation relationship is the `gatsby-transformer-remark` plugin, which transforms a parent `File` node's markdown string into a `MarkdownRemark` node. The Remark transformer plugin adds its newly created child node as a child of the parent node using the action [`createParentChildLink`](/docs/actions/#createParentChildLink). 
Transformation relationships are used when a new node is _completely_ derived from a single parent node. E.g. the markdown node is derived from the parent `File` node and wouldn't ever exist if the parent `File` node hadn't been created. +#### Option 1: foreign-key relationships -Because all children nodes are derived from their parent, when a parent node is deleted or changed, Gatsby deletes all of the child nodes (and their child nodes, and so on) with the expectation that they'll be recreated again by transformer plugins. This is done to ensure there are no nodes left over that were derived from older versions of data but shouldn't exist any longer. +An example of a foreign-key relationship would be a `Post` type (like a blog post) that has an `Author`. -_Creating the transformation relationship_ +In this relationship, each object is a distinct entity that exists whether or not the other does. They could each be queried individually. -In order to create a parent/child relationship, when calling `createNode` for the child node, the new node object that is passed in should have a `parent` key with the value set to the parent node's `id`. After this, call the `createParentChildLink` function exported inside `actions`. - -_Examples_ +```graphql +post { + id + title +} +author { + id + name +} +``` -[Here's the above example](https://github.com/gatsbyjs/gatsby/blob/72077527b4acd3f2109ed5a2fcb780cddefee35a/packages/gatsby-transformer-remark/src/on-node-create.js#L39-L67) from the `gatsby-transformer-remark` source plugin. +Each type has independent schemas and field(s) on that reference the other entity -- in this case the `Post` would have an `Author`, and the `Author` might have `Post`s. The API of a service that allows complex object modelling, for example a CMS, will often allow users to add relationships between entities and expose them through the API. This same relationship can be represented by your schema. + +```graphql +post { + id + title + // highlight-start + author { + id + name + } + // highlight-end +} +author { + id + name + // highlight-start + posts { + id + title + } + // highlight-end +} +``` -[Here's another example](https://github.com/gatsbyjs/gatsby/blob/1fb19f9ad16618acdac7eda33d295d8ceba7f393/packages/gatsby-transformer-sharp/src/on-node-create.js#L3-L25) from the `gatsby-transformer-sharp` source plugin. +When an object node is deleted, Gatsby _does not_ delete any referenced entities. When using foreign-key references, it's a source plugin's responsibility to clean up any dangling entity references. -#### Option 2: foreign-key relationships +##### Creating the relationship -An example of a foreign-key relationship would be a Post that has an Author. +Suppose you want to create a relationship between `Post`s and `Author`s in order to query the `author` field on a post: + +```graphql +query { + post { + id + // highlight-start + author { + id + name + } + // highlight-end + } +} +``` -In this relationship, each object is a distinct entity that exists whether or not the other does, with independent schemas, and field(s) on each entity that reference the other entity -- in this case the Post would have an Author, and the Author might have Posts. The API of a service that allows complex object modelling, for example a CMS, will often allow users to add relationships between entities and expose them through the API. 
+For Gatsby to automatically infer a relationship, you need to create a field called `author___NODE` on the Post object to hold the relationship to Authors before you create the node. The value of this field should be the node ID of the Author.

-When an object node is deleted, Gatsby _does not_ delete any referenced entities. When using foreign-key references, it's a source plugin's responsibility to clean up any dangling entity references.
+```javascript:title=source-plugin/gatsby-node.js
+exports.sourceNodes = ({ actions, createContentDigest }) => {
+  const { createNode } = actions
+  createNode({
+    // Data for the Post node
+    author___NODE: ``, // highlight-line
+    // Required fields
+    id: `a-node-id`,
+    parent: null,
+    children: [],
+    internal: {
+      type: `post`,
+      contentDigest: createContentDigest(fieldData),
+    }
+  })
+}
+```

-##### Creating the relationship
+For a stricter GraphQL schema, you can specify the exact field and value to link nodes using schema customization APIs.

-Suppose you want to create a relationship between Posts and Authors, and you want to call the field `author`.
+```javascript:title=source-plugin/gatsby-node.js
+exports.sourceNodes = ({ actions, createContentDigest }) => {
+  const { createNode } = actions
+  createNode({
+    // Data for the Post node
+    // highlight-start
+    author: {
+      name: `Jay Gatsby`,
+    },
+    // highlight-end
+    // Required fields
+    id: `a-node-id`,
+    parent: null,
+    children: [],
+    internal: {
+      type: `post`,
+      contentDigest: createContentDigest(fieldData),
+    }
+  })
+}

-Before you pass the Post object and Author object into `createNode` and create the respective nodes, you need to create a field called `author___NODE` on the Post object to hold the relationship to Authors. The value of this field should be the node ID of the Author.
+exports.createSchemaCustomization = ({ actions }) => {
+  const { createTypes } = actions
+  createTypes(`
+    type Post implements Node {
+      id: ID!
+      # create a relationship between the Post and its Author node
+      author: Author @link(from: "author.name", by: "name") // highlight-line
+      # ... other fields
+    }`)
+}
+```

 ##### Creating the reverse relationship

-It's often convenient for querying to add to the schema backwards references. For example, you might want to query the Author of a Post but you might also want to query all the posts an author has written.
+It's often convenient for querying to add backwards references to the schema. For example, you might want to query the author of a post, but you might also want to query all the posts an author has written.

-If you want to call this field on `Author` `posts`, you would create a field called `posts___NODE` to hold the relationship to Posts. The value of this field should be an array of Post IDs.
+If you want to add a `posts` field to the `Author` nodes using the inference method, you would create a field called `posts___NODE` to hold the relationship to posts. The value of this field should be an array of `Post` IDs. Here's an example from the [WordPress source plugin](https://github.com/gatsbyjs/gatsby/blob/1fb19f9ad16618acdac7eda33d295d8ceba7f393/packages/gatsby-source-wordpress/src/normalize.js#L178-L189).

+With schema customization, you would add the `@link` directive to your Author type. The `@link` directive will look for an ID on the `post` field of the Author nodes, which can be added when the Author nodes are created.
+
+```javascript:title=source-plugin/gatsby-node.js
+exports.createSchemaCustomization = ({ actions }) => {
+  const { createTypes } = actions
+  createTypes(`
+    type Post implements Node {
+      id: ID!
+      # create a relationship between the Post and its Author node
+      author: Author @link(from: "author.name", by: "name") // highlight-line
+      # ... other fields
+    }
+
+    type Author implements Node {
+      name: String!
+      post: Post @link // highlight-line
+    }`)
+}
+```
+
+You can read more about connecting foreign key fields with schema customization in the guide on [customizing the GraphQL schema](/docs/schema-customization/#foreign-key-fields).
+
+#### Option 2: transformation relationships
+
+When a node is _completely_ derived from another node, you'll want to use a transformation relationship. A common example in source plugins is transforming File nodes from remote sources, e.g. images. You can read about this use case in the section below on [sourcing images from remote locations](/docs/creating-a-source-plugin/#sourcing-images-from-remote-locations).
+
+You can find more information about transformation relationships in the [creating a transformer plugin guide](/docs/creating-a-transformer-plugin/#creating-the-transformer-relationship).
+
 #### Union types

-When creating fields linking to an array of nodes, if the array of IDs are all of the same type, the relationship field that is created will be of this type. If the linked nodes are of different types; the field will turn into a union type of all types that are linked. See the [GraphQL documentation on how to query union types](https://graphql.org/learn/schema/#union-types).
+For either type of relationship you can link a field to an array of nodes. If the array of IDs all correspond to nodes of the same type, the relationship field that is created will be of this type. If the linked nodes are of different types, the field will turn into a union type of all types that are linked. See the [GraphQL documentation on how to query union types](https://graphql.org/learn/schema/#union-types).
+
+### Working with data received from remote sources
+
+#### Setting media and MIME types
+
+Each node created by the filesystem source plugin includes the raw content of the file and its _media type_.
+
+[A **media type**](https://en.wikipedia.org/wiki/Media_type) (also **MIME type** and **content type**) is an official way to identify the format of files/content that are transmitted via the internet, e.g. over HTTP or through email. You might be familiar with other media types such as `application/javascript`, `audio/mpeg`, `text/html`, etc.
+
+Each source plugin is responsible for setting the media type for the nodes it creates. This way, source and transformer plugins can work together easily.
+
+This is not a required field -- if it's not provided, Gatsby will [infer](/docs/glossary#inference) the type from data that is sent -- but it's how source plugins indicate to transformers that there is "raw" data the transformer can further process.
+
+It also allows plugins to remain small and focused. Source plugins don't have to have opinions on how to transform their data: they can set the `mediaType` and push that responsibility to transformer plugins instead.
+
+For example, it's common for services to allow you to add content in Markdown format. If you pull that Markdown into Gatsby and create a new node, what then? How would a user of your source plugin convert that Markdown into HTML they can use in their site?
You would create a node for the Markdown content and set its `mediaType` as `text/markdown`, and the various Gatsby Markdown transformer plugins would see your node and transform it into HTML.

-#### Further specification
+This loose coupling between the data source and the transformer plugins allows Gatsby site builders to assemble complex data transformation pipelines with little work on their part (and on yours, as the source plugin author).

-See
-[_Node Link_](/docs/api-specification/) in the API Specification concepts
-section for more info.
+#### Sourcing and optimizing images from remote locations
+
+A common use case for source plugins is pulling images from a remote location and optimizing them for use with [Gatsby Image](/packages/gatsby-image/). An API may return a URL for an image on a CDN, which could be further optimized by Gatsby at build time.
+
+This can be achieved by the following steps:
+
+1. Install `gatsby-source-filesystem` as a dependency in your source plugin:
+
+```
+npm install gatsby-source-filesystem
+```
+
+2. Create File nodes using the `createRemoteFileNode` function exported by `gatsby-source-filesystem`:
+
+```javascript:title=source-plugin/gatsby-node.js
+const { createRemoteFileNode } = require(`gatsby-source-filesystem`)
+
+exports.onCreateNode = async ({
+  actions: { createNode },
+  getCache,
+  createNodeId,
+  node,
+}) => {
+  // because onCreateNode is called for all nodes, verify that you are only running this code on nodes created by your plugin
+  if (node.internal.type === `your-source-node-type`) {
+    // create a FileNode in Gatsby that gatsby-transformer-sharp will create optimized images for
+    const fileNode = await createRemoteFileNode({
+      // the url of the remote image to generate a node for
+      url: node.imgUrl,
+      getCache,
+      createNode,
+      createNodeId,
+      parentNodeId: node.id,
+    })
+  }
+}
+```
+
+3. Add the ID of the new File node to your source plugin's node.
+
+```javascript:title=source-plugin/gatsby-node.js
+const { createRemoteFileNode } = require(`gatsby-source-filesystem`)
+
+exports.onCreateNode = async ({
+  actions: { createNode },
+  getCache,
+  createNodeId,
+  node,
+}) => {
+  // because onCreateNode is called for all nodes, verify that you are only running this code on nodes created by your plugin
+  if (node.internal.type === `your-source-node-type`) {
+    // create a FileNode in Gatsby that gatsby-transformer-sharp will create optimized images for
+    const fileNode = await createRemoteFileNode({
+      // the url of the remote image to generate a node for
+      url: node.imgUrl,
+      getCache,
+      createNode,
+      createNodeId,
+      parentNodeId: node.id,
+    })
+
+    // highlight-start
+    if (fileNode) {
+      // with schemaCustomization: add a field `remoteImage` to your source plugin's node from the File node
+      node.remoteImage = fileNode.id
+
+      // OR with inference: link your source plugin's node to the File node without schemaCustomization like this, but creates a less sturdy schema
+      node.remoteImage___NODE = fileNode.id
+    }
+    // highlight-end
+  }
+}
+```
+
+Attaching `fileNode.id` to `remoteImage___NODE` will rely on Gatsby's [inference](/docs/glossary/#inference) of the GraphQL schema to create a new field `remoteImage` as a relationship between the nodes. This is done automatically.
For a sturdier schema, you can relate them using [`schemaCustomization` APIs](/docs/node-apis/#createSchemaCustomization) by adding the `fileNode.id` to a field that you reference when you `createTypes`:
+
+```javascript:title=source-plugin/gatsby-node.js
+exports.createSchemaCustomization = ({ actions }) => {
+  const { createTypes } = actions
+  createTypes(`
+    type YourSourceType implements Node {
+      id: ID!
+      # create a relationship between YourSourceType and the File nodes for optimized images
+      remoteImage: File @link // highlight-line
+    }`)
+}
+```
+
+4. Verify that `gatsby-plugin-sharp` and `gatsby-transformer-sharp` are included in the site that is using the plugin:
+
+```javascript:title=gatsby-config.js
+module.exports = {
+  plugins: [
+    // loads the source-plugin
+    `your-source-plugin`,
+    // required to generate optimized images
+    `gatsby-plugin-sharp`,
+    `gatsby-transformer-sharp`,
+  ],
+}
+```
+
+Then, the sharp plugins will automatically transform the File nodes created by `createRemoteFileNode` in `your-source-plugin` (which have supported image extensions like .jpg or .png). You can then query for the `remoteImage` field on your source type:
+
+```graphql
+query {
+  yourSourceType {
+    id
+    remoteImage {
+      childImageSharp {
+        # fluid or fixed fields for optimized images
+      }
+    }
+  }
+}
+```

 ### Improve plugin developer experience by enabling faster sync

-One tip to improve the development experience of using a plugin is to reduce the time it takes to sync between Gatsby and the data source. There are two approaches for doing this:
+One challenge when developing locally is that a developer might make modifications in a remote data source, like a CMS, and then want to see how it looks in the local environment. Typically they will have to restart the `gatsby develop` server to see changes. In order to improve the development experience of using a plugin, you can reduce the time it takes to sync between Gatsby and the data source by enabling faster synchronization of data changes. There are two approaches for doing this:
+
+- **Proactively fetch updates**. You can avoid having to restart the `gatsby develop` server by proactively fetching updates from the remote server. For example, [gatsby-source-sanity](https://github.com/sanity-io/gatsby-source-sanity) listens to changes to Sanity content when `watchMode` is enabled and pulls them into the Gatsby develop server. The [example source plugin repository](https://github.com/gatsbyjs/gatsby/tree/master/examples/creating-source-plugins) uses GraphQL subscriptions to listen for changes and update data.
+- **Add event-based sync**. Some data sources keep event logs and are able to return a list of objects modified since a given time. If you're building a source plugin, you can store the last time you fetched data using the [cache](/docs/creating-a-source-plugin/#caching-data-between-runs) or [`setPluginStatus`](/docs/actions/#setPluginStatus) and then only sync down nodes that have been modified since that time (a sketch of this approach is included at the end of this section). [gatsby-source-contentful](https://github.com/gatsbyjs/gatsby/tree/master/packages/gatsby-source-contentful) is an example of a source plugin that does this.
+
+The proactive listener approach creates the best experience if the existing APIs in the data source can support it (or you have access to build support into the data source).
+
+The code to support this behavior looks like this:
+
+```javascript:title=source-plugin/gatsby-node.js
+exports.sourceNodes = async (
+  { actions, getNode, getNodesByType, createNodeId },
+  pluginOptions
+) => {
+  const { createNode, touchNode, deleteNode } = actions
+
+  // highlight-start
+  // touch nodes to ensure they aren't garbage collected
+  getNodesByType(`YourSourceType`).forEach(node => touchNode({ nodeId: node.id }))
+
+  // ensure a plugin is in a preview mode and/or supports listening
+  if (pluginOptions.preview) {
+    const subscription = await client.subscribe(SUBSCRIPTION_TO_WEBSOCKET)
+    subscription.subscribe(({ data: newData }) => {
+      newData.forEach(newDatum => {
+        switch (newDatum.status) {
+          case "deleted":
+            deleteNode({
+              node: getNode(createNodeId(`YourSourceType-${newDatum.uuid}`)),
+            })
+            break
+          case "created":
+          case "updated":
+          default:
+            // created and updated can be handled by the same code path
+            // the post's id is presumed to stay constant (or can be inferred)
+            createNode(processDatum(newDatum))
+            break
+        }
+      })
+    })
+  }
+  // highlight-end
+
+  const data = await client.query(QUERY_TO_API)
+
+  // Process data and create nodes using a custom processDatum function
+  data.forEach(datum => createNode(processDatum(datum)))
+
+  // You're done, return.
+  return
+}
+```
+
+_Note: This is pseudo code to illustrate the logic and concept of how these plugins function; you can see a working example in the [creating source plugins](https://github.com/gatsbyjs/gatsby/tree/master/examples/creating-source-plugins) repository._
+
+Because the code in `sourceNodes` is reinvoked when changes in the data source occur, a few steps need to be taken to ensure that Gatsby is tracking the existing nodes as well as the new data. A first step is ensuring that the existing nodes created are not garbage collected, which is done by "touching" the nodes with the [`touchNode` action](/docs/actions/#touchNode).

-- **Add event-based sync**. Some data sources keep event logs and are able to return a list of objects modified since a given time. If you're building a source plugin, you can store
-  the last time you fetched data using
-  [`setPluginStatus`](/docs/actions/#setPluginStatus) and then only sync down nodes that have been modified since that time. [gatsby-source-contentful](https://github.com/gatsbyjs/gatsby/tree/master/packages/gatsby-source-contentful) is an example of a source plugin that does this.
-- **Proactively fetch updates**. One challenge when developing locally is that a developer might make modifications in a remote data source, like a CMS, and then want to see how it looks in the local environment. Typically they will have to restart the `gatsby develop` server to see changes. This can be avoided if your source plugin knows to proactively fetch updates from the remote server. For example, [gatsby-source-sanity](https://github.com/sanity-io/gatsby-source-sanity), listens to changes to Sanity content when `watchMode` is enabled and pulls them into the Gatsby develop server.
+Then the new data needs to be pulled in via a live update, like a websocket (in the example above, a subscription). The new data needs to have some information attached that dictates whether the data was created, updated, or deleted; that way, when it is processed, a new node can be created/updated (with `createNode`) or deleted (with `deleteNode`). In the example above, that information is coming from `newDatum.status`.
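+
+For the event-based approach, a minimal sketch could look like the following. It reuses the placeholder endpoint and the `processDatum` helper from the snippets above, and it assumes that data saved with `setPluginStatus` can be read back from Gatsby's internal store under `status.plugins`, keyed by the plugin name -- treat that lookup as an assumption to verify against the [`setPluginStatus`](/docs/actions/#setPluginStatus) and [internal Redux store](/docs/data-storage-redux/) docs rather than a guaranteed API:
+
+```javascript:title=source-plugin/gatsby-node.js
+exports.sourceNodes = async ({ actions, getNodesByType, store }) => {
+  const { createNode, touchNode, setPluginStatus } = actions
+
+  // touch existing nodes so they aren't garbage collected between runs
+  getNodesByType(`YourSourceType`).forEach(node => touchNode({ nodeId: node.id }))
+
+  // read the timestamp saved by a previous run (falls back to 0 on the first run)
+  // assumption: plugin status is stored under status.plugins, keyed by plugin name
+  const status = store.getState().status
+  const pluginStatus = (status.plugins && status.plugins[`your-source-plugin`]) || {}
+  const lastFetched = pluginStatus.lastFetched || 0
+
+  // only request entries modified since the last successful sync
+  // (fetching and parsing are left as pseudo code, as in the examples above)
+  const data = await fetch(
+    `https://remotedatasource.com/posts?lastUpdated=${lastFetched}`
+  )
+
+  // Process data and create nodes using a custom processDatum function
+  data.forEach(datum => createNode(processDatum(datum)))
+
+  // remember when this sync finished for the next run
+  setPluginStatus({ lastFetched: Date.now() })
+}
+```
+
+Whether you keep this timestamp with `setPluginStatus` or with the `cache` shown earlier is largely a matter of preference; both are persisted between runs (and cleared when the Gatsby cache is cleared).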
## Additional resources -- Tutorial: [Creating a Pixabay Image Source Plugin](/tutorial/pixabay-source-plugin-tutorial/) +- Working example repository on [creating source plugins](https://github.com/gatsbyjs/gatsby/tree/master/examples/creating-source-plugins) with the features in this guide implemented +- Tutorial on [Creating a Pixabay Image Source Plugin](/tutorial/pixabay-source-plugin-tutorial/) +- [`gatsby-node-helpers`](https://github.com/angeloashmore/gatsby-node-helpers), a community-made npm package with helper functions to generate Node objects with required fields like IDs and the `contentDigest` MD5 hash. diff --git a/docs/docs/creating-a-transformer-plugin.md b/docs/docs/creating-a-transformer-plugin.md index 5ec01d2a7945a..19358dc4989eb 100644 --- a/docs/docs/creating-a-transformer-plugin.md +++ b/docs/docs/creating-a-transformer-plugin.md @@ -102,6 +102,8 @@ Now you have a `File` node to work with: Now, transform the newly created `File` nodes by hooking into the `onCreateNode` API in `gatsby-node.js`. +#### Convert yaml into JSON for storage in Gatsby nodes + If you're following along in an example project, install the following packages: ```shell @@ -137,41 +139,67 @@ File content: Parsed YAML content: -```javascript -;[ +```json +[ { - id: "Jane Doe", - bio: "Developer based in Somewhere, USA", + "id": "Jane Doe", + "bio": "Developer based in Somewhere, USA" }, { - id: "John Smith", - bio: "Developer based in Maintown, USA", - }, + "id": "John Smith", + "bio": "Developer based in Maintown, USA" + } ] ``` Now you'll write a helper function to transform the parsed YAML content into new Gatsby nodes: -```javascript +```javascript:title=gatsby-node.js function transformObject(obj, id, type) { const yamlNode = { ...obj, id, children: [], - parent: node.id, + parent: null, internal: { contentDigest: createContentDigest(obj), type, }, } createNode(yamlNode) - createParentChildLink({ parent: node, child: yamlNode }) } ``` Above, you create a `yamlNode` object with the shape expected by the [`createNode` action](/docs/actions/#createNode). -You then create a link between the parent node (file) and the child node (yaml content). +#### Creating the transformer relationship + +You then need to create a link between the parent node (file) and the child node (yaml content) using the `createParentChildLink` function after adding the parent node's id to the `yamlNode`: + +```javascript:title=gatsby-node.js +function transformObject(obj, id, type) { + const yamlNode = { + ...obj, + id, + children: [], + parent: node.id, // highlight-line + internal: { + contentDigest: createContentDigest(obj), + type, + }, + } + createNode(yamlNode) + createParentChildLink({ parent: node, child: yamlNode }) // highlight-line +} +``` + +Another example of a transformation relationship is the `gatsby-source-filesystem` plugin used with the `gatsby-transformer-remark` plugin. This combination transforms a parent `File` node's markdown string into a `MarkdownRemark` node. The remark transformer plugin adds its newly created child node as a child of the parent node using the action [`createParentChildLink`](/docs/actions/#createParentChildLink). Transformation relationships like this are used when a new node is _completely_ derived from a single parent node. E.g. the markdown node is derived from the parent `File` node and would not exist if the parent `File` node hadn't been created. 
+ +Because all children nodes are derived from their parent, when a parent node is deleted or changed, Gatsby deletes all of the child nodes (and their child nodes, and so on). Gatsby does so with the expectation that they'll be recreated again by transformer plugins. This is done to ensure there are no nodes left over that were derived from older versions of data but should no longer exist. + +_For examples of other plugins creating transformation relationships, you can see the [`gatsby-transformer-remark` plugin](https://github.com/gatsbyjs/gatsby/blob/72077527b4acd3f2109ed5a2fcb780cddefee35a/packages/gatsby-transformer-remark/src/on-node-create.js#L39-L67) (from the above example) or the [`gatsby-transformer-sharp` plugin](https://github.com/gatsbyjs/gatsby/blob/1fb19f9ad16618acdac7eda33d295d8ceba7f393/packages/gatsby-transformer-sharp/src/on-node-create.js#L3-L25)._ + +#### Create new nodes from the derived data In your updated `gatsby-node.js`, you'll then iterate through the parsed YAML content, using the helper function to transform each into a new node: @@ -227,6 +255,8 @@ async function onCreateNode({ exports.onCreateNode = onCreateNode ``` +#### Query for the transformed data + Now you can query for your new nodes containing our transformed YAML data: ```graphql diff --git a/docs/docs/deploying-to-firebase.md b/docs/docs/deploying-to-firebase.md index 2fb8a440705ab..d0d159489f34a 100644 --- a/docs/docs/deploying-to-firebase.md +++ b/docs/docs/deploying-to-firebase.md @@ -47,61 +47,61 @@ In this guide, you will learn how to deploy your Gatsby site to Firebase Hosting 1. Update the `firebase.json` with the following cache settings -```json -{ - "hosting": { - "public": "public", - "ignore": ["firebase.json", "**/.*", "**/node_modules/**"], - "headers": [ - { - "source": "**/*", - "headers": [ - { - "key": "cache-control", - "value": "cache-control: public, max-age=0, must-revalidate" - } - ] - }, - { - "source": "static/**", - "headers": [ - { - "key": "cache-control", - "value": "public, max-age=31536000, immutable" - } - ] - }, - { - "source": "**/*.@(css|js)", - "headers": [ - { - "key": "cache-control", - "value": "public, max-age=31536000, immutable" - } - ] - }, - { - "source": "sw.js", - "headers": [ - { - "key": "cache-control", - "value": "cache-control: public, max-age=0, must-revalidate" - } - ] - }, - { - "source": "page-data/**", - "headers": [ - { - "key": "cache-control", - "value": "cache-control: public, max-age=0, must-revalidate" - } - ] - } - ] - } -} -``` + ```json + { + "hosting": { + "public": "public", + "ignore": ["firebase.json", "**/.*", "**/node_modules/**"], + "headers": [ + { + "source": "**/*", + "headers": [ + { + "key": "cache-control", + "value": "cache-control: public, max-age=0, must-revalidate" + } + ] + }, + { + "source": "static/**", + "headers": [ + { + "key": "cache-control", + "value": "public, max-age=31536000, immutable" + } + ] + }, + { + "source": "**/*.@(css|js)", + "headers": [ + { + "key": "cache-control", + "value": "public, max-age=31536000, immutable" + } + ] + }, + { + "source": "sw.js", + "headers": [ + { + "key": "cache-control", + "value": "cache-control: public, max-age=0, must-revalidate" + } + ] + }, + { + "source": "page-data/**", + "headers": [ + { + "key": "cache-control", + "value": "cache-control: public, max-age=0, must-revalidate" + } + ] + } + ] + } + } + ``` 1. Prepare your site for deployment by running `gatsby build`. This generates a publishable version of your site in the `public` folder. 
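+
+   For example, run the build from your project's root directory (the folder that contains `firebase.json` and `gatsby-config.js`):
+
+   ```shell
+   gatsby build
+   ```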
diff --git a/docs/docs/glossary.md b/docs/docs/glossary.md
index 52b1fc0849c10..d2dbb5c97a0ae 100644
--- a/docs/docs/glossary.md
+++ b/docs/docs/glossary.md
@@ -220,9 +220,9 @@ A way of writing HTML content with plain text, using special characters to denot

 ## N

-### NPM
+### [npm](/docs/glossary/npm)

-[Node](#node) [Package](#package) Manager. Allows you to install and update other packages that your project depends on. [Gatsby](#gatsby) and [React](#react) are examples of your project's dependencies. See also: [Yarn](#yarn).
+[Node](#node) [package](#package) manager. Allows you to install and update other packages that your project depends on. [Gatsby](#gatsby) and [React](#react) are examples of your project's dependencies. See also: [Yarn](#yarn).

 ### Node

diff --git a/docs/docs/glossary/npm.md b/docs/docs/glossary/npm.md
new file mode 100644
index 0000000000000..f6caff679440f
--- /dev/null
+++ b/docs/docs/glossary/npm.md
@@ -0,0 +1,59 @@
+---
+title: npm or Node package manager
+disableTableOfContents: true
+---
+
+Learn what _npm_ is, how to use it, and how it fits into the Gatsby ecosystem.
+
+## What is npm?
+
+npm, or Node package manager, is the default package manager for the [Node.js](/docs/glossary/node) JavaScript runtime. It lets you install and update libraries and frameworks (dependencies) for Node-based projects, and interact with the npm Registry. You'll use npm to install and upgrade Gatsby and its plugins.
+
+npm is a [command line](/docs/glossary#command-line) tool. You'll need Terminal (Mac, Linux) or Command Prompt (Windows) in order to run its commands. To use one of npm's features, type `npm <command>`. For example, `npm help` displays a list of available features, including `install`, `uninstall`, `update`, and `search`.
+
+npm is installed alongside Node during the default [installation process](/tutorial/part-zero/#install-nodejs-for-your-appropriate-operating-system). You don't need to take any additional steps to add it to your environment.
+
+### Using npm to install Gatsby
+
+You'll need to install Gatsby globally to use Gatsby CLI commands such as `gatsby new`. To do so, use `npm install` with the `--global` or `-g` flag.
+
+```shell
+npm install -g gatsby-cli
+```
+
+Once the installation completes, you can run `gatsby new my-project` to create a new Gatsby project.
+
+### Using npx to install Gatsby
+
+> **Note:** `npx` requires npm version 5.2 or later. If you've installed the latest versions of Node and npm, you should also have npx. Otherwise, you should upgrade Node and/or npm.
+
+You can also use [npx](https://www.npmjs.com/package/npx) to install Gatsby. npx ships with npm. It allows you to install a package and run a command in one step. For example, instead of running `npm install -g gatsby-cli` then `gatsby new my-project`, you could use the following command.
+
+```shell
+npx gatsby new my-project
+```
+
+This will download and install the latest version of Gatsby, then create a new Gatsby project in the `my-project` folder. Choosing this method will not make the Gatsby CLI globally available, however. If you install Gatsby using npx, you'll need to use `npx gatsby` or `npm run` to execute Gatsby commands, e.g.: `npx gatsby develop` or `npm run develop`.
+
+### Using npm to install Gatsby plugins
+
+Gatsby has a robust collection of [plugins](/plugins/) that add functionality or data sourcing to your Gatsby sites. Adding a plugin as a project dependency uses the same process as installing Gatsby itself. Use `npm install <plugin-name>`.
To add the [gatsby-source-filesystem](/packages/gatsby-source-filesystem), plugin, for example, you'd use the following command. + +```shell +npm install gatsby-source-filesystem +``` + +> **Note:** Use `npm install` to add plugins, even if you installed Gatsby using npx. + +> **Note:** You'll still need to update `gatsby-config.js` to add the plugin's functionality to your site. + +This will update the dependencies list of `package.json` and `package-lock.json`. Commit both files to your project's repository. Doing so makes it easy to keep your Gatsby project consistent across team members and computers. When another team member clones your repository, they can use `npm install` to install the dependencies included in `package-lock.json`. + +### Learn more about npm + +- [npm](https://www.npmjs.com/) official website +- [Node.js](https://nodejs.org/en/) official website +- [An introduction to the npm package manager](https://nodejs.dev/an-introduction-to-the-npm-package-manager) from Nodejs.dev +- [Set Up Your Development Environment](/tutorial/part-zero/) from the Gatsby docs diff --git a/docs/docs/mdx/programmatically-creating-pages.md b/docs/docs/mdx/programmatically-creating-pages.md index c84402cf24500..0393573b580e9 100644 --- a/docs/docs/mdx/programmatically-creating-pages.md +++ b/docs/docs/mdx/programmatically-creating-pages.md @@ -274,13 +274,17 @@ component should look like: ```jsx:title=src/components/posts-page-layout.js import React from "react" import { graphql } from "gatsby" +import { MDXProvider } from "@mdx-js/react" import { MDXRenderer } from "gatsby-plugin-mdx" +import { Link } from "gatsby" export default function PageTemplate({ data: { mdx } }) { return (

      <h1>{mdx.frontmatter.title}</h1>

-      <MDXRenderer>{mdx.body}</MDXRenderer>
+      <MDXProvider components={{ Link }}>
+        <MDXRenderer>{mdx.body}</MDXRenderer>
+      </MDXProvider>
) } diff --git a/docs/docs/security-in-gatsby.md b/docs/docs/security-in-gatsby.md index 2a82f6c793b45..bcb751a70e50d 100644 --- a/docs/docs/security-in-gatsby.md +++ b/docs/docs/security-in-gatsby.md @@ -10,7 +10,7 @@ Cross-Site Scripting is a type of attack that injects a script or an unexpected JSX elements automatically escape HTML tags by design. See the following example: -```js +```jsx // highlight-next-line const username = `` @@ -23,7 +23,7 @@ On the other hand, fields in your application may need to render inner HTML tags In order to render those HTML tags you need to use an HTML parser (e.g. [html-react-parser](https://github.com/remarkablemark/html-react-parser)) or the `dangerouslySetInnerHTML` prop, like so: -```js +```jsx const CommentRenderer = comment => ( // highlight-next-line

@@ -51,15 +51,20 @@ For example, assume that the comments in your blog are sent in a form similar to A malicious website could inspect your site and copy this snippet to theirs. If the user is logged in, the associated cookies are sent with the form and the server cannot distinguish the origin of it. Even worse, the form could be sent when the page loads with information you don't control: -```js +```html // highlight-next-line - -

- // highlight-next-line - - -
+ +
+ // highlight-next-line + + +
+ ``` ### How can you prevent cross-site request forgery? @@ -76,7 +81,7 @@ Actions that do not simply read data should be handled in a POST request. In the If you want to protect a page your server will provide an encrypted, hard to guess **token**. That token is tied to a user's session and must be included in every POST request. See the following example: -```js +```html
// highlight-next-line @@ -121,18 +126,18 @@ Similar to npm, you can use the `yarn audit` command. It is available starting w Gatsby allows you to [fetch data from various APIs](/docs/content-and-data/) and those APIs often require a key to access them. These keys should be stored in your build environment using [Environment Variables](/docs/environment-variables/). See the following example for fetching data from GitHub with an Authorization Header: ```js - { - resolve: "gatsby-source-graphql", - options: { - typeName: "GitHub", - fieldName: "github", - url: "https://api.github.com/graphql", - headers: { - // highlight-next-line - Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, - }, - } - } +{ + resolve: "gatsby-source-graphql", + options: { + typeName: "GitHub", + fieldName: "github", + url: "https://api.github.com/graphql", + headers: { + // highlight-next-line + Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, + }, + } +} ``` ### Storing keys in client-side @@ -150,11 +155,11 @@ currently there is a [compatibility issue](https://github.com/gatsbyjs/gatsby/is ## Other Resources -- [Security for Modern Web Frameworks](https://www.gatsbyjs.org/blog/2019-04-06-security-for-modern-web-frameworks/) -- [Docs ReactJS: DOM Elements](https://reactjs.org/docs/dom-elements.html#dangerouslysetinnerhtml) +- [Security for Modern Web Frameworks](/blog/2019-04-06-security-for-modern-web-frameworks/) +- [Docs React: DOM Elements](https://reactjs.org/docs/dom-elements.html#dangerouslysetinnerhtml) - [OWASP XSS filter evasion cheatsheet](https://owasp.org/www-community/xss-filter-evasion-cheatsheet) - [OWASP CSRF prevention cheat sheet](https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#samesite-cookie-attribute) -- [Warn for javascript: URLs in DOM sinks #15047](https://github.com/facebook/react/pull/15047) +- [Warn for JavaScript: URLs in DOM sinks #15047](https://github.com/facebook/react/pull/15047) - [How to prevent XSS attacks when using dangerouslySetInnerHTML in React](https://medium.com/@Jam3/how-to-prevent-xss-attacks-when-using-dangerouslysetinnerhtml-in-react-f669f778cebb) - [Exploiting XSS via Markdown](https://medium.com/taptuit/exploiting-xss-via-markdown-72a61e774bf8) - [Auditing package dependencies for security vulnerabilities](https://docs.npmjs.com/auditing-package-dependencies-for-security-vulnerabilities) diff --git a/docs/docs/tailwind-css.md b/docs/docs/tailwind-css.md index e7b9ff5be2dfd..4e8cb0bfc8838 100644 --- a/docs/docs/tailwind-css.md +++ b/docs/docs/tailwind-css.md @@ -66,7 +66,7 @@ To learn more about how to use Tailwind in your CSS, visit the [Tailwind Documen ### Option #2: CSS-in-JS -These steps assume you have a CSS-in-JS library already installed, and the examples are based on Styled Components. +These steps assume you have a CSS-in-JS library already installed, and the examples are based on Emotion. 1. Install Tailwind Babel Macro @@ -108,7 +108,7 @@ npm install tailwind.macro 2. 
Use the Babel Macro (`tailwind.macro`) in your styled component ```javascript -import styled from "styled-components" +import styled from "@emotion/styled" import tw from "tailwind.macro" // All versions diff --git a/docs/docs/third-party-graphql.md b/docs/docs/third-party-graphql.md index 723e7db95a24d..d9b90ed34ab4f 100644 --- a/docs/docs/third-party-graphql.md +++ b/docs/docs/third-party-graphql.md @@ -100,3 +100,4 @@ exports.createPages = async ({ actions, graphql }) => { - [Example with GraphCMS](https://github.com/freiksenet/gatsby-graphcms) - [Example with Hasura](https://github.com/hasura/graphql-engine/tree/master/community/sample-apps/gatsby-postgres-graphql) - [Example with AWS AppSync](https://github.com/aws-samples/aws-appsync-gatsby-sample) +- [Example with Dgraph](https://github.com/dgraph-io/gatsby-dgraph-graphql) diff --git a/docs/docs/using-cloudinary-image-service.md b/docs/docs/using-cloudinary-image-service.md new file mode 100644 index 0000000000000..fab73d0bd0d6c --- /dev/null +++ b/docs/docs/using-cloudinary-image-service.md @@ -0,0 +1,156 @@ +--- +title: Using Cloudinary image service for media optimization +--- + +Cloudinary is a cloud-based end-to-end media management platform that provides solutions to help site creators serve optimized media files (images and videos) to their audiences. It also provides a lot of optional transformations that can be carried out on these media assets. + +In this guide you will take a look at the [gatsby-source-cloudinary](/packages/gatsby-source-cloudinary/) and [gatsby-transformer-cloudinary](/packages/gatsby-transformer-cloudinary/) plugins which you can use to improve the experience of handling images on Gatsby sites. + +Plugins are generally used to abstract functionality in Gatsby. In this case, the `gatsby-source-cloudinary` plugin is a [source plugin](/docs/creating-a-source-plugin/) which helps to connect Cloudinary media storage capabilities to your site. + +> Here's a [demo site that uses the gatsby-source-cloudinary](https://gsc-sample.netlify.com) showcasing optimized images in a masonry grid, served from Cloudinary. + +## The problem with handling images on the web + +Dealing with images on the web has always been a problem as unoptimized images can slow down your site. The processes put in place to create the best media experience can take a lot of time to implement. + +## Solutions Cloudinary provides + +Cloudinary provides a couple of amazing solutions to this problem, namely: + +- Remote storage and delivery of images via CDN +- Offers a wider range of transformations than [gatsby-image](/docs/using-gatsby-image/). +- [Digital Asset Management](https://cloudinary.com/documentation/digital_asset_management_overview) for enterprise assets + +## Gatsby-source-cloudinary + +This plugin fetches media assets from Cloudinary that are specified in a folder. It then transforms these images into Cloudinary file nodes, which can be queried with GraphQL in a Gatsby project. +`gatsby-source-cloudinary` applies [f_auto and q_auto](https://cloudinary.com/documentation/image_transformations) transformation parameters which aid in automatic optimisation of format and quality for media assets by over 70 percent. + +### Prerequisites + +Before using the `gatsby-source-cloudinary` plugin you should do the following: + +- Upload your images to a folder on Cloudinary. This folder can have any name of your choosing. +- Obtain your API key and API secret from your Cloudinary dashboard. 
+- Have the [dotenv](https://www.npmjs.com/package/dotenv) module installed for loading environment variables from a `.env` file. + +### Using gatsby-source-cloudinary + +Add the plugin to your project. + +1. Install `gatsby-source-cloudinary` + +```shell + npm install gatsby-source-cloudinary +``` + +2. In the root of your project, create an environment file called `.env` and add your Cloudinary credentials and their values + +``` +CLOUDINARY_API_KEY=xxxxxxxxxxxxxx +CLOUDINARY_API_SECRET=xxxxxxxxxxxxxxxxxxxx +CLOUDINARY_CLOUD_NAME=xxxxx +``` + +3. Configure `gatsby-config.js` + +```js:title=gatsby-config.js +require('dotenv').config(); // highlight-line +module.exports = { + ... + plugins:[ + ... + { + resolve: `gatsby-source-cloudinary`, + options: { + cloudName: process.env.CLOUDINARY_CLOUD_NAME, + apiKey: process.env.CLOUDINARY_API_KEY, + apiSecret: process.env.CLOUDINARY_API_SECRET, + resourceType: `image`, + prefix: `gatsby-source-cloudinary/` + } + } + ] +} +``` + +Note that `gatsby-source-cloudinary` takes the following options: + +- **`cloudName`** , **`apiKey`** , and **`apiSecret`** **:** These are credentials from your Cloudinary console, stored as three separate environment variables for security. +- **`resourceType`** **:** This is the resource type of the media assets - either an image or a video. +- **`prefix`** **:** This is the folder (in your Cloudinary account) in which the files reside. In the example above, the folder is called `gatsby-source-cloudinary`. Assign a name of your choice. + Other optional options are `type`, `tags`, and `maxResult`. + +Here's a [link to the README](https://github.com/Chuloo/gatsby-source-cloudinary#query-parameters) for more information. + +## Gatsby-transformer-cloudinary + +After sourcing media files from Cloudinary, you will be able to leverage Cloudinary’s media transformation capabilities. To do so, use `gatsby-transformer-cloudinary` which is a type of [transformer plugin](/docs/creating-a-transformer-plugin/) that is used to change image formats, styles and dimensions. It also optimizes images for minimal file size alongside high visual quality for an improved user experience and minimal bandwidth. + +Here's a [demo site that uses the gatsby-transformer-plugin](https://gatsby-transformer-cloudinary.netlify.com/fluid/) + +### Prerequisites + +Before using the `gatsby-transformer-cloudinary` plugin you should do the following: + +- Upload your images to a folder on Cloudinary. This folder can have any name of your choosing. +- Have the `gatsby-source-cloudinary` plugin installed and configured. +- Obtain your API key and API secret from your Cloudinary dashboard. +- Have the [dotenv](https://www.npmjs.com/package/dotenv) module installed for loading environment variables from a `.env` file. + +### Using gatsby-transformer-cloudinary + +1. Install `gatsby-transformer-cloudinary` and `gatsby-source-filesystem` which creates the File nodes that the Cloudinary transformer plugin works on. + +```shell + npm install gatsby-transformer-cloudinary gatsby-source-filesystem +``` + +2. In the root of your project, create an environment file called `.env` to which to add your Cloudinary credentials and their values. + +``` +CLOUDINARY_API_KEY=xxxxxxxxxxxxxx +CLOUDINARY_API_SECRET=xxxxxxxxxxxxxxxxxxxx +CLOUDINARY_CLOUD_NAME=xxxxx +``` + +3. 
Configure `gatsby-config.js` + +```js:title=gatsby-config.js +require('dotenv').config({ + path: `.env.${process.env.NODE_ENV}`, +}); + +module.exports = { + plugins: [ + { + resolve: `gatsby-source-filesystem`, + options: { + name: `images`, + path: `${__dirname}/src/images`, + }, + }, + { + resolve: 'gatsby-transformer-cloudinary', + options: { + cloudName: process.env.CLOUDINARY_CLOUD_NAME, + apiKey: process.env.CLOUDINARY_API_KEY, + apiSecret: process.env.CLOUDINARY_API_SECRET, + + // This folder will be created if it doesn’t exist. + uploadFolder: 'gatsby-cloudinary', + + }, + ], +}; +``` + +> In `gatsby-config.js`, responsive breakpoints can be created for each image, use the `fluidMaxWidth` and `fluidMinWidth` options to set them. Take a look at the [plugin documentation](https://www.npmjs.com/package/gatsby-transformer-cloudinary#api) for more information on how these parameters can be set. + +## Additional resources + +- [Faster Sites with Optimized Media Assets by William Imoh](/blog/2020-01-12-faster-sites-with-optimized-media-assets/) +- [Gatsby Transformer Cloudinary](https://www.npmjs.com/package/gatsby-transformer-cloudinary) +- [Gatsby Source Cloudinary](/packages/gatsby-source-cloudinary/) +- [Aspect ratio parameter](https://cloudinary.com/documentation/image_transformation_reference#aspect_ratio_parameter) diff --git a/docs/docs/using-gatsby-image.md b/docs/docs/using-gatsby-image.md index 4e6d7b4a1fb0e..17b18536a6e10 100644 --- a/docs/docs/using-gatsby-image.md +++ b/docs/docs/using-gatsby-image.md @@ -29,7 +29,7 @@ But creating optimized images for websites has long been a thorny problem. Ideal - Use the “blur-up” technique or a “traced placeholder” SVG to show a preview of the image while it loads - Hold the image position so your page doesn’t jump while the images load -Doing this consistently across a site feels like Sisyphean labor. You manually optimize your images and then… several images are swapped in at the last minute or a design-tweak shaves 100px of width off your images. +Doing this consistently across a site feels like a task that can never be completed. You manually optimize your images and then… several images are swapped in at the last minute or a design-tweak shaves 100px of width off your images. Most solutions involve a lot of manual labor and bookkeeping to ensure every image is optimized. diff --git a/docs/sites.yml b/docs/sites.yml index cf3b4e573a35c..178631243b5de 100644 --- a/docs/sites.yml +++ b/docs/sites.yml @@ -10354,3 +10354,46 @@ built_by: Masatoshi Nishiguchi built_by_url: https://mnishiguchi.com featured: false +- title: WhileNext + url: https://whilenext.com + main_url: https://whilenext.com + description: > + A Blog on Software Development + categories: + - Blog + - Learning + - Programming + - Web Development + built_by: Masoud Karimi + built_by_url: https://github.com/masoudkarimif +- title: Jamify.me + description: > + We build websites & PWAs with JAMstack. Delivering faster, more secure web. + main_url: https://jamify.me + url: https://jamify.me + categories: + - Agency + - Web Development + featured: false +- title: The Cares Family + main_url: https://thecaresfamily.org.uk/home + url: https://thecaresfamily.org.uk/home + description: > + The Cares Family helps people find connection and community in a disconnected age. They relaunched their website in Gatsby during the COVID-19 outbreak of 2020 to help connect neighbours. 
+ categories: + - Nonprofit + - Blog + - Community + built_by: Mutual + built_by_url: https://www.madebymutual.com +- title: "Due to COVID-19: Documenting the Signs of the Pandemic" + url: https://duetocovid19.com + main_url: https://duetocovid19.com + description: > + A project to document all the signs that have gone up on the storefronts of our cities in response to the coronavirus pandemic. + categories: + - Photography + - Community + built_by: Andrew Louis + built_by_url: https://hyfen.net + featured: false diff --git a/docs/starters.yml b/docs/starters.yml index 729783be16311..613eeac9a6a62 100644 --- a/docs/starters.yml +++ b/docs/starters.yml @@ -5300,12 +5300,13 @@ - Language:TypeScript - Linting - Netlify + - Testing features: - - ✔️ Gatsby - - ✔️ TypeScript - - ✔️ Prettier - - ✔️ ESLint - - ✔️ Deploy to Netlify through GitHub Actions + - TypeScript + - ESLint for JS linting + - Prettier code formatting + - Jest for testing + - Deploy to Netlify through GitHub Actions - url: https://answer.netlify.com/ repo: https://github.com/passwd10/gatsby-starter-answer description: A simple Gatsby blog to show your Future Action on top of the page @@ -5852,6 +5853,41 @@ - CSS-in-Reason support - StaticQuery GraphQL support in ReasonML - Similar to gatsby-starter-blog + +- url: https://gct.mozart409.space/ + repo: https://github.com/Mozart409/gatsby-custom-tailwind + description: A minimal tailwind css starter, with custom fonts, purgecss, automatic linting when committing to master, awesome lighthouse audit, custom zeit/serve server for production build, visible to all in your network, so you can test it with your phone. + tags: + - Linting + - PWA + - SEO + - Styling:Tailwind + - Styling:PostCSS + features: + - Minimal Tailwind Starter + - Custom Fonts predefined + - Automatic Linting on Commit using husky and pretty-quick + - Custom server to test Production Builds on your local network via zeit/serve + - Extensive Readme in the repo +- url: https://gatsby-redux-toolkit-typescript.netlify.com/ + repo: https://github.com/saimirkapaj/gatsby-redux-toolkit-typescript-starter + description: Gatsby Starter using Redux-Toolkit, Typescript, Styled Components and Tailwind CSS. + tags: + - Redux + - Language:TypeScript + - Styling:Tailwind + features: + - Redux-Toolkit + - Typescript + - Styled Components + - Tailwind CSS + - Removes unused CSS with Purgecss + - Font Awesome Icons + - Responsive Design + - Change between light and dark themes + - SEO + - React Helmet + - Offline Support - url: https://gatsby-ts-tw-styled-eslint.netlify.com repo: https://github.com/Miloshinjo/gatsby-ts-tw-styled-eslint-starter description: Gatsby starter with Typescript, TailwindCSS, @emotion/styled and eslint. @@ -5912,3 +5948,30 @@ - Stylesheet built using Sass and BEM-Style naming. - Syntax highlighting in code blocks. - Google Analytics support. +- url: https://gatsby-starter-ts.now.sh/ + repo: https://github.com/jpedroschmitz/gatsby-starter-ts + description: A TypeScript starter for Gatsby. No plugins and styling. Exactly the necessary to start! + tags: + - Language:TypeScript + - Styling:None + - Linting + features: + - TypeScript + - ESLint and Prettier + - Husky and lint-staged + - Commitizen and Commitlint + - TypeScript absolute paths +- url: https://rolwinreevan.com + repo: https://github.com/rolwin100/rolwinreevan_gatsby_blog + description: This starter consists of ant design system you can use it for your personal blog. 
I have give a lot of time in developing this starter because I found that there were not much starters with a very good design. Please give a star to this project if you have like it to encourage me 😄. Thank you. + tags: + - Blog + - Portfolio + - Markdown + - SEO + - PWA + features: + - Blog designed using Markdown. + - Beautifully designed landing page. + - First project in the starters list to use ant design. + - Supports SSR and is also a PWA. diff --git a/docs/tutorial/building-a-theme.md b/docs/tutorial/building-a-theme.md index b12ec21b77f20..b5727aa11314f 100644 --- a/docs/tutorial/building-a-theme.md +++ b/docs/tutorial/building-a-theme.md @@ -1047,6 +1047,8 @@ exports.createPages = async ({ actions, graphql, reporter }, options) => { } ``` +Note that the example above sets default values for `options`. This behavior was also included in the prior `gatsby-config.js` example. You only need to set default values once, but both mechanisms for doing so are valid. + > 💡 Up till now, you've mostly worked in the `gatsby-theme-events` space. Because you've converted the theme to use a function export, you can no longer run the theme on its own. The function export in `gatsby-config.js` is only supported for themes. From now on you'll be running `site` -- the Gatsby site consuming `gatsby-theme-events`, instead. Gatsby sites still require the object export in `gatsby-config.js`. Test out this new options-setting by making some adjustments to `site`. diff --git a/docs/tutorial/e-commerce-with-datocms-and-snipcart/index.md b/docs/tutorial/e-commerce-with-datocms-and-snipcart/index.md index 6700c1358dda5..04df1906b6389 100644 --- a/docs/tutorial/e-commerce-with-datocms-and-snipcart/index.md +++ b/docs/tutorial/e-commerce-with-datocms-and-snipcart/index.md @@ -25,11 +25,11 @@ You can sign up for the following accounts now or as you need to use each of the - [Snipcart](https://snipcart.com/): add a shopping cart to your site - [Netlify](https://www.netlify.com/): host your site and register a domain -To edit code locally (affecting files stored on your computer), you'll need the following software. If you don't already know what these are or want additional background information, check out [Step 0 of the Gatsby tutorial](https://www.gatsbyjs.org/tutorial/part-zero/). It includes detailed instructions on how to set up a local development environment. +To edit code locally (affecting files stored on your computer), you'll need the following software. If you don't already know what these are or want additional background information, check out [Step 0 of the Gatsby tutorial](/tutorial/part-zero/). It includes detailed instructions on how to set up a local development environment. -- [node](https://nodejs.org): run JavaScript on your computer +- [Node.js](https://nodejs.org): run JavaScript on your computer - [Git](https://git-scm.com/downloads): track changes to your code -- [Gatsby command line interface (CLI)](https://www.gatsbyjs.org/tutorial/part-zero/#using-the-gatsby-cli): run Gatsby commands on your computer +- [Gatsby command line interface (CLI)](/tutorial/part-zero/#using-the-gatsby-cli): run Gatsby commands on your computer ## Provisioning Your Site on Gatsby Cloud @@ -53,7 +53,7 @@ Authorize Gatsby to connect with your DatoCMS account and hit the "Start my site > This automatic provisioning of your website is one of Gatsby Cloud's strengths. 
Integrations with content management systems like this make it possible for you to spin up and begin publishing content to new sites without ever touching any code. -![celebratory screen titled successfully set up Gatsby provisioned Snipcart!](/images/successfully-set-up.png) +![Celebratory screen titled successfully set up Gatsby provisioned Snipcart!](/images/successfully-set-up.png) ## Exploring the DatoCMS Project @@ -62,7 +62,7 @@ follow the link in Gatsby Cloud to go straight to your project in DatoCMS. Click on "Enter project" and select "Products" from the secondary navigation menu. You'll see a number of products already defined for you. You can delete or edit these as you'd like. -![edit product screen featuring a single sock product called street faces](/images/edit-product.png) +![Edit product screen featuring a single sock product called street faces](/images/edit-product.png) ### Editing models in DatoCMS @@ -70,7 +70,7 @@ Clicking on "Settings" in the primary navigation menu reveals the "Models" menu Maybe you also want each product to have a description or a release date. You can edit this model to be whatever you want it to be by adding and customizing fields. DatoCMS provides a wide variety of options that you can tweak to fit your needs. -![choose a field type menu featuring options for text, modular content, media, data and timeline, number, boolean, location, color, SEO, links, and JSON](/images/choose-field-type.png) +![Choose a field type menu featuring options for text, modular content, media, data and timeline, number, boolean, location, color, SEO, links, and JSON](/images/choose-field-type.png) If you already have items you know you want to sell, replace the sample products with a few of your own to get a feel for the interface. If not, you can continue on without making any changes in the CMS for now. @@ -84,7 +84,7 @@ Even if you've never used Git/GitHub, there's no need to fear. You can edit indi You can make small changes to the code from your browser by editing files directly in GitHub. Say, for example, that you always want to display prices in some other currency besides euros. To switch the symbols, head to your repository's page on GitHub and navigate to `src/pages/index.js`. -![github menu shows the current branch and file location as well as the first few lines of code in this file. Also includes view options raw, blame, and history](/images/github-menu.png) +![GitHub menu shows the current branch and file location as well as the first few lines of code in this file. Also includes view options raw, blame, and history](/images/github-menu.png) Select "Edit this file" (the pencil icon) from the menu. Where `product.price` is being displayed, you'll need to update the currency symbol. @@ -94,7 +94,7 @@ Select "Edit this file" (the pencil icon) from the menu. Where `product.price` i Once you've made your change, you'll need to commit that change. A commit is like a snapshot in time or a save point in a video game. Add a message that concisely describes the changes you've made and then commit those changes. -![commit changes menu with message 'Euro to dollar'. Email address is blurred and there's a big, green 'Commit changes' button](/images/commit-change.png) +![Commit changes menu with message 'Euro to dollar'. Email address is blurred and there's a big, green 'Commit changes' button](/images/commit-change.png) You've made a change to your master branch, which means Gatsby will build a new version of your site! 
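+
+The exact markup in `src/pages/index.js` may differ from this sketch, but the currency change described above boils down to swapping the symbol rendered next to `product.price`:
+
+```jsx
+// before: price displayed in euros
+<p>€{product.price}</p>
+
+// after: price displayed in dollars
+<p>${product.price}</p>
+```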
@@ -104,7 +104,7 @@ The rest of this process is described in the context of using a text editor to m You can edit your site's code locally by following these steps: -- Clone the project by Selecting the "Clone or download" button on your repository's main page, which will display a link for you to copy. +- Clone the project by selecting the "Clone or download" button on your repository's main page, which will display a link for you to copy. - Copy the project to your computer and install its dependencies, other JavaScript projects that your site relies on to work. @@ -120,7 +120,7 @@ You'll need to make one change before you try to run the site. The version that In `gatsby-config.js`, you'll find an array of plugins already installed on your site, including `gatsby-source-datocms`. That section references a variable `process.env.DATO_API_TOKEN`. `process.env` refers to the _environment_ in which you're running your code and `DATO_API_TOKEN` is the name of the particular variable in question. -```jsx:title=gatsby-config.js +```js:title=gatsby-config.js { resolve: `gatsby-source-datocms`, options: { apiToken: process.env.DATO_API_TOKEN }, @@ -131,13 +131,13 @@ In order to set this variable's value, create a new file in your project's root Add your DatoCMS API Token environment variable to this file. You can find your API token in Gatsby under Site Settings > Environment variables. Opening the "Edit" menu will allow you to copy the full token. Set that as the value in `.env.development`. -``` +```text DATO_API_TOKEN=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ``` -Now you should be able to run `gatsby develop` to run your site. If you're using Visual Studio Code, you can open a terminal within the text editor (control + `) and keep everything in one window. Navigate to [localhost:8000](http://localhost:8000/) in your browser to see the example site. It should include whatever products you created in DatoCMS. +Now you should be able to run `gatsby develop` to start your site. If you're using Visual Studio Code, you can open a terminal within the text editor (control + \`) and keep everything in one window. Navigate to `http://localhost:8000/` in your browser to see the example site. It should include whatever products you created in DatoCMS. -![sample shop has a bright blue, pink, and green gradient as a background. Stacking rings, rhodochrosite ring, and statement earrings have been added as products](/images/sample-with-products.png) +![Sample shop has a bright blue, pink, and green gradient as a background. Stacking rings, rhodochrosite ring, and statement earrings have been added as products](/images/sample-with-products.png) ## Modifying styles @@ -145,7 +145,7 @@ Chances are you'll want to apply your own branding to this site, so try making s If you're not sure where to look for the code you'll need to change, search for the term "background" and see what comes up! In this case, you'll find one entry in the results that _also_ says something about a gradient... -![searching the word background in VS Code reveals 7 instances in 5 files, including one in reset.scss](/images/background.png) +![Searching the word background in VS Code reveals 7 instances in 5 files, including one in reset.scss](/images/background.png) Remove the following CSS from `src/style/reset.scss`. The result should be a plain white background for your store.
diff --git a/examples/creating-source-plugins/README.md b/examples/creating-source-plugins/README.md new file mode 100644 index 0000000000000..fb1face66c326 --- /dev/null +++ b/examples/creating-source-plugins/README.md @@ -0,0 +1,83 @@ +# Creating First Class Gatsby Source Plugins + +Create Gatsby plugins that leverage Gatsby's most impactful native features like remote image optimization, caching, customized GraphQL schemas and node relationships, and more. + +This monorepo serves as an example of a site using a first class source plugin to pull in data from a Node.js API. It is meant to show the 3 pieces that work together when building a source plugin: the API, the site, and the source plugin. + +## Setup + +This monorepo uses yarn workspaces to manage the 3 individual projects: + +- api: a Node.js API with in-memory data, and a Post and Author type, as well as support for subscriptions when Posts are mutated +- example-site: a barebones Gatsby site that implements the source plugin +- source-plugin: a plugin that uses several Gatsby APIs to source data from the API, create responsive/optimized images from remote locations, and link the nodes in the example site + +To install dependencies for all projects, run the install command in the root of the yarn workspace (which requires yarn to be installed): + +``` +yarn install +``` + +_Note: if you aren't using yarn, you can navigate into each of the 3 folders and run `npm install` instead_ + +Then you can run the api or example projects in separate terminal windows with the commands below. + +For the API, which runs at `localhost:4000`, use this command: + +``` +yarn workspace api start +``` + +And to run the example site with `gatsby develop` at `localhost:8000`, use this command: + +``` +yarn workspace example-site develop +``` + +Running the example site also runs the plugin because it is included in the site's config. You'll see output in the console for different functionality and then can open up the browser to `localhost:8000` to see the site. + +## Developing and Experimenting + +You can open up `localhost:4000` with the API running, which will load GraphQL Playground, a GraphQL IDE (like GraphiQL, which Gatsby runs at `localhost:8000/___graphql`) for running queries and mutations on the data from the API. + +You can test a query like this to see data returned: + +```graphql +query { + posts { + id + slug + } +} +``` + +This query will return the IDs and slugs for all posts in the API. You can copy one of these IDs and provide it as an argument to a mutation to update information about that post. + +You can run 3 different mutations from the GraphQL Playground (at `localhost:4000`): `createPost`, `updatePost`, and `deletePost`. These mutations mimic the CRUD operations that would happen on the data source's API, such as a headless CMS. An example `updatePost` mutation is outlined below. + +When you run a mutation on a post, a subscription event is published, which lets the plugin know it should respond and update nodes. The following mutation can be copied into the left side of the GraphQL playground so long as you replace "post-id" with a value returned for an ID from a query (like the one above). + +```graphql +mutation { + updatePost(id: "post-id", description: "Some data!") { + id + slug + description + } +} +``` + +The website's homepage will update with any changes while the source plugin is subscribed to them, which happens when `preview: true` is set in the example site's `gatsby-config`.
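+
+For reference, this is the (trimmed) plugin entry from `example-site/gatsby-config.js` that turns preview mode on; `spaceId` and `cacheResponse` are just example options:
+
+```js
+module.exports = {
+  plugins: [
+    {
+      resolve: `source-plugin`,
+      options: {
+        spaceId: "123",
+        preview: true, // subscribe to the API's websocket updates
+        cacheResponse: false,
+      },
+    },
+  ],
+}
+```
+
+The full config in this example also registers `gatsby-plugin-sharp` and `gatsby-transformer-sharp` to generate optimized images.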
+ +You can also optionally listen for subscription events with this query in the playground which will display data when a mutation is run: + +```graphql +subscription { + posts { + id + description + } +} +``` + +A similar subscription is registered when the plugin is run, so you can also see subscription events logged when the plugin is running. diff --git a/examples/creating-source-plugins/api/README.md b/examples/creating-source-plugins/api/README.md new file mode 100644 index 0000000000000..c9fa5261a5770 --- /dev/null +++ b/examples/creating-source-plugins/api/README.md @@ -0,0 +1,3 @@ +# Example API + +A small GraphQL server with in-memory data, powered by [graphql-yoga](https://github.com/graphcool/graphql-yoga) 🧘. See the root of the monorepo for details about running this API alongisde the `example-site` and `source-plugin`. diff --git a/examples/creating-source-plugins/api/package.json b/examples/creating-source-plugins/api/package.json new file mode 100644 index 0000000000000..48e9f8626d1e9 --- /dev/null +++ b/examples/creating-source-plugins/api/package.json @@ -0,0 +1,43 @@ +{ + "name": "api", + "description": "A simple GraphQL server example with in-memory data", + "version": "1.0.0", + "license": "MIT", + "homepage": "https://general-repair.glitch.me", + "author": { + "name": "Risan Bagja Pradana", + "email": "risanbagja@gmail.com", + "url": "https://risan.io" + }, + "main": "src/index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/risan/simple-graphql-server-example.git" + }, + "bugs": { + "url": "https://github.com/risan/simple-graphql-server-example/issues" + }, + "keywords": [ + "graphql", + "graphql-server", + "graphql-yoga" + ], + "scripts": { + "start": "node src/index.js", + "lint": "eslint *.js src", + "lint-fix": "eslint *.js src --fix" + }, + "dependencies": { + "dotenv": "^5.0.1", + "graphql-yoga": "^1.8.2", + "uniqid": "^4.1.1" + }, + "devDependencies": { + "eslint": "^4.19.1", + "eslint-config-airbnb-base": "^12.1.0", + "eslint-config-prettier": "^2.9.0", + "eslint-plugin-import": "^2.10.0", + "eslint-plugin-prettier": "^2.6.0", + "prettier": "^1.11.1" + } +} diff --git a/examples/creating-source-plugins/api/src/index.js b/examples/creating-source-plugins/api/src/index.js new file mode 100644 index 0000000000000..70b3e159cfa73 --- /dev/null +++ b/examples/creating-source-plugins/api/src/index.js @@ -0,0 +1,138 @@ +require("dotenv").config() +const { GraphQLServer, PubSub } = require("graphql-yoga") +const uniqid = require("uniqid") + +const CREATED = "created" +const UPDATED = "updated" +const DELETED = "deleted" + +const authors = [ + { + id: 1, + name: "Jay Gatsby", + }, + { + id: 2, + name: "Daisy Buchanan", + }, +] + +const posts = [ + { + id: uniqid(), + slug: "hello-world", + description: "Our first post on our site.", + imgUrl: "https://images.unsplash.com/photo-1534432586043-ead5b99229fb", + imgAlt: "Pug in a sweater", + authorId: 1, + }, + { + id: uniqid(), + slug: "company-vision", + description: "Our vision for a welcoming company.", + imgUrl: "https://images.unsplash.com/photo-1530041539828-114de669390e", + imgAlt: "Pug in a rainjacket", + authorId: 1, + }, + { + id: uniqid(), + slug: "redesigning-our-logo", + description: "What went into the new logo.", + imgUrl: "https://images.unsplash.com/photo-1541364983171-a8ba01e95cfc", + imgAlt: "Pug in glasses", + authorId: 2, + }, +] + +const resolvers = { + Query: { + info: () => "A simple GraphQL server example with in-memory data.", + posts: () => posts, 
+ authors: () => authors, + }, + + Mutation: { + createPost: (root, { slug, description }) => { + const post = { + id: uniqid(), + slug, + description, + imgUrl: "https://images.unsplash.com/photo-1534432586043-ead5b99229fb", + imgAlt: "pug in a sweater", + authorId: 1, + } + + posts.push(post) + pubsub.publish(CREATED, { posts: [{ status: CREATED, ...post }] }) + + return post + }, + + updatePost: (root, { id, description }) => { + const postIdx = posts.findIndex(p => id === p.id) + + if (postIdx === null) { + return null + } + + posts[postIdx] = { ...posts[postIdx], description } + pubsub.publish(UPDATED, { + posts: [{ status: UPDATED, ...posts[postIdx] }], + }) + + return posts[postIdx] + }, + + deletePost: (root, { id }) => { + const postIdx = posts.findIndex(p => id === p.id) + + if (postIdx === null) { + return null + } + + const post = posts[postIdx] + pubsub.publish(DELETED, { + posts: [{ status: DELETED, ...posts[postIdx] }], + }) + + posts.splice(postIdx, 1) + + return post + }, + }, + + Post: { + id: root => root.id, + slug: root => root.slug, + description: root => root.description, + author: root => authors.find(author => author.id === root.authorId), + }, + + Author: { + id: root => root.id, + name: root => root.name, + }, + + Subscription: { + posts: { + subscribe: (parent, args, { pubsub }) => { + return pubsub.asyncIterator([CREATED, UPDATED, DELETED]) + }, + }, + }, +} + +const pubsub = new PubSub() +const server = new GraphQLServer({ + typeDefs: "./src/schema.graphql", + resolvers, + context: { pubsub }, +}) + +server.start( + { + port: + (process.env.PORT ? parseInt(process.env.PORT, 10) : undefined) || 4000, + }, + ({ port }) => console.log(`🏃🏻‍ Server is running on port ${port}.`) +) diff --git a/examples/creating-source-plugins/api/src/schema.graphql b/examples/creating-source-plugins/api/src/schema.graphql new file mode 100644 index 0000000000000..2dd1f9c5a67c6 --- /dev/null +++ b/examples/creating-source-plugins/api/src/schema.graphql @@ -0,0 +1,30 @@ +type Query { + info: String! + posts: [Post!]! + authors: [Author!]! +} + +type Mutation { + createPost(slug: String!, description: String!): Post! + updatePost(id: ID!, description: String!): Post + deletePost(id: ID!): Post +} + +type Post { + id: ID! + slug: String! + description: String! + imgUrl: String! + imgAlt: String! + author: Author! + status: String +} + +type Author { + id: ID! + name: String! +} + +type Subscription { + posts: [Post!]! +} diff --git a/examples/creating-source-plugins/example-site/README.md b/examples/creating-source-plugins/example-site/README.md new file mode 100644 index 0000000000000..5c03365ce2895 --- /dev/null +++ b/examples/creating-source-plugins/example-site/README.md @@ -0,0 +1,3 @@ +# Example Site + +See the root of the monorepo for details about running this site with the example `source-plugin` installed inside it. diff --git a/examples/creating-source-plugins/example-site/gatsby-config.js b/examples/creating-source-plugins/example-site/gatsby-config.js new file mode 100644 index 0000000000000..1d16c1582500d --- /dev/null +++ b/examples/creating-source-plugins/example-site/gatsby-config.js @@ -0,0 +1,22 @@ +/** + * Configure your Gatsby site with this file. 
+ * + * See: https://www.gatsbyjs.org/docs/gatsby-config/ + */ + +module.exports = { + plugins: [ + // loads the source-plugin + { + resolve: `source-plugin`, + options: { + spaceId: "123", + preview: true, + cacheResponse: false, + }, + }, + // required to generate optimized images + `gatsby-plugin-sharp`, + `gatsby-transformer-sharp`, + ], +} diff --git a/examples/creating-source-plugins/example-site/package.json b/examples/creating-source-plugins/example-site/package.json new file mode 100644 index 0000000000000..51dc41c4900d2 --- /dev/null +++ b/examples/creating-source-plugins/example-site/package.json @@ -0,0 +1,34 @@ +{ + "name": "example-site", + "private": true, + "description": "A simplified bare-bones starter for Gatsby", + "version": "0.1.0", + "license": "MIT", + "scripts": { + "build": "gatsby build", + "develop": "gatsby develop", + "format": "prettier --write \"**/*.{js,jsx,json,md}\"", + "start": "npm run develop", + "serve": "gatsby serve", + "clean": "gatsby clean", + "test": "echo \"Write tests! -> https://gatsby.dev/unit-testing\" && exit 1" + }, + "dependencies": { + "gatsby": "^2.19.45", + "gatsby-image": "^2.3.1", + "gatsby-plugin-sharp": "^2.5.3", + "gatsby-transformer-sharp": "^2.4.3", + "react": "^16.12.0", + "react-dom": "^16.12.0" + }, + "devDependencies": { + "prettier": "^1.19.1" + }, + "repository": { + "type": "git", + "url": "https://github.com/gatsbyjs/gatsby-starter-hello-world" + }, + "bugs": { + "url": "https://github.com/gatsbyjs/gatsby/issues" + } +} diff --git a/examples/creating-source-plugins/example-site/src/pages/index.js b/examples/creating-source-plugins/example-site/src/pages/index.js new file mode 100644 index 0000000000000..e8491f64ec941 --- /dev/null +++ b/examples/creating-source-plugins/example-site/src/pages/index.js @@ -0,0 +1,72 @@ +import React from "react" +import { graphql } from "gatsby" +import Img from "gatsby-image" + +export default ({ data }) => ( +
+  <main>
+    <h1>Posts</h1>
+    <section>
+      {data.allPost.nodes.map(post => (
+        <div key={post.id}>
+          <h2>{post.slug}</h2>
+          <span>By: {post.author.name}</span>
+          <p>{post.description}</p>
+          <Img
+            fluid={post.remoteImage.childImageSharp.fluid}
+            alt={post.imgAlt}
+          />
+        </div>
+      ))}
+    </section>
+  </main>
+) + +export const query = graphql` + { + allPost { + nodes { + id + slug + description + imgAlt + author { + id + name + } + slug + remoteImage { + id + childImageSharp { + id + fluid { + ...GatsbyImageSharpFluid + } + } + } + } + } + } +` diff --git a/examples/creating-source-plugins/example-site/static/favicon.ico b/examples/creating-source-plugins/example-site/static/favicon.ico new file mode 100644 index 0000000000000..1a466ba8852cf Binary files /dev/null and b/examples/creating-source-plugins/example-site/static/favicon.ico differ diff --git a/examples/creating-source-plugins/package.json b/examples/creating-source-plugins/package.json new file mode 100644 index 0000000000000..34071a17c59ee --- /dev/null +++ b/examples/creating-source-plugins/package.json @@ -0,0 +1,14 @@ +{ + "name": "creating-source-plugins", + "version": "1.0.0", + "description": "Monorepo for examples, api, and plugins for creating first class source plugins", + "main": "index.js", + "author": "@gillkyle", + "license": "MIT", + "workspaces": [ + "api", + "example-site", + "source-plugin" + ], + "private": true +} diff --git a/examples/creating-source-plugins/source-plugin/README.md b/examples/creating-source-plugins/source-plugin/README.md new file mode 100644 index 0000000000000..1d88f045e1984 --- /dev/null +++ b/examples/creating-source-plugins/source-plugin/README.md @@ -0,0 +1,3 @@ +# Example Source Plugin + +See the root of the monorepo for details about running this plugin inside of the `example-site` folder. It is installed in the example site and can be debugged and developed while running there. diff --git a/examples/creating-source-plugins/source-plugin/gatsby-node.js b/examples/creating-source-plugins/source-plugin/gatsby-node.js new file mode 100644 index 0000000000000..935297827e34f --- /dev/null +++ b/examples/creating-source-plugins/source-plugin/gatsby-node.js @@ -0,0 +1,274 @@ +const { createRemoteFileNode } = require(`gatsby-source-filesystem`) +const WebSocket = require("ws") +const { ApolloClient } = require("apollo-client") +const { InMemoryCache } = require("apollo-cache-inmemory") +const { split } = require("apollo-link") +const { HttpLink } = require("apollo-link-http") +const { WebSocketLink } = require("apollo-link-ws") +const { getMainDefinition } = require("apollo-utilities") +const fetch = require("node-fetch") +const gql = require("graphql-tag") + +/** + * ============================================================================ + * Create a GraphQL client to subscribe to live data changes + * ============================================================================ + */ + +// Create an http link: +const httpLink = new HttpLink({ + uri: "http://localhost:4000", + fetch, +}) + +// Create a WebSocket link: +const wsLink = new WebSocketLink({ + uri: `ws://localhost:4000`, + options: { + reconnect: true, + }, + webSocketImpl: WebSocket, +}) + +// using the ability to split links, you can send data to each link/url +// depending on what kind of operation is being sent +const link = split( + // split based on operation type + ({ query }) => { + const definition = getMainDefinition(query) + return ( + definition.kind === "OperationDefinition" && + definition.operation === "subscription" + ) + }, + wsLink, + httpLink +) + +const client = new ApolloClient({ + link, + cache: new InMemoryCache(), +}) + +/** + * ============================================================================ + * Helper functions and constants + * 
============================================================================ + */ + +const POST_NODE_TYPE = `Post` +const AUTHOR_NODE_TYPE = `Author` + +// helper function for creating nodes +const createNodeFromData = (item, nodeType, helpers) => { + const nodeMetadata = { + id: helpers.createNodeId(`${nodeType}-${item.id}`), + parent: null, // this is used if nodes are derived from other nodes, a little different than a foreign key relationship, more fitting for a transformer plugin that is changing the node + children: [], + internal: { + type: nodeType, + content: JSON.stringify(item), + contentDigest: helpers.createContentDigest(item), + }, + } + + const node = Object.assign({}, item, nodeMetadata) + helpers.createNode(node) + return node +} + +/** + * ============================================================================ + * Verify plugin loads + * ============================================================================ + */ + +// should see message in console when running `gatsby develop` in example-site +exports.onPreInit = () => console.log("Loaded source-plugin") + +/** + * ============================================================================ + * Link nodes together with a customized GraphQL Schema + * ============================================================================ + */ + +exports.createSchemaCustomization = ({ actions }) => { + const { createTypes } = actions + createTypes(` + type Post implements Node { + id: ID! + slug: String! + description: String! + imgUrl: String! + imgAlt: String! + # create relationships between Post and File nodes for optimized images + remoteImage: File @link + # create relationships between Post and Author nodes + author: Author @link(from: "author.name" by: "name") + } + + type Author implements Node { + id: ID! + name: String! 
+ }`) +} + +/** + * ============================================================================ + * Source and cache nodes from the API + * ============================================================================ + */ + +exports.sourceNodes = async function sourceNodes( + { + actions, + cache, + createContentDigest, + createNodeId, + getNodesByType, + getNode, + }, + pluginOptions +) { + const { createNode, touchNode, deleteNode } = actions + const helpers = Object.assign({}, actions, { + createContentDigest, + createNodeId, + }) + + // you can access plugin options here if need be + console.log(`Space ID: ${pluginOptions.spaceId}`) + + // simple caching example, you can find in .cache/caches/source-plugin/some-diskstore + await cache.set(`hello`, `world`) + console.log(await cache.get(`hello`)) + + // touch nodes to ensure they aren't garbage collected + getNodesByType(POST_NODE_TYPE).forEach(node => touchNode({ nodeId: node.id })) + getNodesByType(AUTHOR_NODE_TYPE).forEach(node => + touchNode({ nodeId: node.id }) + ) + + // listen for updates using subscriptions from the API + if (pluginOptions.preview) { + console.log( + "Subscribing to updates on ws://localhost:4000 (plugin is in Preview mode)" + ) + const subscription = await client.subscribe({ + query: gql` + subscription { + posts { + id + slug + description + imgUrl + imgAlt + author { + id + name + } + status + } + } + `, + }) + subscription.subscribe(({ data }) => { + console.log(`Subscription received:`) + console.log(data.posts) + data.posts.forEach(post => { + const nodeId = createNodeId(`${POST_NODE_TYPE}-${post.id}`) + switch (post.status) { + case "deleted": + deleteNode({ + node: getNode(nodeId), + }) + break + case "created": + case "updated": + default: + // created and updated can be handled by the same code path + // the post's id is presumed to stay constant (or can be inferred) + createNodeFromData(post, POST_NODE_TYPE, helpers) + break + } + }) + }) + } + + // store the response from the API in the cache + const cacheKey = "your-source-data-key" + let sourceData = await cache.get(cacheKey) + + // fetch fresh data if nothiing is found in the cache or a plugin option says not to cache data + if (!sourceData || !pluginOptions.cacheResponse) { + console.log("Not using cache for source data, fetching fresh content") + const { data } = await client.query({ + query: gql` + query { + posts { + id + slug + description + imgUrl + imgAlt + author { + id + name + } + } + authors { + id + name + } + } + `, + }) + await cache.set(cacheKey, data) + sourceData = data + } + + // loop through data returned from the api and create Gatsby nodes for them + sourceData.posts.forEach(post => + createNodeFromData(post, POST_NODE_TYPE, helpers) + ) + sourceData.authors.forEach(author => + createNodeFromData(author, AUTHOR_NODE_TYPE, helpers) + ) + + return +} + +/** + * ============================================================================ + * Transform remote file nodes + * ============================================================================ + */ + +exports.onCreateNode = async ({ + actions: { createNode }, + getCache, + createNodeId, + node, +}) => { + // transfrom remote file nodes using Gatsby sharp plugins + // because onCreateNode is called for all nodes, verify that you are only running this code on nodes created by your plugin + if (node.internal.type === POST_NODE_TYPE) { + // create a FileNode in Gatsby that gatsby-transformer-sharp will create optimized images for + const fileNode = await createRemoteFileNode({ 
+ // the url of the remote image to generate a node for + url: node.imgUrl, + getCache, + createNode, + createNodeId, + parentNodeId: node.id, + }) + + if (fileNode) { + // used to add a field `remoteImage` to the Post node from the File node in the schemaCustomization API + node.remoteImage = fileNode.id + + // inference can link these without schemaCustomization like this, but creates a less sturdy schema + // node.remoteImage___NODE = fileNode.id + } + } +} diff --git a/examples/creating-source-plugins/source-plugin/index.js b/examples/creating-source-plugins/source-plugin/index.js new file mode 100644 index 0000000000000..172f1ae6a468c --- /dev/null +++ b/examples/creating-source-plugins/source-plugin/index.js @@ -0,0 +1 @@ +// noop diff --git a/examples/creating-source-plugins/source-plugin/package.json b/examples/creating-source-plugins/source-plugin/package.json new file mode 100644 index 0000000000000..53449361a87c5 --- /dev/null +++ b/examples/creating-source-plugins/source-plugin/package.json @@ -0,0 +1,28 @@ +{ + "name": "source-plugin", + "version": "1.0.0", + "description": "A minimal boilerplate for the essential files Gatsby looks for in a plugin", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [ + "gatsby", + "gatsby-plugin" + ], + "author": "Kyle Gill ", + "license": "MIT", + "dependencies": { + "apollo-cache-inmemory": "^1.6.5", + "apollo-client": "^2.6.8", + "apollo-link": "^1.2.13", + "apollo-link-http": "^1.5.16", + "apollo-link-ws": "^1.0.19", + "apollo-utilities": "^1.3.3", + "gatsby-source-filesystem": "^2.2.2", + "graphql": "^15.0.0", + "graphql-tag": "^2.10.3", + "node-fetch": "^2.6.0", + "ws": "^7.2.3" + } +} diff --git a/package.json b/package.json index 70c0465e96888..3e0502d69e897 100644 --- a/package.json +++ b/package.json @@ -8,6 +8,7 @@ "@types/babel__code-frame": "^7.0.1", "@types/bluebird": "^3.5.30", "@types/cache-manager": "^2.10.2", + "@types/common-tags": "^1.8.0", "@types/eslint": "^6.1.8", "@types/express": "^4.17.3", "@types/fast-levenshtein": "^0.0.1", @@ -18,6 +19,8 @@ "@types/lodash": "^4.14.149", "@types/node": "^12.12.30", "@types/node-fetch": "^2.5.5", + "@types/semver": "^7.1.0", + "@types/signal-exit": "^3.0.0", "@types/react": "^16.9.31", "@types/stack-trace": "^0.0.29", "@types/webpack": "^4.41.7", diff --git a/packages/gatsby-cli/CHANGELOG.md b/packages/gatsby-cli/CHANGELOG.md index a8392ff136ed7..00831b5584efc 100644 --- a/packages/gatsby-cli/CHANGELOG.md +++ b/packages/gatsby-cli/CHANGELOG.md @@ -3,6 +3,24 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [2.11.8](https://github.com/gatsbyjs/gatsby/compare/gatsby-cli@2.11.7...gatsby-cli@2.11.8) (2020-04-14) + +### Bug Fixes + +- **gatsby-cli:** Fix console methods incorrectly handling falsy values ([#23021](https://github.com/gatsbyjs/gatsby/issues/23021)) ([66a1b7f](https://github.com/gatsbyjs/gatsby/commit/66a1b7f)) + +## [2.11.7](https://github.com/gatsbyjs/gatsby/compare/gatsby-cli@2.11.6...gatsby-cli@2.11.7) (2020-04-10) + +### Bug Fixes + +- **gatsby-cli:** Address an issue that caused empty logs to print undefined ([#23000](https://github.com/gatsbyjs/gatsby/issues/23000)) ([be85f2e](https://github.com/gatsbyjs/gatsby/commit/be85f2e)) + +## [2.11.6](https://github.com/gatsbyjs/gatsby/compare/gatsby-cli@2.11.5...gatsby-cli@2.11.6) (2020-04-10) + +### Features + +- **gatsby-cli:** allow --recursive git url ([#22747](https://github.com/gatsbyjs/gatsby/issues/22747)) ([f4198e2](https://github.com/gatsbyjs/gatsby/commit/f4198e2)) + ## [2.11.5](https://github.com/gatsbyjs/gatsby/compare/gatsby-cli@2.11.4...gatsby-cli@2.11.5) (2020-04-03) **Note:** Version bump only for package gatsby-cli diff --git a/packages/gatsby-cli/package.json b/packages/gatsby-cli/package.json index 33c6763dd20f7..10d3f0633300d 100644 --- a/packages/gatsby-cli/package.json +++ b/packages/gatsby-cli/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-cli", "description": "Gatsby command-line interface for creating new sites and running Gatsby commands", - "version": "2.11.5", + "version": "2.11.8", "author": "Kyle Mathews ", "bin": { "gatsby": "lib/index.js" diff --git a/packages/gatsby-cli/src/index.ts b/packages/gatsby-cli/src/index.ts index f215a5f04a257..a08464ba5aed9 100755 --- a/packages/gatsby-cli/src/index.ts +++ b/packages/gatsby-cli/src/index.ts @@ -56,7 +56,7 @@ process.on(`unhandledRejection`, reason => { reason = new Error(util.format(reason)) } - report.panic(`UNHANDLED REJECTION`, reason) + report.panic(`UNHANDLED REJECTION`, reason as Error) }) process.on(`uncaughtException`, error => { diff --git a/packages/gatsby-cli/src/init-starter.ts b/packages/gatsby-cli/src/init-starter.ts index 8ed209a001734..5310cd00e0a0e 100644 --- a/packages/gatsby-cli/src/init-starter.ts +++ b/packages/gatsby-cli/src/init-starter.ts @@ -179,9 +179,14 @@ const clone = async (hostInfo: any, rootPath: string): Promise => { report.info(`Creating new site from git: ${url}`) - const args = [`clone`, ...branch, url, rootPath, `--depth=1`].filter(arg => - Boolean(arg) - ) + const args = [ + `clone`, + ...branch, + url, + rootPath, + `--recursive`, + `--depth=1`, + ].filter(arg => Boolean(arg)) await spawnWithArgs(`git`, args) diff --git a/packages/gatsby-cli/src/reporter/__tests__/index.js b/packages/gatsby-cli/src/reporter/__tests__/index.js index 68ac39f2947e2..b0423ebd15381 100644 --- a/packages/gatsby-cli/src/reporter/__tests__/index.js +++ b/packages/gatsby-cli/src/reporter/__tests__/index.js @@ -1,4 +1,4 @@ -const reporter = require(`../index.js`) +const reporter = require(`../`) const reporterActions = require(`../redux/actions`) // TODO: report.error now DOES return something. 
Get rid of this spying mocking stuff diff --git a/packages/gatsby-cli/src/reporter/__tests__/patch-console.ts b/packages/gatsby-cli/src/reporter/__tests__/patch-console.ts new file mode 100644 index 0000000000000..72d1cb43127e9 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/__tests__/patch-console.ts @@ -0,0 +1,60 @@ +import { patchConsole } from "../patch-console" +import { reporter as gatsbyReporter } from "../reporter" + +describe(`patchConsole`, () => { + const reporter = { + log: jest.fn(), + warn: jest.fn(), + info: jest.fn(), + } + + patchConsole((reporter as unknown) as typeof gatsbyReporter) + ;[`info`, `log`, `warn`].forEach(method => { + describe(method, () => { + beforeEach(reporter[method].mockReset) + + it(`handles an empty call`, () => { + console[method]() + + expect(reporter[method]).toBeCalledWith(``) + }) + + it(`handles multiple arguments`, () => { + console[method](`foo`, `bar`, `baz`) + + expect(reporter[method]).toBeCalledWith(`foo bar baz`) + }) + + it(`handles formatting`, () => { + console[method](`%s %d`, `bar`, true) + + expect(reporter[method]).toBeCalledWith(`bar 1`) + }) + + it(`handles normal values`, () => { + console[method](1) + console[method](0) + console[method](true) + console[method](false) + console[method]([1, true, false, {}]) + console[method]({ 1: 1, true: true, false: `false`, obj: {} }) + + expect(reporter[method].mock.calls[0][0]).toBe(`1`) + expect(reporter[method].mock.calls[1][0]).toBe(`0`) + expect(reporter[method].mock.calls[2][0]).toBe(`true`) + expect(reporter[method].mock.calls[3][0]).toBe(`false`) + expect(reporter[method].mock.calls[4][0]).toBe(`[ 1, true, false, {} ]`) + expect(reporter[method].mock.calls[5][0]).toBe( + `{ '1': 1, true: true, false: 'false', obj: {} }` + ) + }) + + it(`handles undefined variables`, () => { + let a + console[method](a) + + expect(reporter[method]).toBeCalledWith(``) + }) + }) + }) +}) diff --git a/packages/gatsby-cli/src/reporter/catch-exit-signals.ts b/packages/gatsby-cli/src/reporter/catch-exit-signals.ts new file mode 100644 index 0000000000000..67c54680f50e1 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/catch-exit-signals.ts @@ -0,0 +1,42 @@ +/* + * This module is used to catch if the user kills the gatsby process via cmd+c + * When this happens, there is some clean up logic we need to fire offf + */ +import signalExit from "signal-exit" +import { getStore } from "./redux" +import reporterActions from "./redux/actions" +import { ActivityStatuses } from "./constants" +import { reporter } from "./reporter" + +const interruptActivities = (): void => { + const { activities } = getStore().getState().logs + Object.keys(activities).forEach(activityId => { + const activity = activities[activityId] + if ( + activity.status === ActivityStatuses.InProgress || + activity.status === ActivityStatuses.NotStarted + ) { + reporter.completeActivity(activityId, ActivityStatuses.Interrupted) + } + }) +} + +export const prematureEnd = (): void => { + // hack so at least one activity is surely failed, so + // we are guaranteed to generate FAILED status + // if none of activity did explicitly fail + reporterActions.createPendingActivity({ + id: `panic`, + status: ActivityStatuses.Failed, + }) + + interruptActivities() +} + +export const catchExitSignals = (): void => { + signalExit((code, signal) => { + if (code !== 0 && signal !== `SIGINT` && signal !== `SIGTERM`) + prematureEnd() + else interruptActivities() + }) +} diff --git a/packages/gatsby-cli/src/reporter/index.js 
b/packages/gatsby-cli/src/reporter/index.js deleted file mode 100644 index 009064fcea216..0000000000000 --- a/packages/gatsby-cli/src/reporter/index.js +++ /dev/null @@ -1,430 +0,0 @@ -// @flow - -const semver = require(`semver`) -const { isCI } = require(`gatsby-core-utils`) -const signalExit = require(`signal-exit`) -const reporterActions = require(`./redux/actions`) - -const { LogLevels, ActivityStatuses, ActivityTypes } = require(`./constants`) - -let inkExists = false -try { - inkExists = require.resolve(`ink`) - // eslint-disable-next-line no-empty -} catch (err) {} - -if (!process.env.GATSBY_LOGGER) { - if ( - inkExists && - semver.satisfies(process.version, `>=8`) && - !isCI() && - typeof jest === `undefined` - ) { - process.env.GATSBY_LOGGER = `ink` - } else { - process.env.GATSBY_LOGGER = `yurnalist` - } -} -// if child process - use ipc logger -if (process.send) { - // process.env.FORCE_COLOR = `0` - - require(`./loggers/ipc`) -} - -if (process.env.GATSBY_LOGGER.includes(`json`)) { - require(`./loggers/json`) -} else if (process.env.GATSBY_LOGGER.includes(`yurnalist`)) { - require(`./loggers/yurnalist`) -} else { - require(`./loggers/ink`) -} - -const util = require(`util`) -const { stripIndent } = require(`common-tags`) -const chalk = require(`chalk`) -const { trackError } = require(`gatsby-telemetry`) -const tracer = require(`opentracing`).globalTracer() - -const { getErrorFormatter } = require(`./errors`) -const { getStore } = require(`./redux`) -import constructError from "../structured-errors/construct-error" - -const errorFormatter = getErrorFormatter() - -import type { ActivityTracker, ActivityArgs, Reporter } from "./types" - -const addMessage = level => text => reporterActions.createLog({ level, text }) - -let isVerbose = false - -const interruptActivities = () => { - const { activities } = getStore().getState().logs - Object.keys(activities).forEach(activityId => { - const activity = activities[activityId] - if ( - activity.status === ActivityStatuses.InProgress || - activity.status === ActivityStatuses.NotStarted - ) { - reporter.completeActivity(activityId, ActivityStatuses.Interrupted) - } - }) -} - -const prematureEnd = () => { - // hack so at least one activity is surely failed, so - // we are guaranteed to generate FAILED status - // if none of activity did explicitly fail - reporterActions.createPendingActivity({ - id: `panic`, - status: ActivityStatuses.Failed, - }) - - interruptActivities() -} - -signalExit((code, signal) => { - if (code !== 0 && signal !== `SIGINT` && signal !== `SIGTERM`) prematureEnd() - else interruptActivities() -}) - -/** - * Reporter module. - * @module reporter - */ -const reporter: Reporter = { - /** - * Strip initial indentation template function. - */ - stripIndent, - format: chalk, - /** - * Toggle verbosity. - * @param {boolean} [_isVerbose=true] - */ - setVerbose: (_isVerbose = true) => { - isVerbose = _isVerbose - }, - /** - * Turn off colors in error output. - * @param {boolean} [isNoColor=false] - */ - setNoColor(isNoColor = false) { - if (isNoColor) { - errorFormatter.withoutColors() - } - - // disables colors in popular terminal output coloring packages - // - chalk: see https://www.npmjs.com/package/chalk#chalksupportscolor - // - ansi-colors: see https://github.com/doowb/ansi-colors/blob/8024126c7115a0efb25a9a0e87bc5e29fd66831f/index.js#L5-L7 - if (isNoColor) { - process.env.FORCE_COLOR = `0` - // chalk determines color level at import time. 
Before we reach this point, - // chalk was already imported, so we need to retroactively adjust level - chalk.level = 0 - } - }, - /** - * Log arguments and exit process with status 1. - * @param {*} args - */ - panic(...args) { - const error = reporter.error(...args) - trackError(`GENERAL_PANIC`, { error }) - prematureEnd() - process.exit(1) - }, - - panicOnBuild(...args) { - const error = reporter.error(...args) - trackError(`BUILD_PANIC`, { error }) - if (process.env.gatsby_executing_command === `build`) { - prematureEnd() - process.exit(1) - } - return error - }, - - error(errorMeta, error) { - let details = {} - // Many paths to retain backcompat :scream: - if (arguments.length === 2) { - if (Array.isArray(error)) { - return error.map(errorItem => this.error(errorMeta, errorItem)) - } - details.error = error - details.context = { - sourceMessage: errorMeta + ` ` + error.message, - } - } else if (arguments.length === 1 && errorMeta instanceof Error) { - details.error = errorMeta - details.context = { - sourceMessage: errorMeta.message, - } - } else if (arguments.length === 1 && Array.isArray(errorMeta)) { - // when we get an array of messages, call this function once for each error - return errorMeta.map(errorItem => this.error(errorItem)) - } else if (arguments.length === 1 && typeof errorMeta === `object`) { - details = Object.assign({}, errorMeta) - } else if (arguments.length === 1 && typeof errorMeta === `string`) { - details.context = { - sourceMessage: errorMeta, - } - } - - const structuredError = constructError({ details }) - if (structuredError) { - reporterActions.createLog(structuredError) - } - - // TODO: remove this once Error component can render this info - // log formatted stacktrace - if (structuredError.error) { - this.log(errorFormatter.render(structuredError.error)) - } - return structuredError - }, - - /** - * Set prefix on uptime. - * @param {string} prefix - A string to prefix uptime with. - */ - uptime(prefix) { - this.verbose(`${prefix}: ${(process.uptime() * 1000).toFixed(3)}ms`) - }, - - verbose: text => { - if (isVerbose) { - reporterActions.createLog({ - level: LogLevels.Debug, - text, - }) - } - }, - - success: addMessage(LogLevels.Success), - info: addMessage(LogLevels.Info), - warn: addMessage(LogLevels.Warning), - log: addMessage(LogLevels.Log), - - pendingActivity: reporterActions.createPendingActivity, - - completeActivity: (id: string, status: string = ActivityStatuses.Success) => { - reporterActions.endActivity({ id, status }) - }, - - /** - * Time an activity. - * @param {string} text - Name of activity. - * @param {ActivityArgs} activityArgs - optional object with tracer parentSpan - * @returns {ActivityTracker} The activity tracker. - */ - activityTimer( - text: string, - activityArgs: ActivityArgs = {} - ): ActivityTracker { - let { parentSpan, id } = activityArgs - const spanArgs = parentSpan ? 
{ childOf: parentSpan } : {} - if (!id) { - id = text - } - - const span = tracer.startSpan(text, spanArgs) - - return { - start: () => { - reporterActions.startActivity({ - id, - text, - type: ActivityTypes.Spinner, - }) - }, - setStatus: statusText => { - reporterActions.setActivityStatusText({ - id, - statusText, - }) - }, - panicOnBuild(...args) { - span.finish() - - reporterActions.setActivityErrored({ - id, - }) - - return reporter.panicOnBuild(...args) - }, - panic(...args) { - span.finish() - - reporterActions.endActivity({ - id, - status: ActivityStatuses.Failed, - }) - - return reporter.panic(...args) - }, - end() { - span.finish() - - reporterActions.endActivity({ - id, - status: ActivityStatuses.Success, - }) - }, - span, - } - }, - - /** - * Create an Activity that is not visible to the user - * - * During the lifecycle of the Gatsby process, sometimes we need to do some - * async work and wait for it to complete. A typical example of this is a job. - * This work should set the status of the process to `in progress` while running and - * `complete` (or `failure`) when complete. Activities do just this! However, they - * are visible to the user. So this function can be used to create a _hidden_ activity - * that while not displayed in the CLI, still triggers a change in process status. - * - * @param {string} text - Name of activity. - * @param {ActivityArgs} activityArgs - optional object with tracer parentSpan - * @returns {ActivityTracker} The activity tracker. - */ - phantomActivity( - text: string, - activityArgs: ActivityArgs = {} - ): ActivityTracker { - let { parentSpan, id } = activityArgs - const spanArgs = parentSpan ? { childOf: parentSpan } : {} - if (!id) { - id = text - } - - const span = tracer.startSpan(text, spanArgs) - - return { - start: () => { - reporterActions.startActivity({ - id, - text, - type: ActivityTypes.Hidden, - }) - }, - end() { - span.finish() - - reporterActions.endActivity({ - id, - status: ActivityStatuses.Success, - }) - }, - span, - } - }, - - /** - * Create a progress bar for an activity - * @param {string} text - Name of activity. - * @param {number} total - Total items to be processed. - * @param {number} start - Start count to show. - * @param {ActivityArgs} activityArgs - optional object with tracer parentSpan - * @returns {ActivityTracker} The activity tracker. - */ - createProgress( - text: string, - total = 0, - start = 0, - activityArgs: ActivityArgs = {} - ): ActivityTracker { - let { parentSpan, id } = activityArgs - const spanArgs = parentSpan ? 
{ childOf: parentSpan } : {} - if (!id) { - id = text - } - const span = tracer.startSpan(text, spanArgs) - - let lastUpdateTime = 0 - let unflushedProgress = 0 - let unflushedTotal = 0 - const progressUpdateDelay = Math.round(1000 / 10) // 10 fps *shrug* - - const updateProgress = forced => { - const t = Date.now() - if (!forced && t - lastUpdateTime <= progressUpdateDelay) return - - if (unflushedTotal > 0) { - reporterActions.setActivityTotal({ id, total: unflushedTotal }) - unflushedTotal = 0 - } - if (unflushedProgress > 0) { - reporterActions.activityTick({ id, increment: unflushedProgress }) - unflushedProgress = 0 - } - lastUpdateTime = t - } - - return { - start: () => { - reporterActions.startActivity({ - id, - text, - type: ActivityTypes.Progress, - current: start, - total, - }) - }, - setStatus: statusText => { - reporterActions.setActivityStatusText({ - id, - statusText, - }) - }, - tick: (increment = 1) => { - unflushedProgress += increment // Have to manually track this :/ - updateProgress() - }, - panicOnBuild(...args) { - span.finish() - - reporterActions.setActivityErrored({ - id, - }) - - return reporter.panicOnBuild(...args) - }, - panic(...args) { - span.finish() - - reporterActions.endActivity({ - id, - status: ActivityStatuses.Failed, - }) - - return reporter.panic(...args) - }, - done: () => { - updateProgress(true) - span.finish() - reporterActions.endActivity({ - id, - status: ActivityStatuses.Success, - }) - }, - set total(value) { - unflushedTotal = value - updateProgress() - }, - span, - } - }, - // This method was called in older versions of gatsby, so we need to keep it to avoid - // "reporter._setStage is not a function" error when gatsby@<2.16 is used with gatsby-cli@>=2.8 - _setStage() {}, -} - -console.log = (...args) => reporter.log(util.format(...args)) -console.warn = (...args) => reporter.warn(util.format(...args)) -console.info = (...args) => reporter.info(util.format(...args)) -console.error = (...args) => reporter.error(util.format(...args)) - -module.exports = reporter diff --git a/packages/gatsby-cli/src/reporter/index.ts b/packages/gatsby-cli/src/reporter/index.ts new file mode 100644 index 0000000000000..1417ea5e2d44d --- /dev/null +++ b/packages/gatsby-cli/src/reporter/index.ts @@ -0,0 +1,11 @@ +import { startLogger } from "./start-logger" +import { patchConsole } from "./patch-console" +import { catchExitSignals } from "./catch-exit-signals" +import { reporter } from "./reporter" + +catchExitSignals() +startLogger() +patchConsole(reporter) + +export default reporter +module.exports = reporter diff --git a/packages/gatsby-cli/src/reporter/patch-console.ts b/packages/gatsby-cli/src/reporter/patch-console.ts new file mode 100644 index 0000000000000..f10d27e0e96c6 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/patch-console.ts @@ -0,0 +1,24 @@ +/* + * This module is used to patch console through our reporter so we can track + * these logs + */ +import util from "util" +import { reporter as gatsbyReporter } from "./reporter" + +export const patchConsole = (reporter: typeof gatsbyReporter): void => { + console.log = (...args: any[]): void => { + const [format, ...rest] = args + reporter.log(util.format(format === undefined ? `` : format, ...rest)) + } + console.warn = (...args: any[]): void => { + const [format, ...rest] = args + reporter.warn(util.format(format === undefined ? `` : format, ...rest)) + } + console.info = (...args: any[]): void => { + const [format, ...rest] = args + reporter.info(util.format(format === undefined ? 
`` : format, ...rest)) + } + console.error = (format: any, ...args: any[]): void => { + reporter.error(util.format(format, ...args)) + } +} diff --git a/packages/gatsby-cli/src/reporter/reporter-phantom.ts b/packages/gatsby-cli/src/reporter/reporter-phantom.ts new file mode 100644 index 0000000000000..b052561c91eb9 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/reporter-phantom.ts @@ -0,0 +1,42 @@ +import reporterActions from "./redux/actions" +import { ActivityStatuses, ActivityTypes } from "./constants" +import { Span } from "opentracing" + +interface ICreatePhantomReporterArguments { + text: string + id: string + span: Span +} + +export interface IPhantomReporter { + start(): void + end(): void + span: Span +} + +export const createPhantomReporter = ({ + text, + id, + span, +}: ICreatePhantomReporterArguments): IPhantomReporter => { + return { + start(): void { + reporterActions.startActivity({ + id, + text, + type: ActivityTypes.Hidden, + }) + }, + + end(): void { + span.finish() + + reporterActions.endActivity({ + id, + status: ActivityStatuses.Success, + }) + }, + + span, + } +} diff --git a/packages/gatsby-cli/src/reporter/reporter-progress.ts b/packages/gatsby-cli/src/reporter/reporter-progress.ts new file mode 100644 index 0000000000000..eb0477bb8d177 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/reporter-progress.ts @@ -0,0 +1,122 @@ +import reporterActions from "./redux/actions" +import { ActivityStatuses, ActivityTypes } from "./constants" +import { Span } from "opentracing" +import { reporter as gatsbyReporter } from "./reporter" +import { IStructuredError } from "../structured-errors/types" +import { ErrorMeta } from "./types" + +interface ICreateProgressReporterArguments { + id: string + text: string + start: number + total: number + span: Span + reporter: typeof gatsbyReporter +} + +export interface IProgressReporter { + start(): void + setStatus(statusText: string): void + tick(increment?: number): void + panicOnBuild( + arg: any, + ...otherArgs: any[] + ): IStructuredError | IStructuredError[] + panic(arg: any, ...otherArgs: any[]): void + done(): void + total: number + span: Span +} + +export const createProgressReporter = ({ + id, + text, + start, + total, + span, + reporter, +}: ICreateProgressReporterArguments): IProgressReporter => { + let lastUpdateTime = 0 + let unflushedProgress = 0 + let unflushedTotal = 0 + const progressUpdateDelay = Math.round(1000 / 10) // 10 fps *shrug* + + const updateProgress = (forced: boolean = false): void => { + const t = Date.now() + if (!forced && t - lastUpdateTime <= progressUpdateDelay) return + + if (unflushedTotal > 0) { + reporterActions.setActivityTotal({ id, total: unflushedTotal }) + unflushedTotal = 0 + } + if (unflushedProgress > 0) { + reporterActions.activityTick({ id, increment: unflushedProgress }) + unflushedProgress = 0 + } + lastUpdateTime = t + } + + return { + start(): void { + reporterActions.startActivity({ + id, + text, + type: ActivityTypes.Progress, + current: start, + total, + }) + }, + + setStatus(statusText: string): void { + reporterActions.setActivityStatusText({ + id, + statusText, + }) + }, + + tick(increment: number = 1): void { + unflushedProgress += increment // Have to manually track this :/ + updateProgress() + }, + + panicOnBuild( + errorMeta: ErrorMeta, + error?: Error | Error[] + ): IStructuredError | IStructuredError[] { + span.finish() + + reporterActions.setActivityErrored({ + id, + }) + + return reporter.panicOnBuild(errorMeta, error) + }, + + panic(errorMeta: ErrorMeta, 
error?: Error | Error[]): void { + span.finish() + + reporterActions.endActivity({ + id, + status: ActivityStatuses.Failed, + }) + + return reporter.panic(errorMeta, error) + }, + + done(): void { + updateProgress(true) + span.finish() + reporterActions.endActivity({ + id, + status: ActivityStatuses.Success, + }) + }, + + set total(value: number) { + unflushedTotal = value + updateProgress() + }, + + span, + } +} diff --git a/packages/gatsby-cli/src/reporter/reporter-timer.ts b/packages/gatsby-cli/src/reporter/reporter-timer.ts new file mode 100644 index 0000000000000..3d19179422fdf --- /dev/null +++ b/packages/gatsby-cli/src/reporter/reporter-timer.ts @@ -0,0 +1,88 @@ +/* + * This module is used when calling reporter. + * these logs + */ +import reporterActions from "./redux/actions" +import { ActivityStatuses, ActivityTypes } from "./constants" +import { Span } from "opentracing" +import { reporter as gatsbyReporter } from "./reporter" +import { IStructuredError } from "../structured-errors/types" +import { ErrorMeta } from "./types" + +interface ICreateTimerReporterArguments { + text: string + id: string + span: Span + reporter: typeof gatsbyReporter +} + +export interface ITimerReporter { + start(): void + setStatus(statusText: string): void + panicOnBuild( + arg: any, + ...otherArgs: any[] + ): IStructuredError | IStructuredError[] + panic(arg: any, ...otherArgs: any[]): void + end(): void + span: Span +} + +export const createTimerReporter = ({ + text, + id, + span, + reporter, +}: ICreateTimerReporterArguments): ITimerReporter => { + return { + start(): void { + reporterActions.startActivity({ + id, + text, + type: ActivityTypes.Spinner, + }) + }, + + setStatus(statusText: string): void { + reporterActions.setActivityStatusText({ + id, + statusText, + }) + }, + + panicOnBuild( + errorMeta: ErrorMeta, + error?: Error | Error[] + ): IStructuredError | IStructuredError[] { + span.finish() + + reporterActions.setActivityErrored({ + id, + }) + + return reporter.panicOnBuild(errorMeta, error) + }, + + panic(errorMeta: ErrorMeta, error?: Error | Error[]): void { + span.finish() + + reporterActions.endActivity({ + id, + status: ActivityStatuses.Failed, + }) + + return reporter.panic(errorMeta, error) + }, + + end(): void { + span.finish() + + reporterActions.endActivity({ + id, + status: ActivityStatuses.Success, + }) + }, + + span, + } +} diff --git a/packages/gatsby-cli/src/reporter/reporter.ts b/packages/gatsby-cli/src/reporter/reporter.ts new file mode 100644 index 0000000000000..87be5a5b65f35 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/reporter.ts @@ -0,0 +1,259 @@ +import { stripIndent } from "common-tags" +import chalk from "chalk" +import { trackError } from "gatsby-telemetry" +import { globalTracer, Span } from "opentracing" + +import reporterActions from "./redux/actions" +import { LogLevels, ActivityStatuses } from "./constants" +import { getErrorFormatter } from "./errors" +import constructError from "../structured-errors/construct-error" +import { prematureEnd } from "./catch-exit-signals" +import { IStructuredError } from "../structured-errors/types" +import { createTimerReporter, ITimerReporter } from "./reporter-timer" +import { createPhantomReporter, IPhantomReporter } from "./reporter-phantom" +import { createProgressReporter, IProgressReporter } from "./reporter-progress" +import { ErrorMeta, CreateLogAction } from "./types" + +const errorFormatter = getErrorFormatter() +const tracer = globalTracer() + +interface IActivityArgs { + id?: string + parentSpan?: Span 
+} + +let isVerbose = false + +/** + * Reporter module. + * @module reporter + */ +class Reporter { + /** + * Strip initial indentation template function. + */ + stripIndent = stripIndent + format = chalk + + /** + * Toggle verbosity. + */ + setVerbose = (_isVerbose: boolean = true): void => { + isVerbose = _isVerbose + } + + /** + * Turn off colors in error output. + */ + setNoColor = (isNoColor: boolean = false): void => { + if (isNoColor) { + errorFormatter.withoutColors() + } + + // disables colors in popular terminal output coloring packages + // - chalk: see https://www.npmjs.com/package/chalk#chalksupportscolor + // - ansi-colors: see https://github.com/doowb/ansi-colors/blob/8024126c7115a0efb25a9a0e87bc5e29fd66831f/index.js#L5-L7 + if (isNoColor) { + process.env.FORCE_COLOR = `0` + // chalk determines color level at import time. Before we reach this point, + // chalk was already imported, so we need to retroactively adjust level + chalk.level = 0 + } + } + + /** + * Log arguments and exit process with status 1. + */ + panic = (errorMeta: ErrorMeta, error?: Error | Error[]): void => { + const reporterError = this.error(errorMeta, error) + trackError(`GENERAL_PANIC`, { error: reporterError }) + prematureEnd() + process.exit(1) + } + + panicOnBuild = ( + errorMeta: ErrorMeta, + error?: Error | Error[] + ): IStructuredError | IStructuredError[] => { + const reporterError = this.error(errorMeta, error) + trackError(`BUILD_PANIC`, { error: reporterError }) + if (process.env.gatsby_executing_command === `build`) { + prematureEnd() + process.exit(1) + } + return reporterError + } + + error = ( + errorMeta: ErrorMeta, + error?: Error | Error[] + ): IStructuredError | IStructuredError[] => { + let details: { + error?: Error + context: {} + } = { + context: {}, + } + + // Many paths to retain backcompat :scream: + // 1. + // reporter.error(any, Error); + // reporter.error(any, [Error]); + if (error) { + if (Array.isArray(error)) { + return error.map(errorItem => + this.error(errorMeta, errorItem) + ) as IStructuredError[] + } + details.error = error + details.context = { + sourceMessage: errorMeta + ` ` + error.message, + } + // 2. + // reporter.error(Error); + } else if (errorMeta instanceof Error) { + details.error = errorMeta + details.context = { + sourceMessage: errorMeta.message, + } + // 3. + // reporter.error([Error]); + } else if (Array.isArray(errorMeta)) { + // when we get an array of messages, call this function once for each error + return errorMeta.map(errorItem => + this.error(errorItem) + ) as IStructuredError[] + // 4. + // reporter.error(errorMeta); + } else if (typeof errorMeta === `object`) { + details = { ...errorMeta } + // 5. + // reporter.error('foo'); + } else if (typeof errorMeta === `string`) { + details.context = { + sourceMessage: errorMeta, + } + } + + const structuredError = constructError({ details }) + if (structuredError) { + reporterActions.createLog(structuredError) + } + + // TODO: remove this once Error component can render this info + // log formatted stacktrace + if (structuredError.error) { + this.log(errorFormatter.render(structuredError.error)) + } + return structuredError + } + + /** + * Set prefix on uptime. 
+ */ + uptime = (prefix: string): void => { + this.verbose(`${prefix}: ${(process.uptime() * 1000).toFixed(3)}ms`) + } + + verbose = (text: string): void => { + if (isVerbose) { + reporterActions.createLog({ + level: LogLevels.Debug, + text, + }) + } + } + + success = (text?: string): CreateLogAction => + reporterActions.createLog({ level: LogLevels.Success, text }) + info = (text?: string): CreateLogAction => + reporterActions.createLog({ level: LogLevels.Info, text }) + warn = (text?: string): CreateLogAction => + reporterActions.createLog({ level: LogLevels.Warning, text }) + log = (text?: string): CreateLogAction => + reporterActions.createLog({ level: LogLevels.Log, text }) + + pendingActivity = reporterActions.createPendingActivity + + completeActivity = ( + id: string, + status: ActivityStatuses = ActivityStatuses.Success + ): void => { + reporterActions.endActivity({ id, status }) + } + + /** + * Time an activity. + */ + activityTimer = ( + text: string, + activityArgs: IActivityArgs = {} + ): ITimerReporter => { + let { parentSpan, id } = activityArgs + const spanArgs = parentSpan ? { childOf: parentSpan } : {} + if (!id) { + id = text + } + + const span = tracer.startSpan(text, spanArgs) + + return createTimerReporter({ text, id, span, reporter: this }) + } + + /** + * Create an Activity that is not visible to the user + * + * During the lifecycle of the Gatsby process, sometimes we need to do some + * async work and wait for it to complete. A typical example of this is a job. + * This work should set the status of the process to `in progress` while running and + * `complete` (or `failure`) when complete. Activities do just this! However, they + * are visible to the user. So this function can be used to create a _hidden_ activity + * that while not displayed in the CLI, still triggers a change in process status. + */ + phantomActivity = ( + text: string, + activityArgs: IActivityArgs = {} + ): IPhantomReporter => { + let { parentSpan, id } = activityArgs + const spanArgs = parentSpan ? { childOf: parentSpan } : {} + if (!id) { + id = text + } + + const span = tracer.startSpan(text, spanArgs) + + return createPhantomReporter({ id, text, span }) + } + + /** + * Create a progress bar for an activity + */ + createProgress = ( + text: string, + total = 0, + start = 0, + activityArgs: IActivityArgs = {} + ): IProgressReporter => { + let { parentSpan, id } = activityArgs + const spanArgs = parentSpan ? { childOf: parentSpan } : {} + if (!id) { + id = text + } + const span = tracer.startSpan(text, spanArgs) + + return createProgressReporter({ + id, + text, + total, + start, + span, + reporter: this, + }) + } + + // This method was called in older versions of gatsby, so we need to keep it to avoid + // "reporter._setStage is not a function" error when gatsby@<2.16 is used with gatsby-cli@>=2.8 + _setStage = (): void => {} +} + +export const reporter = new Reporter() diff --git a/packages/gatsby-cli/src/reporter/start-logger.ts b/packages/gatsby-cli/src/reporter/start-logger.ts new file mode 100644 index 0000000000000..bb6166fb0cee9 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/start-logger.ts @@ -0,0 +1,40 @@ +/* + * This module is a side-effect filled module to load in the proper logger. 
+ */ +import semver from "semver" +import { isCI } from "gatsby-core-utils" + +export const startLogger = (): void => { + let inkExists = false + try { + inkExists = !!require.resolve(`ink`) + // eslint-disable-next-line no-empty + } catch (err) {} + + if (!process.env.GATSBY_LOGGER) { + if ( + inkExists && + semver.satisfies(process.version, `>=8`) && + !isCI() && + typeof jest === `undefined` + ) { + process.env.GATSBY_LOGGER = `ink` + } else { + process.env.GATSBY_LOGGER = `yurnalist` + } + } + // if child process - use ipc logger + if (process.send) { + // process.env.FORCE_COLOR = `0` + + require(`./loggers/ipc`) + } + + if (process.env.GATSBY_LOGGER.includes(`json`)) { + require(`./loggers/json`) + } else if (process.env.GATSBY_LOGGER.includes(`yurnalist`)) { + require(`./loggers/yurnalist`) + } else { + require(`./loggers/ink`) + } +} diff --git a/packages/gatsby-cli/src/reporter/types.js b/packages/gatsby-cli/src/reporter/types.js deleted file mode 100644 index e0131dbc55965..0000000000000 --- a/packages/gatsby-cli/src/reporter/types.js +++ /dev/null @@ -1,31 +0,0 @@ -// @flow - -export type ActivityTracker = { - start(): Function, - end(): Function, - setStatus(status: string): Function, - span: Object, -} - -export type ActivityArgs = { - parentSpan?: Object, -} - -type LogMessageType = (format: string, ...args: Array) => void - -export interface Reporter { - stripIndent: Function; - format: Object; - setVerbose(isVerbose: boolean): void; - setNoColor(isNoColor: boolean): void; - panic(...args: Array): void; - panicOnBuild(...args: Array): void; - error(errorMeta: string | Object, error?: Object): void; - uptime(prefix: string): void; - success: LogMessageType; - verbose: LogMessageType; - info: LogMessageType; - warn: LogMessageType; - log: LogMessageType; - activityTimer(name: string, activityArgs: ActivityArgs): ActivityTracker; -} diff --git a/packages/gatsby-cli/src/reporter/types.ts b/packages/gatsby-cli/src/reporter/types.ts new file mode 100644 index 0000000000000..7a97a7118b832 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/types.ts @@ -0,0 +1,12 @@ +// TODO: This needs to be implemented when redux/acitons is converted to TS +export type CreateLogAction = any + +export type ErrorMeta = + | { + id: string + error?: Error + context: Record + [id: string]: any + } + | string + | Error diff --git a/packages/gatsby-plugin-fullstory/CHANGELOG.md b/packages/gatsby-plugin-fullstory/CHANGELOG.md index 100ebfc71327d..c78663b3ceb69 100644 --- a/packages/gatsby-plugin-fullstory/CHANGELOG.md +++ b/packages/gatsby-plugin-fullstory/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
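The converted reporter above exposes three activity flavors: spinner timers, hidden phantom activities, and progress bars. A minimal usage sketch against the interfaces shown in `reporter.ts` and `reporter-progress.ts`; the `start` and `tick` calls on the progress reporter come from the part of `IProgressReporter` not included in this hunk, and the activity names are made up:

```ts
import { reporter } from "./reporter"

// Spinner-style activity: start, update the status text, end.
const activity = reporter.activityTimer(`build schema`)
activity.start()
activity.setStatus(`inferring types`)
// ...do the work...
activity.end()

// Progress-style activity: report work done against a running total.
const progress = reporter.createProgress(`source pages`, 100)
progress.start()
progress.tick() // advance the bar by one unit
progress.total = 150 // the total can be raised while the activity runs
progress.done()
```

Both helpers also expose `panic`/`panicOnBuild`, which finish the OpenTracing span and end the activity as `Failed` before delegating to the top-level reporter.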
+## [2.2.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-fullstory@2.2.1...gatsby-plugin-fullstory@2.2.2) (2020-04-14) + +### Bug Fixes + +- **gatsby-plugin-fullstory:** Updated to Edge URL + Add Update… ([#23089](https://github.com/gatsbyjs/gatsby/issues/23089)) ([843cf3a](https://github.com/gatsbyjs/gatsby/commit/843cf3a)), closes [#23088](https://github.com/gatsbyjs/gatsby/issues/23088) + ## [2.2.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-fullstory@2.2.0...gatsby-plugin-fullstory@2.2.1) (2020-03-23) **Note:** Version bump only for package gatsby-plugin-fullstory diff --git a/packages/gatsby-plugin-fullstory/package.json b/packages/gatsby-plugin-fullstory/package.json index 5f7a2cfa248a8..55855af082567 100644 --- a/packages/gatsby-plugin-fullstory/package.json +++ b/packages/gatsby-plugin-fullstory/package.json @@ -1,6 +1,6 @@ { "name": "gatsby-plugin-fullstory", - "version": "2.2.1", + "version": "2.2.2", "description": "Plugin to add the tracking code for Fullstory.com", "main": "index.js", "scripts": { diff --git a/packages/gatsby-plugin-fullstory/src/__tests__/__snapshots__/gatsby-ssr.js.snap b/packages/gatsby-plugin-fullstory/src/__tests__/__snapshots__/gatsby-ssr.js.snap index c5abc86c05878..9bb812c868d87 100644 --- a/packages/gatsby-plugin-fullstory/src/__tests__/__snapshots__/gatsby-ssr.js.snap +++ b/packages/gatsby-plugin-fullstory/src/__tests__/__snapshots__/gatsby-ssr.js.snap @@ -10,7 +10,7 @@ Array [ "__html": " window['_fs_debug'] = false; window['_fs_host'] = 'fullstory.com'; -window['_fs_script'] = 'fullstory.com/s/fs.js'; +window['_fs_script'] = 'edge.fullstory.com/s/fs.js'; window['_fs_org'] = 'test-org'; window['_fs_namespace'] = 'FS'; (function(m,n,e,t,l,o,g,y){ @@ -19,11 +19,15 @@ window['_fs_namespace'] = 'FS'; o=n.createElement(t);o.async=1;o.crossOrigin='anonymous';o.src='https://'+_fs_script; y=n.getElementsByTagName(t)[0];y.parentNode.insertBefore(o,y); g.identify=function(i,v,s){g(l,{uid:i},s);if(v)g(l,v,s)};g.setUserVars=function(v,s){g(l,v,s)};g.event=function(i,v,s){g('event',{n:i,p:v},s)}; + g.anonymize=function(){g.identify(!!0)}; g.shutdown=function(){g(\\"rec\\",!1)};g.restart=function(){g(\\"rec\\",!0)}; - g.log = function(a,b) { g(\\"log\\", [a,b]) }; + g.log = function(a,b){g(\\"log\\",[a,b])}; g.consent=function(a){g(\\"consent\\",!arguments.length||a)}; g.identifyAccount=function(i,v){o='account';v=v||{};v.acctId=i;g(o,v)}; g.clearUserCookie=function(){}; + g._w={};y='XMLHttpRequest';g._w[y]=m[y];y='fetch';g._w[y]=m[y]; + if(m[y])m[y]=function(){return g._w[y].apply(this,arguments)}; + g._v=\\"1.2.0\\"; })(window,document,window['_fs_namespace'],'script','user'); ", } diff --git a/packages/gatsby-plugin-fullstory/src/gatsby-ssr.js b/packages/gatsby-plugin-fullstory/src/gatsby-ssr.js index 759645d98eab7..da3c4b444cf19 100644 --- a/packages/gatsby-plugin-fullstory/src/gatsby-ssr.js +++ b/packages/gatsby-plugin-fullstory/src/gatsby-ssr.js @@ -9,7 +9,7 @@ export const onRenderBody = ({ setHeadComponents }, pluginOptions) => { __html: ` window['_fs_debug'] = false; window['_fs_host'] = 'fullstory.com'; -window['_fs_script'] = 'fullstory.com/s/fs.js'; +window['_fs_script'] = 'edge.fullstory.com/s/fs.js'; window['_fs_org'] = '${pluginOptions.fs_org}'; window['_fs_namespace'] = 'FS'; (function(m,n,e,t,l,o,g,y){ @@ -18,11 +18,15 @@ window['_fs_namespace'] = 'FS'; o=n.createElement(t);o.async=1;o.crossOrigin='anonymous';o.src='https://'+_fs_script; 
y=n.getElementsByTagName(t)[0];y.parentNode.insertBefore(o,y); g.identify=function(i,v,s){g(l,{uid:i},s);if(v)g(l,v,s)};g.setUserVars=function(v,s){g(l,v,s)};g.event=function(i,v,s){g('event',{n:i,p:v},s)}; + g.anonymize=function(){g.identify(!!0)}; g.shutdown=function(){g("rec",!1)};g.restart=function(){g("rec",!0)}; - g.log = function(a,b) { g("log", [a,b]) }; + g.log = function(a,b){g("log",[a,b])}; g.consent=function(a){g("consent",!arguments.length||a)}; g.identifyAccount=function(i,v){o='account';v=v||{};v.acctId=i;g(o,v)}; g.clearUserCookie=function(){}; + g._w={};y='XMLHttpRequest';g._w[y]=m[y];y='fetch';g._w[y]=m[y]; + if(m[y])m[y]=function(){return g._w[y].apply(this,arguments)}; + g._v="1.2.0"; })(window,document,window['_fs_namespace'],'script','user'); `, }} diff --git a/packages/gatsby-plugin-mdx/CHANGELOG.md b/packages/gatsby-plugin-mdx/CHANGELOG.md index 4e2e592fd360b..1b227a566c288 100644 --- a/packages/gatsby-plugin-mdx/CHANGELOG.md +++ b/packages/gatsby-plugin-mdx/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.1.7](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-mdx@1.1.6...gatsby-plugin-mdx@1.1.7) (2020-04-10) + +### Bug Fixes + +- **gatsby-plugin-mdx:** Truncate non-latin language excerpts correctly ([#22638](https://github.com/gatsbyjs/gatsby/issues/22638)) ([ec80671](https://github.com/gatsbyjs/gatsby/commit/ec80671)) + ## [1.1.6](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-mdx@1.1.5...gatsby-plugin-mdx@1.1.6) (2020-04-07) ### Bug Fixes diff --git a/packages/gatsby-plugin-mdx/README.md b/packages/gatsby-plugin-mdx/README.md index e84135696b459..ebadfca7cec7e 100644 --- a/packages/gatsby-plugin-mdx/README.md +++ b/packages/gatsby-plugin-mdx/README.md @@ -562,6 +562,22 @@ export const pageQuery = graphql` ` ``` +## Troubleshooting + +### Excerpts for non-latin languages + +By default, `excerpt` uses `underscore.string/prune` which doesn't handle non-latin characters ([https://github.com/epeli/underscore.string/issues/418](https://github.com/epeli/underscore.string/issues/418)). 
+ +If that is the case, you can set `truncate` option on `excerpt` field, like: + +```graphql +{ + markdownRemark { + excerpt(truncate: true) + } +} +``` + ## License MIT diff --git a/packages/gatsby-plugin-mdx/gatsby/source-nodes.js b/packages/gatsby-plugin-mdx/gatsby/source-nodes.js index e22e5dc0a0a6a..04063f32f2a34 100644 --- a/packages/gatsby-plugin-mdx/gatsby/source-nodes.js +++ b/packages/gatsby-plugin-mdx/gatsby/source-nodes.js @@ -1,4 +1,5 @@ const _ = require(`lodash`) +const { GraphQLBoolean } = require(`gatsby/graphql`) const remark = require(`remark`) const english = require(`retext-english`) const remark2retext = require(`remark-retext`) @@ -151,8 +152,12 @@ module.exports = ( type: `Int`, defaultValue: 140, }, + truncate: { + type: GraphQLBoolean, + defaultValue: false, + }, }, - async resolve(mdxNode, { pruneLength }) { + async resolve(mdxNode, { pruneLength, truncate }) { if (mdxNode.excerpt) { return Promise.resolve(mdxNode.excerpt) } @@ -166,7 +171,14 @@ module.exports = ( return }) - return prune(excerptNodes.join(` `), pruneLength, `…`) + if (!truncate) { + return prune(excerptNodes.join(` `), pruneLength, `…`) + } + + return _.truncate(excerptNodes.join(` `), { + length: pruneLength, + omission: `…`, + }) }, }, headings: { diff --git a/packages/gatsby-plugin-mdx/package.json b/packages/gatsby-plugin-mdx/package.json index c4dd73eed21c9..99dfaefc4f21b 100644 --- a/packages/gatsby-plugin-mdx/package.json +++ b/packages/gatsby-plugin-mdx/package.json @@ -1,6 +1,6 @@ { "name": "gatsby-plugin-mdx", - "version": "1.1.6", + "version": "1.1.7", "description": "MDX integration for Gatsby", "main": "index.js", "license": "MIT", diff --git a/packages/gatsby-plugin-react-helmet/CHANGELOG.md b/packages/gatsby-plugin-react-helmet/CHANGELOG.md index 31c5c8978630e..f6dd327340ae9 100644 --- a/packages/gatsby-plugin-react-helmet/CHANGELOG.md +++ b/packages/gatsby-plugin-react-helmet/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [3.2.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-react-helmet@3.2.1...gatsby-plugin-react-helmet@3.2.2) (2020-04-10) + +### Bug Fixes + +- **gatsby-plugin-react-helmet:** allow the use of `react-helmet@6` ([#22993](https://github.com/gatsbyjs/gatsby/issues/22993)) ([ed2762f](https://github.com/gatsbyjs/gatsby/commit/ed2762f)) + ## [3.2.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-react-helmet@3.2.0...gatsby-plugin-react-helmet@3.2.1) (2020-03-23) **Note:** Version bump only for package gatsby-plugin-react-helmet diff --git a/packages/gatsby-plugin-react-helmet/package.json b/packages/gatsby-plugin-react-helmet/package.json index e38c12715069a..c7f42cf1db541 100644 --- a/packages/gatsby-plugin-react-helmet/package.json +++ b/packages/gatsby-plugin-react-helmet/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-plugin-react-helmet", "description": "Manage document head data with react-helmet. 
Provides drop-in server rendering support for Gatsby.", - "version": "3.2.1", + "version": "3.2.2", "author": "Kyle Mathews ", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" @@ -36,7 +36,7 @@ "main": "index.js", "peerDependencies": { "gatsby": "^2.0.0", - "react-helmet": "^5.1.3" + "react-helmet": "^5.1.3 || ^6.0.0" }, "repository": { "type": "git", diff --git a/packages/gatsby-plugin-sitemap/CHANGELOG.md b/packages/gatsby-plugin-sitemap/CHANGELOG.md index ccca8926378b1..291cb26cbd6f6 100644 --- a/packages/gatsby-plugin-sitemap/CHANGELOG.md +++ b/packages/gatsby-plugin-sitemap/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [2.3.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-sitemap@2.3.1...gatsby-plugin-sitemap@2.3.2) (2020-04-14) + +### Bug Fixes + +- **plugin-sitemap:** missing data in query results ([#22843](https://github.com/gatsbyjs/gatsby/issues/22843)) ([cff1d7f](https://github.com/gatsbyjs/gatsby/commit/cff1d7f)), closes [#22703](https://github.com/gatsbyjs/gatsby/issues/22703) + ## [2.3.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-sitemap@2.3.0...gatsby-plugin-sitemap@2.3.1) (2020-03-23) **Note:** Version bump only for package gatsby-plugin-sitemap diff --git a/packages/gatsby-plugin-sitemap/package.json b/packages/gatsby-plugin-sitemap/package.json index 9832f1809401b..09277f50e83b1 100644 --- a/packages/gatsby-plugin-sitemap/package.json +++ b/packages/gatsby-plugin-sitemap/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-plugin-sitemap", "description": "Gatsby plugin that automatically creates a sitemap for your site", - "version": "2.3.1", + "version": "2.3.2", "author": "Nicholas Young <nicholas@nicholaswyoung.com>", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" diff --git a/packages/gatsby-plugin-sitemap/src/__tests__/internals.js b/packages/gatsby-plugin-sitemap/src/__tests__/internals.js index 8b047261d2c74..7a8b5652f25be 100644 --- a/packages/gatsby-plugin-sitemap/src/__tests__/internals.js +++ b/packages/gatsby-plugin-sitemap/src/__tests__/internals.js @@ -137,6 +137,16 @@ describe(`results using non default alternatives`, () => { }, ], }, + otherData: { + nodes: [ + { + name: `test`, + }, + { + name: `test 2`, + }, + ], + }, }, } } @@ -161,5 +171,6 @@ describe(`results using non default alternatives`, () => { const queryRecords = filterQuery(results, [], ``, customSiteResolver) expect(queryRecords.site.siteMetadata.siteUrl).toEqual(customUrl) + expect(queryRecords).toHaveProperty(`otherData`) }) }) diff --git a/packages/gatsby-plugin-sitemap/src/internals.js b/packages/gatsby-plugin-sitemap/src/internals.js index 8e61b4afec0cc..7b5a8356a8425 100644 --- a/packages/gatsby-plugin-sitemap/src/internals.js +++ b/packages/gatsby-plugin-sitemap/src/internals.js @@ -20,7 +20,9 @@ export function filterQuery( throw new Error(errors.join(`, `)) } - let { allPages, originalType } = getNodes(data.allSitePage) + const { allSitePage, ...otherData } = data + + let { allPages, originalType } = getNodes(allSitePage) // Removing excluded paths allPages = allPages.filter( @@ -53,6 +55,7 @@ export function filterQuery( siteUrl = withoutTrailingSlash(siteUrl) return { + ...otherData, allSitePage: { [originalType]: originalType === `nodes` diff --git a/packages/gatsby-remark-embed-snippet/CHANGELOG.md 
b/packages/gatsby-remark-embed-snippet/CHANGELOG.md index 042457e287261..a748cc1e70d1f 100644 --- a/packages/gatsby-remark-embed-snippet/CHANGELOG.md +++ b/packages/gatsby-remark-embed-snippet/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [4.2.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-remark-embed-snippet@4.2.1...gatsby-remark-embed-snippet@4.2.2) (2020-04-11) + +### Features + +- **remark-embed-snippet:** embed specific lines ([#21907](https://github.com/gatsbyjs/gatsby/issues/21907)) ([109b905](https://github.com/gatsbyjs/gatsby/commit/109b905)) + ## [4.2.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-remark-embed-snippet@4.2.0...gatsby-remark-embed-snippet@4.2.1) (2020-03-23) **Note:** Version bump only for package gatsby-remark-embed-snippet diff --git a/packages/gatsby-remark-embed-snippet/README.md b/packages/gatsby-remark-embed-snippet/README.md index 2e34e16a62f11..f1568b03c06fc 100644 --- a/packages/gatsby-remark-embed-snippet/README.md +++ b/packages/gatsby-remark-embed-snippet/README.md @@ -185,9 +185,8 @@ The resulting HTML generated from the markdown file above would look something l ### Highlighting Lines -You can also specify specific lines for Prism to highlight using -`highlight-line` and `highlight-next-line` comments. You can also specify a -range of lines to highlight, relative to a `highlight-range` comment. +You can specify specific lines for Prism to highlight using +`highlight-line` and `highlight-next-line` comments. You can also specify a range of lines to highlight, relative to a `highlight-range` comment. **JavaScript example**: @@ -250,8 +249,49 @@ quz: "highlighted" It's also possible to specify a range of lines to be hidden. +You can either specify line ranges in the embed using the syntax: + +- #Lx - Embed one line from a file +- #Lx-y - Embed a range of lines from a file +- #Lx-y,a-b - Embed non-consecutive ranges of lines from a file + +**Markdown example**: + +```markdown +This is the JSX of my app: + +`embed:App.js#L6-8` +``` + +With this example snippet: + +```js +import React from "react" +import ReactDOM from "react-dom" + +function App() { + return ( +
+    <div>
+      <h1>Hello world</h1>
+    </div>
+ ) +} +``` + +Will produce something like this: + +```markdown +This is the JSX of my app: + +
+    <div>
+      <h1>Hello world</h1>
+    </div>
+``` + **JavaScript example**: +You can also add `// hide-range` comments to your files. + ```jsx // hide-range{1-2} import React from "react" diff --git a/packages/gatsby-remark-embed-snippet/package.json b/packages/gatsby-remark-embed-snippet/package.json index 15862533f3f20..750f64af93a3c 100644 --- a/packages/gatsby-remark-embed-snippet/package.json +++ b/packages/gatsby-remark-embed-snippet/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-remark-embed-snippet", "description": "Gatsby plugin to embed formatted code snippets within markdown", - "version": "4.2.1", + "version": "4.2.2", "author": "Brian Vaughn ", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" diff --git a/packages/gatsby-remark-embed-snippet/src/__tests__/index.js b/packages/gatsby-remark-embed-snippet/src/__tests__/index.js index a1c1fca9e2831..65bac278bbd0b 100644 --- a/packages/gatsby-remark-embed-snippet/src/__tests__/index.js +++ b/packages/gatsby-remark-embed-snippet/src/__tests__/index.js @@ -36,6 +36,56 @@ describe(`gatsby-remark-embed-snippet`, () => { ) }) + it(`should display a code block of a single line`, () => { + const codeBlockValue = ` console.log('hello world')` + fs.readFileSync.mockReturnValue(`function test() { +${codeBlockValue} +}`) + + const markdownAST = remark.parse(`\`embed:hello-world.js#L2\``) + const transformed = plugin({ markdownAST }, { directory: `examples` }) + + const codeBlock = transformed.children[0].children[0] + + expect(codeBlock.value).toEqual(codeBlockValue) + }) + + it(`should display a code block of a range of lines`, () => { + const codeBlockValue = ` if (window.location.search.indexOf('query') > -1) { + console.log('The user is searching') +}` + fs.readFileSync.mockReturnValue(`function test() { +${codeBlockValue} +}`) + + const markdownAST = remark.parse(`\`embed:hello-world.js#L2-4\``) + const transformed = plugin({ markdownAST }, { directory: `examples` }) + + const codeBlock = transformed.children[0].children[0] + + expect(codeBlock.value).toEqual(codeBlockValue) + }) + + it(`should display a code block of a range of non-consecutive lines`, () => { + const notInSnippet = `lineShouldNotBeInSnippet();` + fs.readFileSync.mockReturnValue(`function test() { + if (window.location.search.indexOf('query') > -1) { + console.log('The user is searching') + } +} +${notInSnippet} +window.addEventListener('resize', () => { + test(); +})`) + + const markdownAST = remark.parse(`\`embed:hello-world.js#L2-4,7-9\``) + const transformed = plugin({ markdownAST }, { directory: `examples` }) + + const codeBlock = transformed.children[0].children[0] + + expect(codeBlock.value).not.toContain(notInSnippet) + }) + it(`should error if an invalid file path is specified`, () => { fs.existsSync.mockImplementation(path => path !== `examples/hello-world.js`) diff --git a/packages/gatsby-remark-embed-snippet/src/index.js b/packages/gatsby-remark-embed-snippet/src/index.js index dd9fec7dcdd92..c5fcb8d6ff9a4 100644 --- a/packages/gatsby-remark-embed-snippet/src/index.js +++ b/packages/gatsby-remark-embed-snippet/src/index.js @@ -4,6 +4,7 @@ const path = require(`path`) const fs = require(`fs`) const normalizePath = require(`normalize-path`) const visit = require(`unist-util-visit`) +const rangeParser = require(`parse-numeric-range`) // Language defaults to extension.toLowerCase(); // This map tracks languages that don't match their extension. 
@@ -46,13 +47,33 @@ module.exports = ({ markdownAST, markdownNode }, { directory } = {}) => { if (value.startsWith(`embed:`)) { const file = value.substr(6) - const snippetPath = normalizePath(path.join(directory, file)) + let snippetPath = normalizePath(path.join(directory, file)) + + // Embed specific lines numbers of a file + let lines = [] + const rangePrefixIndex = snippetPath.indexOf(`#L`) + if (rangePrefixIndex > -1) { + const range = snippetPath.slice(rangePrefixIndex + 2) + if (range.length === 1) { + lines = [Number.parseInt(range, 10)] + } else { + lines = rangeParser.parse(range) + } + // Remove everything after the range prefix from file path + snippetPath = snippetPath.slice(0, rangePrefixIndex) + } if (!fs.existsSync(snippetPath)) { throw Error(`Invalid snippet specified; no such file "${snippetPath}"`) } - const code = fs.readFileSync(snippetPath, `utf8`).trim() + let code = fs.readFileSync(snippetPath, `utf8`).trim() + if (lines.length) { + code = code + .split(`\n`) + .filter((_, lineNumber) => lines.includes(lineNumber + 1)) + .join(`\n`) + } // PrismJS's theme styles are targeting pre[class*="language-"] // to apply its styles. We do the same here so that users @@ -60,7 +81,7 @@ module.exports = ({ markdownAST, markdownNode }, { directory } = {}) => { // outcome without any additional CSS. // // @see https://github.com/PrismJS/prism/blob/1d5047df37aacc900f8270b1c6215028f6988eb1/themes/prism.css#L49-L54 - const language = getLanguage(file) + const language = getLanguage(snippetPath) // Change the node type to code, insert our file as value and set language. node.type = `code` diff --git a/packages/gatsby-source-drupal/CHANGELOG.md b/packages/gatsby-source-drupal/CHANGELOG.md index fbd96ff044527..4d4deb18e7461 100644 --- a/packages/gatsby-source-drupal/CHANGELOG.md +++ b/packages/gatsby-source-drupal/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
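The `gatsby-remark-embed-snippet` change above splits an optional `#L…` suffix off the snippet path and keeps only the requested lines. A condensed sketch of that flow using the same `parse-numeric-range` dependency; the standalone `extractLines` helper is illustrative, not an export of the plugin:

```ts
import rangeParser from "parse-numeric-range"

// "examples/App.js#L2-4,7-9" -> only lines 2-4 and 7-9 of the file contents.
function extractLines(snippetPath: string, source: string): string {
  const rangePrefixIndex = snippetPath.indexOf(`#L`)
  if (rangePrefixIndex === -1) {
    return source
  }

  const range = snippetPath.slice(rangePrefixIndex + 2) // e.g. "2-4,7-9"
  const lines =
    range.length === 1
      ? [Number.parseInt(range, 10)] // single line, e.g. "#L2"
      : rangeParser.parse(range) // -> [2, 3, 4, 7, 8, 9]

  return source
    .split(`\n`)
    .filter((_, index) => lines.includes(index + 1)) // ranges are 1-based
    .join(`\n`)
}
```

The real plugin also strips the `#L…` suffix before resolving the file on disk, which is why `getLanguage` now receives `snippetPath` instead of the raw `file` value.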
+## [3.4.3](https://github.com/gatsbyjs/gatsby/compare/gatsby-source-drupal@3.4.2...gatsby-source-drupal@3.4.3) (2020-04-10) + +### Bug Fixes + +- **gatsby-source-drupal:** Verify nodes exist before looping through them ([#22898](https://github.com/gatsbyjs/gatsby/issues/22898)) ([cdbe734](https://github.com/gatsbyjs/gatsby/commit/cdbe734)) + ## [3.4.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-source-drupal@3.4.1...gatsby-source-drupal@3.4.2) (2020-03-23) **Note:** Version bump only for package gatsby-source-drupal diff --git a/packages/gatsby-source-drupal/package.json b/packages/gatsby-source-drupal/package.json index 8416ff92bfce2..77c3868d5ffac 100644 --- a/packages/gatsby-source-drupal/package.json +++ b/packages/gatsby-source-drupal/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-source-drupal", "description": "Gatsby source plugin for building websites using the Drupal CMS as a data source", - "version": "3.4.2", + "version": "3.4.3", "author": "Kyle Mathews ", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" diff --git a/packages/gatsby-source-drupal/src/utils.js b/packages/gatsby-source-drupal/src/utils.js index 44cf6e110d66a..ba8020c98c783 100644 --- a/packages/gatsby-source-drupal/src/utils.js +++ b/packages/gatsby-source-drupal/src/utils.js @@ -147,10 +147,10 @@ const handleWebhookUpdate = async ( nodesToUpdate.push(...addedReferencedNodes) } else { // if we are inserting new node, we need to update all referenced nodes - const newNodeReferencedNodes = referencedNodesLookup - .get(newNode) - .map(id => getNode(id)) - nodesToUpdate.push(...newNodeReferencedNodes) + const newNodes = referencedNodesLookup.get(newNode) + if (typeof newNodes !== `undefined`) { + newNodes.forEach(id => nodesToUpdate.push(getNode(id))) + } } // download file diff --git a/packages/gatsby-source-graphql/CHANGELOG.md b/packages/gatsby-source-graphql/CHANGELOG.md index c2cd2581bccb6..ac5b6f24e95e7 100644 --- a/packages/gatsby-source-graphql/CHANGELOG.md +++ b/packages/gatsby-source-graphql/CHANGELOG.md @@ -3,6 +3,10 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+# [2.4.0](https://github.com/gatsbyjs/gatsby/compare/gatsby-source-graphql@2.3.2...gatsby-source-graphql@2.4.0) (2020-04-14) + +**Note:** Version bump only for package gatsby-source-graphql + ## [2.3.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-source-graphql@2.3.1...gatsby-source-graphql@2.3.2) (2020-04-06) ### Bug Fixes diff --git a/packages/gatsby-source-graphql/package.json b/packages/gatsby-source-graphql/package.json index 1345e4e467375..8b3c007ab6d54 100644 --- a/packages/gatsby-source-graphql/package.json +++ b/packages/gatsby-source-graphql/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-source-graphql", "description": "Gatsby plugin which adds a third-party GraphQL API to Gatsby GraphQL", - "version": "2.3.2", + "version": "2.4.0", "author": "Mikhail Novikov ", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" @@ -12,7 +12,7 @@ "apollo-link-http": "^1.5.16", "dataloader": "^2.0.0", "graphql": "^14.6.0", - "graphql-tools-fork": "^8.9.6", + "graphql-tools": "^5.0.0", "invariant": "^2.2.4", "node-fetch": "^1.7.3", "uuid": "^3.4.0" diff --git a/packages/gatsby-source-graphql/src/__tests__/gatsby-node.js b/packages/gatsby-source-graphql/src/__tests__/gatsby-node.js index 2678363539881..679add513536d 100644 --- a/packages/gatsby-source-graphql/src/__tests__/gatsby-node.js +++ b/packages/gatsby-source-graphql/src/__tests__/gatsby-node.js @@ -1,4 +1,4 @@ -jest.mock(`graphql-tools-fork`, () => { +jest.mock(`graphql-tools`, () => { return { transformSchema: jest.fn(), introspectSchema: jest.fn(), diff --git a/packages/gatsby-source-graphql/src/gatsby-node.js b/packages/gatsby-source-graphql/src/gatsby-node.js index 3fddfb6445734..55463559c53ce 100644 --- a/packages/gatsby-source-graphql/src/gatsby-node.js +++ b/packages/gatsby-source-graphql/src/gatsby-node.js @@ -4,7 +4,7 @@ const { transformSchema, introspectSchema, RenameTypes, -} = require(`graphql-tools-fork`) +} = require(`graphql-tools`) const { createHttpLink } = require(`apollo-link-http`) const nodeFetch = require(`node-fetch`) const invariant = require(`invariant`) diff --git a/packages/gatsby-source-graphql/src/transforms.js b/packages/gatsby-source-graphql/src/transforms.js index c39d53fdcff3c..4f700d4470c48 100644 --- a/packages/gatsby-source-graphql/src/transforms.js +++ b/packages/gatsby-source-graphql/src/transforms.js @@ -4,7 +4,7 @@ const { cloneType, healSchema, visitSchema, -} = require(`graphql-tools-fork`) +} = require(`graphql-tools`) class NamespaceUnderFieldTransform { constructor({ typeName, fieldName, resolver }) { diff --git a/packages/gatsby-theme-blog-core/CHANGELOG.md b/packages/gatsby-theme-blog-core/CHANGELOG.md index 558d44108191f..d8d99f6a9b614 100644 --- a/packages/gatsby-theme-blog-core/CHANGELOG.md +++ b/packages/gatsby-theme-blog-core/CHANGELOG.md @@ -3,6 +3,30 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
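The `gatsby-source-graphql` diff above only changes which package the schema helpers come from; the call sites keep the same shape. A small sketch of the `RenameTypes` transform the plugin relies on for namespacing, run here against a local schema rather than an introspected remote one; the type names and `Example_` prefix are placeholders:

```ts
import {
  makeExecutableSchema,
  transformSchema,
  RenameTypes,
} from "graphql-tools"

// A tiny executable schema standing in for the introspected remote schema.
const schema = makeExecutableSchema({
  typeDefs: `
    type Post {
      id: ID!
      title: String
    }
    type Query {
      posts: [Post]
    }
  `,
  resolvers: {
    Query: { posts: () => [{ id: `1`, title: `Hello` }] },
  },
})

// Prefix every type name so the merged schema cannot collide with Gatsby's
// own types, which is what the plugin's typeName option does for remote APIs.
const namespacedSchema = transformSchema(schema, [
  new RenameTypes(name => `Example_${name}`),
])
```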
+## [1.3.22](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.21...gatsby-theme-blog-core@1.3.22) (2020-04-14) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.21](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.20...gatsby-theme-blog-core@1.3.21) (2020-04-14) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.20](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.19...gatsby-theme-blog-core@1.3.20) (2020-04-11) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.19](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.18...gatsby-theme-blog-core@1.3.19) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.18](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.17...gatsby-theme-blog-core@1.3.18) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.17](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.16...gatsby-theme-blog-core@1.3.17) (2020-04-09) + +**Note:** Version bump only for package gatsby-theme-blog-core + ## [1.3.16](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.15...gatsby-theme-blog-core@1.3.16) (2020-04-08) **Note:** Version bump only for package gatsby-theme-blog-core diff --git a/packages/gatsby-theme-blog-core/package.json b/packages/gatsby-theme-blog-core/package.json index 553ab72beec27..4d8403f5826e2 100644 --- a/packages/gatsby-theme-blog-core/package.json +++ b/packages/gatsby-theme-blog-core/package.json @@ -1,6 +1,6 @@ { "name": "gatsby-theme-blog-core", - "version": "1.3.16", + "version": "1.3.22", "main": "index.js", "author": "christopherbiscardi (@chrisbiscardi)", "license": "MIT", @@ -19,7 +19,7 @@ "dependencies": { "@mdx-js/mdx": "^1.5.7", "gatsby-core-utils": "^1.1.1", - "gatsby-plugin-mdx": "^1.1.6", + "gatsby-plugin-mdx": "^1.1.7", "gatsby-plugin-sharp": "^2.5.4", "gatsby-remark-copy-linked-files": "^2.2.1", "gatsby-remark-images": "^3.2.2", @@ -30,7 +30,7 @@ }, "devDependencies": { "@mdx-js/react": "^1.5.7", - "gatsby": "^2.20.14", + "gatsby": "^2.20.20", "prettier": "^1.19.1", "react": "^16.12.0", "react-dom": "^16.12.0" diff --git a/packages/gatsby-theme-blog/CHANGELOG.md b/packages/gatsby-theme-blog/CHANGELOG.md index a7f04f3eb6ad0..cef3adc23e7be 100644 --- a/packages/gatsby-theme-blog/CHANGELOG.md +++ b/packages/gatsby-theme-blog/CHANGELOG.md @@ -3,6 +3,30 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [1.4.22](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.21...gatsby-theme-blog@1.4.22) (2020-04-14) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.21](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.20...gatsby-theme-blog@1.4.21) (2020-04-14) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.20](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.19...gatsby-theme-blog@1.4.20) (2020-04-11) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.19](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.18...gatsby-theme-blog@1.4.19) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.18](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.17...gatsby-theme-blog@1.4.18) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.17](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.16...gatsby-theme-blog@1.4.17) (2020-04-09) + +**Note:** Version bump only for package gatsby-theme-blog + ## [1.4.16](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.15...gatsby-theme-blog@1.4.16) (2020-04-08) **Note:** Version bump only for package gatsby-theme-blog diff --git a/packages/gatsby-theme-blog/package.json b/packages/gatsby-theme-blog/package.json index 8feaef9759545..90d00de18b78c 100644 --- a/packages/gatsby-theme-blog/package.json +++ b/packages/gatsby-theme-blog/package.json @@ -1,6 +1,6 @@ { "name": "gatsby-theme-blog", - "version": "1.4.16", + "version": "1.4.22", "description": "A Gatsby theme for miscellaneous blogging with a dark/light mode", "main": "index.js", "keywords": [ @@ -26,10 +26,10 @@ "gatsby-image": "^2.3.2", "gatsby-plugin-emotion": "^4.2.1", "gatsby-plugin-feed": "^2.4.1", - "gatsby-plugin-react-helmet": "^3.2.1", + "gatsby-plugin-react-helmet": "^3.2.2", "gatsby-plugin-theme-ui": "^0.2.53", "gatsby-plugin-twitter": "^2.2.2", - "gatsby-theme-blog-core": "^1.3.16", + "gatsby-theme-blog-core": "^1.3.22", "mdx-utils": "0.2.0", "react-helmet": "^5.2.1", "react-switch": "^5.0.1", @@ -39,7 +39,7 @@ "typography-theme-wordpress-2016": "^0.16.19" }, "devDependencies": { - "gatsby": "^2.20.14", + "gatsby": "^2.20.20", "prettier": "^1.19.1", "react": "^16.12.0", "react-dom": "^16.12.0" diff --git a/packages/gatsby-theme-notes/CHANGELOG.md b/packages/gatsby-theme-notes/CHANGELOG.md index dee08540f6112..561790739a57c 100644 --- a/packages/gatsby-theme-notes/CHANGELOG.md +++ b/packages/gatsby-theme-notes/CHANGELOG.md @@ -3,6 +3,30 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [1.2.21](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.20...gatsby-theme-notes@1.2.21) (2020-04-14) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.20](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.19...gatsby-theme-notes@1.2.20) (2020-04-14) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.19](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.18...gatsby-theme-notes@1.2.19) (2020-04-11) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.18](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.17...gatsby-theme-notes@1.2.18) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.17](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.16...gatsby-theme-notes@1.2.17) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.16](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.15...gatsby-theme-notes@1.2.16) (2020-04-09) + +**Note:** Version bump only for package gatsby-theme-notes + ## [1.2.15](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.14...gatsby-theme-notes@1.2.15) (2020-04-08) **Note:** Version bump only for package gatsby-theme-notes diff --git a/packages/gatsby-theme-notes/package.json b/packages/gatsby-theme-notes/package.json index eabcbd183d9d2..38c6a44c2287b 100644 --- a/packages/gatsby-theme-notes/package.json +++ b/packages/gatsby-theme-notes/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-theme-notes", "description": "Gatsby Theme for adding a notes section to your website", - "version": "1.2.15", + "version": "1.2.21", "author": "John Otander", "license": "MIT", "main": "index.js", @@ -20,7 +20,7 @@ }, "homepage": "https://github.com/gatsbyjs/gatsby/tree/master/packages/gatsby-theme-notes#readme", "devDependencies": { - "gatsby": "^2.20.14", + "gatsby": "^2.20.20", "react": "^16.12.0", "react-dom": "^16.12.0" }, @@ -39,7 +39,7 @@ "gatsby-core-utils": "^1.1.1", "gatsby-plugin-compile-es6-packages": "^2.1.0", "gatsby-plugin-emotion": "^4.2.1", - "gatsby-plugin-mdx": "^1.1.6", + "gatsby-plugin-mdx": "^1.1.7", "gatsby-plugin-meta-redirect": "^1.1.1", "gatsby-plugin-og-image": "0.0.1", "gatsby-plugin-redirects": "^1.0.0", diff --git a/packages/gatsby-transformer-documentationjs/CHANGELOG.md b/packages/gatsby-transformer-documentationjs/CHANGELOG.md index 56f5d57f6acd8..408e701ce7a84 100644 --- a/packages/gatsby-transformer-documentationjs/CHANGELOG.md +++ b/packages/gatsby-transformer-documentationjs/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [4.2.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-transformer-documentationjs@4.2.1...gatsby-transformer-documentationjs@4.2.2) (2020-04-10) + +### Bug Fixes + +- create unique Nodes ([#22774](https://github.com/gatsbyjs/gatsby/issues/22774)) ([1381a0b](https://github.com/gatsbyjs/gatsby/commit/1381a0b)) + ## [4.2.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-transformer-documentationjs@4.2.0...gatsby-transformer-documentationjs@4.2.1) (2020-03-23) **Note:** Version bump only for package gatsby-transformer-documentationjs diff --git a/packages/gatsby-transformer-documentationjs/package.json b/packages/gatsby-transformer-documentationjs/package.json index 5bbbf2db25b74..42a5a9c675150 100644 --- a/packages/gatsby-transformer-documentationjs/package.json +++ b/packages/gatsby-transformer-documentationjs/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-transformer-documentationjs", "description": "Gatsby transformer plugin which uses Documentation.js to extract JavaScript documentation", - "version": "4.2.1", + "version": "4.2.2", "author": "Kyle Mathews", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" diff --git a/packages/gatsby-transformer-documentationjs/src/__tests__/__snapshots__/gatsby-node.js.snap b/packages/gatsby-transformer-documentationjs/src/__tests__/__snapshots__/gatsby-node.js.snap index 105d9376297cd..8c960d06742fe 100644 --- a/packages/gatsby-transformer-documentationjs/src/__tests__/__snapshots__/gatsby-node.js.snap +++ b/packages/gatsby-transformer-documentationjs/src/__tests__/__snapshots__/gatsby-node.js.snap @@ -21,11 +21,11 @@ Object { exports[`transformer-react-doc-gen: onCreateNode Complex example should handle typedefs should handle type applications 1`] = ` Object { "children": Array [ - "documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3--DocumentationJSComponentDescription--comment.description", + "documentationJS documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"}] line 12 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3--DocumentationJSComponentDescription--comment.description", ], "commentNumber": null, - "description___NODE": "documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3--DocumentationJSComponentDescription--comment.description", - "id": "documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3", + "description___NODE": "documentationJS documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"}] line 12 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3--DocumentationJSComponentDescription--comment.description", + "id": "documentationJS documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"}] line 12 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3", "internal": Object { "contentDigest": "content-digest", "type": "DocumentationJs", diff --git a/packages/gatsby-transformer-documentationjs/src/gatsby-node.js b/packages/gatsby-transformer-documentationjs/src/gatsby-node.js index 
a45bddd7bc9c8..d3e34aaf4323f 100644 --- a/packages/gatsby-transformer-documentationjs/src/gatsby-node.js +++ b/packages/gatsby-transformer-documentationjs/src/gatsby-node.js @@ -274,7 +274,7 @@ exports.onCreateNode = async ({ node, actions, ...helpers }) => { const docSkeletonNode = { commentNumber, level, - id: createNodeId(docId(node.id, docsJson)), + id: createNodeId(docId(parent, docsJson)), parent, children: [], internal: { diff --git a/packages/gatsby/CHANGELOG.md b/packages/gatsby/CHANGELOG.md index b73d8dffcc0e5..0ff6dacd72283 100644 --- a/packages/gatsby/CHANGELOG.md +++ b/packages/gatsby/CHANGELOG.md @@ -3,6 +3,42 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [2.20.20](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.19...gatsby@2.20.20) (2020-04-14) + +### Performance Improvements + +- **gatsby:** Support `lte` for indexed fast filters ([#22932](https://github.com/gatsbyjs/gatsby/issues/22932)) ([fd57224](https://github.com/gatsbyjs/gatsby/commit/fd57224)) + +## [2.20.19](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.18...gatsby@2.20.19) (2020-04-14) + +### Bug Fixes + +- **gatsby:** call schema rebuild manually on \_\_refresh ([#23009](https://github.com/gatsbyjs/gatsby/issues/23009)) ([8493de8](https://github.com/gatsbyjs/gatsby/commit/8493de8)) +- **gatsby:** Set a timeout of 15 seconds on queries ([#23036](https://github.com/gatsbyjs/gatsby/issues/23036)) ([1e81c76](https://github.com/gatsbyjs/gatsby/commit/1e81c76)) +- Ensure component order is deterministic ([#22965](https://github.com/gatsbyjs/gatsby/issues/22965)) ([94267be](https://github.com/gatsbyjs/gatsby/commit/94267be)) + +## [2.20.18](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.17...gatsby@2.20.18) (2020-04-11) + +### Bug Fixes + +- **gatsby:** Use `moveSync` over `renameSync` to fix cross mount cases ([#23029](https://github.com/gatsbyjs/gatsby/issues/23029)) ([96f8d4b](https://github.com/gatsbyjs/gatsby/commit/96f8d4b)) + +## [2.20.17](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.16...gatsby@2.20.17) (2020-04-10) + +**Note:** Version bump only for package gatsby + +## [2.20.16](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.15...gatsby@2.20.16) (2020-04-10) + +### Bug Fixes + +- **gatsby:** Use tmp dir for tmp redux cache folder ([#22959](https://github.com/gatsbyjs/gatsby/issues/22959)) ([86cf920](https://github.com/gatsbyjs/gatsby/commit/86cf920)) + +## [2.20.15](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.14...gatsby@2.20.15) (2020-04-09) + +### Bug Fixes + +- **gatsby:** improve async commons chunking ([#22879](https://github.com/gatsbyjs/gatsby/issues/22879)) ([7cf056f](https://github.com/gatsbyjs/gatsby/commit/7cf056f)) + ## [2.20.14](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.13...gatsby@2.20.14) (2020-04-08) ### Performance Improvements diff --git a/packages/gatsby/index.d.ts b/packages/gatsby/index.d.ts index 020f0c8fa9a50..e5919b29a1663 100644 --- a/packages/gatsby/index.d.ts +++ b/packages/gatsby/index.d.ts @@ -1,4 +1,5 @@ import * as React from "react" +import { Renderer } from "react-dom" import { EventEmitter } from "events" import { WindowLocation, NavigateFn } from "@reach/router" import { createContentDigest } from "gatsby-core-utils" @@ -537,7 +538,7 @@ 
export interface GatsbyBrowser { args: ReplaceComponentRendererArgs, options: PluginOptions ): any - replaceHydrateFunction?(args: BrowserPluginArgs, options: PluginOptions): any + replaceHydrateFunction?(args: BrowserPluginArgs, options: PluginOptions): Renderer shouldUpdateScroll?(args: ShouldUpdateScrollArgs, options: PluginOptions): any wrapPageElement?( args: WrapPageElementBrowserArgs, @@ -959,7 +960,13 @@ interface ActionOptions { } export interface BuildArgs extends ParentSpanPluginArgs { - graphql: Function + graphql( + query: string, + variables?: TVariables + ): Promise<{ + errors?: any + data?: TData + }> } export interface Actions { diff --git a/packages/gatsby/package.json b/packages/gatsby/package.json index b65f1673acda9..3a1d55e15c45f 100644 --- a/packages/gatsby/package.json +++ b/packages/gatsby/package.json @@ -1,7 +1,7 @@ { "name": "gatsby", "description": "Blazing fast modern site generator for React", - "version": "2.20.14", + "version": "2.20.20", "author": "Kyle Mathews ", "bin": { "gatsby": "./dist/bin/gatsby.js" @@ -80,7 +80,7 @@ "flat": "^4.1.0", "fs-exists-cached": "1.0.0", "fs-extra": "^8.1.0", - "gatsby-cli": "^2.11.5", + "gatsby-cli": "^2.11.8", "gatsby-core-utils": "^1.1.1", "gatsby-graphiql-explorer": "^0.3.1", "gatsby-link": "^2.3.2", diff --git a/packages/gatsby/src/bootstrap/__tests__/__snapshots__/graphql-runner.js.snap b/packages/gatsby/src/bootstrap/__tests__/__snapshots__/create-graphql-runner.js.snap similarity index 100% rename from packages/gatsby/src/bootstrap/__tests__/__snapshots__/graphql-runner.js.snap rename to packages/gatsby/src/bootstrap/__tests__/__snapshots__/create-graphql-runner.js.snap diff --git a/packages/gatsby/src/bootstrap/__tests__/graphql-runner.js b/packages/gatsby/src/bootstrap/__tests__/create-graphql-runner.js similarity index 88% rename from packages/gatsby/src/bootstrap/__tests__/graphql-runner.js rename to packages/gatsby/src/bootstrap/__tests__/create-graphql-runner.js index e9dba17611223..3445cc43dec90 100644 --- a/packages/gatsby/src/bootstrap/__tests__/graphql-runner.js +++ b/packages/gatsby/src/bootstrap/__tests__/create-graphql-runner.js @@ -1,6 +1,6 @@ jest.mock(`graphql`) -const createGraphqlRunner = require(`../graphql-runner`) +import { createGraphQLRunner } from "../create-graphql-runner" const { execute, validate, parse } = require(`graphql`) parse.mockImplementation(() => { @@ -31,7 +31,7 @@ describe(`grapqhl-runner`, () => { }) it(`should return the result when grapqhl has no errors`, async () => { - const graphqlRunner = createGraphqlRunner(createStore(), reporter) + const graphqlRunner = createGraphQLRunner(createStore(), reporter) const expectation = { data: { @@ -46,7 +46,7 @@ describe(`grapqhl-runner`, () => { }) it(`should return an errors array when structured errors found`, async () => { - const graphqlRunner = createGraphqlRunner(createStore(), reporter) + const graphqlRunner = createGraphQLRunner(createStore(), reporter) const expectation = { errors: [ @@ -64,7 +64,7 @@ describe(`grapqhl-runner`, () => { }) it(`should throw a structured error when created from createPage file`, async () => { - const graphqlRunner = createGraphqlRunner(createStore(), reporter) + const graphqlRunner = createGraphQLRunner(createStore(), reporter) const errorObject = { stack: `Error diff --git a/packages/gatsby/src/bootstrap/__tests__/requires-writer.js b/packages/gatsby/src/bootstrap/__tests__/requires-writer.js index 5506421f45541..b82fa698dc0e6 100644 --- 
a/packages/gatsby/src/bootstrap/__tests__/requires-writer.js +++ b/packages/gatsby/src/bootstrap/__tests__/requires-writer.js @@ -326,4 +326,29 @@ describe(`requires-writer`, () => { }) }) }) + + describe(`getComponents`, () => { + it(`should return components in a deterministic order`, () => { + const pagesInput = generatePagesState([ + { + component: `component1`, + componentChunkName: `chunkName1`, + matchPath: `matchPath1`, + path: `/path1`, + }, + { + component: `component2`, + componentChunkName: `chunkName2`, + path: `/path2`, + }, + ]) + + const pages = [...pagesInput.values()] + const pagesReversed = [...pagesInput.values()].reverse() + + expect(requiresWriter.getComponents(pages)).toEqual( + requiresWriter.getComponents(pagesReversed) + ) + }) + }) }) diff --git a/packages/gatsby/src/bootstrap/graphql-runner.js b/packages/gatsby/src/bootstrap/create-graphql-runner.ts similarity index 54% rename from packages/gatsby/src/bootstrap/graphql-runner.js rename to packages/gatsby/src/bootstrap/create-graphql-runner.ts index 3d6fb71b2b94d..35967ec130eb1 100644 --- a/packages/gatsby/src/bootstrap/graphql-runner.js +++ b/packages/gatsby/src/bootstrap/create-graphql-runner.ts @@ -1,14 +1,27 @@ -const stackTrace = require(`stack-trace`) +import stackTrace from "stack-trace" +import { ExecutionResultDataDefault } from "graphql/execution/execute" +import { Store } from "redux" -const GraphQLRunner = require(`../query/graphql-runner`).default -const errorParser = require(`../query/error-parser`).default +import GraphQLRunner from "../query/graphql-runner" +import errorParser from "../query/error-parser" +import { emitter } from "../redux" +import { Reporter } from "../.." +import { ExecutionResult, Source } from "../../graphql" +import { IGatsbyState } from "../redux/types" -const { emitter } = require(`../redux`) +type Runner = ( + query: string | Source, + context: Record +) => Promise> -module.exports = (store, reporter) => { +export const createGraphQLRunner = ( + store: Store, + reporter: Reporter +): Runner => { // TODO: Move tracking of changed state inside GraphQLRunner itself. https://github.com/gatsbyjs/gatsby/issues/20941 let runner = new GraphQLRunner(store) - ;[ + + const eventTypes: string[] = [ `DELETE_CACHE`, `CREATE_NODE`, `DELETE_NODE`, @@ -17,12 +30,15 @@ module.exports = (store, reporter) => { `SET_SCHEMA`, `ADD_FIELD_TO_NODE`, `ADD_CHILD_NODE_TO_PARENT_NODE`, - ].forEach(eventType => { - emitter.on(eventType, event => { + ] + + eventTypes.forEach(type => { + emitter.on(type, () => { runner = new GraphQLRunner(store) }) }) - return (query, context) => + + return (query, context): ReturnType => runner.query(query, context).then(result => { if (result.errors) { const structuredErrors = result.errors @@ -30,15 +46,18 @@ module.exports = (store, reporter) => { // Find the file where graphql was called. 
const file = stackTrace .parse(e) - .find(file => /createPages/.test(file.functionName)) + .find(file => /createPages/.test(file.getFunctionName())) if (file) { const structuredError = errorParser({ message: e.message, location: { - start: { line: file.lineNumber, column: file.columnNumber }, + start: { + line: file.getLineNumber(), + column: file.getColumnNumber(), + }, }, - filePath: file.fileName, + filePath: file.getFileName(), }) structuredError.context = { ...structuredError.context, diff --git a/packages/gatsby/src/bootstrap/index.js b/packages/gatsby/src/bootstrap/index.js index 8bee7678798a5..ffc8fc8eb413e 100644 --- a/packages/gatsby/src/bootstrap/index.js +++ b/packages/gatsby/src/bootstrap/index.js @@ -28,7 +28,7 @@ process.on(`unhandledRejection`, (reason, p) => { report.panic(reason) }) -const createGraphqlRunner = require(`./graphql-runner`) +import { createGraphQLRunner } from "./create-graphql-runner" const { extractQueries } = require(`../query/query-watcher`) const requiresWriter = require(`./requires-writer`) const { writeRedirects } = require(`./redirects-writer`) @@ -469,7 +469,7 @@ module.exports = async (args: BootstrapArgs) => { payload: _.flattenDeep([extensions, apiResults]), }) - const graphqlRunner = createGraphqlRunner(store, report) + const graphqlRunner = createGraphQLRunner(store, report) // Collect pages. activity = report.activityTimer(`createPages`, { diff --git a/packages/gatsby/src/bootstrap/requires-writer.js b/packages/gatsby/src/bootstrap/requires-writer.js index f1c24b6061791..e34c0c72a948e 100644 --- a/packages/gatsby/src/bootstrap/requires-writer.js +++ b/packages/gatsby/src/bootstrap/requires-writer.js @@ -52,6 +52,7 @@ const getComponents = pages => _(pages) .map(pickComponentFields) .uniqBy(c => c.componentChunkName) + .orderBy(c => c.componentChunkName) .value() /** @@ -254,4 +255,5 @@ module.exports = { writeAll, resetLastHash, startListener, + getComponents, } diff --git a/packages/gatsby/src/bootstrap/schema-hot-reloader.ts b/packages/gatsby/src/bootstrap/schema-hot-reloader.ts index 808b0ab70b89c..2cdd1efcaeab1 100644 --- a/packages/gatsby/src/bootstrap/schema-hot-reloader.ts +++ b/packages/gatsby/src/bootstrap/schema-hot-reloader.ts @@ -7,7 +7,6 @@ import report from "gatsby-cli/lib/reporter" import { IGatsbyState } from "../redux/types" type TypeMap = IGatsbyState["inferenceMetadata"]["typeMap"] -type SchemaCustomization = IGatsbyState["schemaCustomization"] type InferenceMetadata = IGatsbyState["inferenceMetadata"] const inferredTypesChanged = ( @@ -19,41 +18,46 @@ const inferredTypesChanged = ( typeMap[type].dirty && !haveEqualFields(typeMap[type], prevTypeMap[type]) ) -const schemaChanged = ( - schemaCustomization: SchemaCustomization, - lastSchemaCustomization: SchemaCustomization -): boolean => - [`fieldExtensions`, `printConfig`, `thirdPartySchemas`, `types`].some( - key => schemaCustomization[key] !== lastSchemaCustomization[key] - ) - let lastMetadata: InferenceMetadata -let lastSchemaCustomization: SchemaCustomization // API_RUNNING_QUEUE_EMPTY could be emitted multiple types // in a short period of time, so debounce seems reasonable const maybeRebuildSchema = debounce(async (): Promise => { - const { inferenceMetadata, schemaCustomization } = store.getState() + const { inferenceMetadata } = store.getState() - if ( - !inferredTypesChanged(inferenceMetadata.typeMap, lastMetadata.typeMap) && - !schemaChanged(schemaCustomization, lastSchemaCustomization) - ) { + if (!inferredTypesChanged(inferenceMetadata.typeMap, 
lastMetadata.typeMap)) { return } const activity = report.activityTimer(`rebuild schema`) activity.start() - lastMetadata = cloneDeep(inferenceMetadata) - lastSchemaCustomization = schemaCustomization await rebuild({ parentSpan: activity }) await updateStateAndRunQueries(false, { parentSpan: activity }) activity.end() }, 1000) -export const bootstrapSchemaHotReloader = (): void => { - const { inferenceMetadata, schemaCustomization } = store.getState() +function snapshotInferenceMetadata(): void { + const { inferenceMetadata } = store.getState() lastMetadata = cloneDeep(inferenceMetadata) - lastSchemaCustomization = schemaCustomization +} + +export function bootstrapSchemaHotReloader(): void { + // Snapshot inference metadata at the time of the last schema rebuild + // (even if schema was rebuilt elsewhere) + // Using the snapshot later to check if inferred types actually changed since the last rebuild + snapshotInferenceMetadata() + emitter.on(`SET_SCHEMA`, snapshotInferenceMetadata) + + startSchemaHotReloader() +} + +export function startSchemaHotReloader(): void { + // Listen for node changes outside of a regular sourceNodes API call, + // e.g. markdown file update via watcher emitter.on(`API_RUNNING_QUEUE_EMPTY`, maybeRebuildSchema) } + +export function stopSchemaHotReloader(): void { + emitter.off(`API_RUNNING_QUEUE_EMPTY`, maybeRebuildSchema) + maybeRebuildSchema.cancel() +} diff --git a/packages/gatsby/src/commands/develop.ts b/packages/gatsby/src/commands/develop.ts index 3f06dea88627c..d647ea85b41bf 100644 --- a/packages/gatsby/src/commands/develop.ts +++ b/packages/gatsby/src/commands/develop.ts @@ -31,12 +31,17 @@ import * as WorkerPool from "../utils/worker/pool" import http from "http" import https from "https" -import { bootstrapSchemaHotReloader } from "../bootstrap/schema-hot-reloader" +import { + bootstrapSchemaHotReloader, + startSchemaHotReloader, + stopSchemaHotReloader, +} from "../bootstrap/schema-hot-reloader" import bootstrapPageHotReloader from "../bootstrap/page-hot-reloader" import { developStatic } from "./develop-static" import withResolverContext from "../schema/context" import sourceNodes from "../utils/source-nodes" import { createSchemaCustomization } from "../utils/create-schema-customization" +import { rebuild as rebuildSchema } from "../schema" import { websocketManager } from "../utils/websocket-manager" import getSslCert from "../utils/get-ssl-cert" import { slash } from "gatsby-core-utils" @@ -203,6 +208,7 @@ async function startServer(program: IProgram): Promise { **/ const REFRESH_ENDPOINT = `/__refresh` const refresh = async (req: express.Request): Promise => { + stopSchemaHotReloader() let activity = report.activityTimer(`createSchemaCustomization`, {}) activity.start() await createSchemaCustomization({ @@ -215,6 +221,11 @@ async function startServer(program: IProgram): Promise { webhookBody: req.body, }) activity.end() + activity = report.activityTimer(`rebuild schema`) + activity.start() + await rebuildSchema({ parentSpan: activity }) + activity.end() + startSchemaHotReloader() } app.use(REFRESH_ENDPOINT, express.json()) app.post(REFRESH_ENDPOINT, (req, res) => { diff --git a/packages/gatsby/src/redux/__tests__/index.js b/packages/gatsby/src/redux/__tests__/index.js index e90eb1742765f..addc4d52aaf4f 100644 --- a/packages/gatsby/src/redux/__tests__/index.js +++ b/packages/gatsby/src/redux/__tests__/index.js @@ -16,7 +16,7 @@ jest.mock(`fs-extra`, () => { mockWrittenContent.set(file, content) ), readFileSync: jest.fn(file => 
mockWrittenContent.get(file)), - renameSync: jest.fn((from, to) => { + moveSync: jest.fn((from, to) => { // This will only work for folders if they are always the full prefix // of the file... (that goes for both input dirs). That's the case here. if (mockWrittenContent.has(to)) { diff --git a/packages/gatsby/src/redux/actions/internal.ts b/packages/gatsby/src/redux/actions/internal.ts index 19c811ce50087..04a33541a1e1f 100644 --- a/packages/gatsby/src/redux/actions/internal.ts +++ b/packages/gatsby/src/redux/actions/internal.ts @@ -1,4 +1,5 @@ import { + IGatsbyPlugin, ProgramStatus, ICreatePageDependencyAction, IDeleteComponentDependenciesAction, @@ -81,7 +82,7 @@ export const replaceComponentQuery = ({ */ export const replaceStaticQuery = ( args: any, - plugin: Plugin | null | undefined = null + plugin: IGatsbyPlugin | null | undefined = null ): IReplaceStaticQueryAction => { return { type: `REPLACE_STATIC_QUERY`, @@ -98,7 +99,7 @@ export const replaceStaticQuery = ( */ export const queryExtracted = ( { componentPath, query }: { componentPath: string; query: string }, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IQueryExtractedAction => { return { @@ -116,7 +117,7 @@ export const queryExtracted = ( */ export const queryExtractionGraphQLError = ( { componentPath, error }: { componentPath: string; error: string }, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IQueryExtractionGraphQLErrorAction => { return { @@ -135,7 +136,7 @@ export const queryExtractionGraphQLError = ( */ export const queryExtractedBabelSuccess = ( { componentPath }, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IQueryExtractedBabelSuccessAction => { return { @@ -153,7 +154,7 @@ export const queryExtractedBabelSuccess = ( */ export const queryExtractionBabelError = ( { componentPath, error }: { componentPath: string; error: Error }, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IQueryExtractionBabelErrorAction => { return { @@ -170,7 +171,7 @@ export const queryExtractionBabelError = ( */ export const setProgramStatus = ( status: ProgramStatus, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): ISetProgramStatusAction => { return { @@ -187,7 +188,7 @@ export const setProgramStatus = ( */ export const pageQueryRun = ( { path, componentPath, isPage }, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IPageQueryRunAction => { return { @@ -204,7 +205,7 @@ export const pageQueryRun = ( */ export const removeStaleJob = ( contentDigest: string, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IRemoveStaleJobAction => { return { diff --git a/packages/gatsby/src/redux/actions/restricted.js b/packages/gatsby/src/redux/actions/restricted.js deleted file mode 100644 index 85be2a3a00b9a..0000000000000 --- a/packages/gatsby/src/redux/actions/restricted.js +++ /dev/null @@ -1,459 +0,0 @@ -// @flow -const { camelCase } = require(`lodash`) -const report = require(`gatsby-cli/lib/reporter`) -const { parseTypeDef } = require(`../../schema/types/type-defs`) - -import type { Plugin } from "./types" - -const actions = {} - -/** - * Add a third-party schema to be merged into main schema. Schema has to be a - * graphql-js GraphQLSchema object. - * - * This schema is going to be merged as-is. This can easily break the main - * Gatsby schema, so it's user's responsibility to make sure it doesn't happen - * (by e.g. namespacing the schema). 
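// Minimal sketch of the action-creator signature the typed internal actions
// above converge on: payload first, then the calling plugin (IGatsbyPlugin in
// the real types), then an optional trace id. IPluginLike and the action name
// used here are illustrative stand-ins.
interface IPluginLike {
  name: string
  version: string
}

interface IStatusLikeAction {
  type: string
  plugin: IPluginLike
  traceId?: string
  payload: string
}

const setStatusLike = (
  status: string,
  plugin: IPluginLike,
  traceId?: string
): IStatusLikeAction => {
  return { type: `SET_PROGRAM_STATUS`, plugin, traceId, payload: status }
}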
- * - * @availableIn [createSchemaCustomization, sourceNodes] - * - * @param {Object} $0 - * @param {GraphQLSchema} $0.schema GraphQL schema to add - */ -actions.addThirdPartySchema = ( - { schema }: { schema: GraphQLSchema }, - plugin?: Plugin, - traceId?: string -) => { - return { - type: `ADD_THIRD_PARTY_SCHEMA`, - plugin, - traceId, - payload: schema, - } -} - -import type GatsbyGraphQLType from "../../schema/types/type-builders" -/** - * Add type definitions to the GraphQL schema. - * - * @availableIn [createSchemaCustomization, sourceNodes] - * - * @param {string | GraphQLOutputType | GatsbyGraphQLType | string[] | GraphQLOutputType[] | GatsbyGraphQLType[]} types Type definitions - * - * Type definitions can be provided either as - * [`graphql-js` types](https://graphql.org/graphql-js/), in - * [GraphQL schema definition language (SDL)](https://graphql.org/learn/) - * or using Gatsby Type Builders available on the `schema` API argument. - * - * Things to note: - * * type definitions targeting node types, i.e. `MarkdownRemark` and others - * added in `sourceNodes` or `onCreateNode` APIs, need to implement the - * `Node` interface. Interface fields will be added automatically, but it - * is mandatory to label those types with `implements Node`. - * * by default, explicit type definitions from `createTypes` will be merged - * with inferred field types, and default field resolvers for `Date` (which - * adds formatting options) and `File` (which resolves the field value as - * a `relativePath` foreign-key field) are added. This behavior can be - * customised with `@infer`, `@dontInfer` directives or extensions. Fields - * may be assigned resolver (and other option like args) with additional - * directives. Currently `@dateformat`, `@link`, `@fileByRelativePath` and - * `@proxy` are available. - * - * - * Schema customization controls: - * * `@infer` - run inference on the type and add fields that don't exist on the - * defined type to it. - * * `@dontInfer` - don't run any inference on the type - * - * Extensions to add resolver options: - * * `@dateformat` - add date formatting arguments. Accepts `formatString` and - * `locale` options that sets the defaults for this field - * * `@link` - connect to a different Node. Arguments `by` and `from`, which - * define which field to compare to on a remote node and which field to use on - * the source node - * * `@fileByRelativePath` - connect to a File node. Same arguments. The - * difference from link is that this normalizes the relative path to be - * relative from the path where source node is found. - * * `@proxy` - in case the underlying node data contains field names with - * characters that are invalid in GraphQL, `proxy` allows to explicitly - * proxy those properties to fields with valid field names. Takes a `from` arg. - * - * - * @example - * exports.createSchemaCustomization = ({ actions }) => { - * const { createTypes } = actions - * const typeDefs = ` - * """ - * Markdown Node - * """ - * type MarkdownRemark implements Node @infer { - * frontmatter: Frontmatter! - * } - * - * """ - * Markdown Frontmatter - * """ - * type Frontmatter @infer { - * title: String! - * author: AuthorJson! @link - * date: Date! @dateformat - * published: Boolean! - * tags: [String!]! - * } - * - * """ - * Author information - * """ - * # Does not include automatically inferred fields - * type AuthorJson implements Node @dontInfer { - * name: String! - * birthday: Date! 
@dateformat(locale: "ru") - * } - * ` - * createTypes(typeDefs) - * } - * - * // using Gatsby Type Builder API - * exports.createSchemaCustomization = ({ actions, schema }) => { - * const { createTypes } = actions - * const typeDefs = [ - * schema.buildObjectType({ - * name: 'MarkdownRemark', - * fields: { - * frontmatter: 'Frontmatter!' - * }, - * interfaces: ['Node'], - * extensions: { - * infer: true, - * }, - * }), - * schema.buildObjectType({ - * name: 'Frontmatter', - * fields: { - * title: { - * type: 'String!', - * resolve(parent) { - * return parent.title || '(Untitled)' - * } - * }, - * author: { - * type: 'AuthorJson' - * extensions: { - * link: {}, - * }, - * } - * date: { - * type: 'Date!' - * extensions: { - * dateformat: {}, - * }, - * }, - * published: 'Boolean!', - * tags: '[String!]!', - * } - * }), - * schema.buildObjectType({ - * name: 'AuthorJson', - * fields: { - * name: 'String!' - * birthday: { - * type: 'Date!' - * extensions: { - * dateformat: { - * locale: 'ru', - * }, - * }, - * }, - * }, - * interfaces: ['Node'], - * extensions: { - * infer: false, - * }, - * }), - * ] - * createTypes(typeDefs) - * } - */ -actions.createTypes = ( - types: - | string - | GraphQLOutputType - | GatsbyGraphQLType - | Array, - plugin?: Plugin, - traceId?: string -) => { - return { - type: `CREATE_TYPES`, - plugin, - traceId, - payload: Array.isArray(types) - ? types.map(parseTypeDef) - : parseTypeDef(types), - } -} - -const { reservedExtensionNames } = require(`../../schema/extensions`) -import type GraphQLFieldExtensionDefinition from "../../schema/extensions" -/** - * Add a field extension to the GraphQL schema. - * - * Extensions allow defining custom behavior which can be added to fields - * via directive (in SDL) or on the `extensions` prop (with Type Builders). - * - * The extension definition takes a `name`, an `extend` function, and optional - * extension `args` for options. The `extend` function has to return a (partial) - * field config, and receives the extension options and the previous field config - * as arguments. - * - * @availableIn [createSchemaCustomization, sourceNodes] - * - * @param {GraphQLFieldExtensionDefinition} extension The field extension definition - * @example - * exports.createSchemaCustomization = ({ actions }) => { - * const { createFieldExtension } = actions - * createFieldExtension({ - * name: 'motivate', - * args: { - * caffeine: 'Int' - * }, - * extend(options, prevFieldConfig) { - * return { - * type: 'String', - * args: { - * sunshine: { - * type: 'Int', - * defaultValue: 0, - * }, - * }, - * resolve(source, args, context, info) { - * const motivation = (options.caffeine || 0) - args.sunshine - * if (motivation > 5) return 'Work! Work! Work!' - * return 'Maybe tomorrow.' 
- * }, - * } - * }, - * }) - * } - */ -actions.createFieldExtension = ( - extension: GraphQLFieldExtensionDefinition, - plugin?: Plugin, - traceId?: string -) => (dispatch, getState) => { - const { name } = extension || {} - const { fieldExtensions } = getState().schemaCustomization - - if (!name) { - report.error(`The provided field extension must have a \`name\` property.`) - } else if (reservedExtensionNames.includes(name)) { - report.error( - `The field extension name \`${name}\` is reserved for internal use.` - ) - } else if (fieldExtensions[name]) { - report.error( - `A field extension with the name \`${name}\` has already been registered.` - ) - } else { - dispatch({ - type: `CREATE_FIELD_EXTENSION`, - plugin, - traceId, - payload: { name, extension }, - }) - } -} - -/** - * Write GraphQL schema to file - * - * Writes out inferred and explicitly specified type definitions. This is not - * the full GraphQL schema, but only the types necessary to recreate all type - * definitions, i.e. it does not include directives, built-ins, and derived - * types for filtering, sorting, pagination etc. Optionally, you can define a - * list of types to include/exclude. This is recommended to avoid including - * definitions for plugin-created types. - * - * @availableIn [createSchemaCustomization] - * - * @param {object} $0 - * @param {string} [$0.path] The path to the output file, defaults to `schema.gql` - * @param {object} [$0.include] Configure types to include - * @param {string[]} [$0.include.types] Only include these types - * @param {string[]} [$0.include.plugins] Only include types owned by these plugins - * @param {object} [$0.exclude] Configure types to exclude - * @param {string[]} [$0.exclude.types] Do not include these types - * @param {string[]} [$0.exclude.plugins] Do not include types owned by these plugins - * @param {boolean} [withFieldTypes] Include field types, defaults to `true` - */ -actions.printTypeDefinitions = ( - { - path = `schema.gql`, - include, - exclude, - withFieldTypes = true, - }: { - path?: string, - include?: { types?: Array, plugins?: Array }, - exclude?: { types?: Array, plugins?: Array }, - withFieldTypes?: boolean, - }, - plugin?: Plugin, - traceId?: string -) => { - return { - type: `PRINT_SCHEMA_REQUESTED`, - plugin, - traceId, - payload: { - path, - include, - exclude, - withFieldTypes, - }, - } -} - -/** - * Make functionality available on field resolver `context` - * - * @availableIn [createSchemaCustomization] - * - * @param {object} context Object to make available on `context`. 
- * When called from a plugin, the context value will be namespaced under - * the camel-cased plugin name without the "gatsby-" prefix - * @example - * const getHtml = md => remark().use(html).process(md) - * exports.createSchemaCustomization = ({ actions }) => { - * actions.createResolverContext({ getHtml }) - * } - * // The context value can then be accessed in any field resolver like this: - * exports.createSchemaCustomization = ({ actions }) => { - * actions.createTypes(schema.buildObjectType({ - * name: 'Test', - * interfaces: ['Node'], - * fields: { - * md: { - * type: 'String!', - * async resolve(source, args, context, info) { - * const processed = await context.transformerRemark.getHtml(source.internal.contents) - * return processed.contents - * } - * } - * } - * })) - * } - */ -actions.createResolverContext = ( - context: object, - plugin?: Plugin, - traceId?: string -) => dispatch => { - if (!context || typeof context !== `object`) { - report.error( - `Expected context value passed to \`createResolverContext\` to be an object. Received "${context}".` - ) - } else { - const { name } = plugin || {} - const payload = - !name || name === `default-site-plugin` - ? context - : { [camelCase(name.replace(/^gatsby-/, ``))]: context } - dispatch({ - type: `CREATE_RESOLVER_CONTEXT`, - plugin, - traceId, - payload, - }) - } -} - -const withDeprecationWarning = (actionName, action, api, allowedIn) => ( - ...args -) => { - report.warn( - `Calling \`${actionName}\` in the \`${api}\` API is deprecated. ` + - `Please use: ${allowedIn.map(a => `\`${a}\``).join(`, `)}.` - ) - return action(...args) -} - -const withErrorMessage = (actionName, api, allowedIn) => () => - // return a thunk that does not dispatch anything - () => { - report.error( - `\`${actionName}\` is not available in the \`${api}\` API. 
` + - `Please use: ${allowedIn.map(a => `\`${a}\``).join(`, `)}.` - ) - } - -const nodeAPIs = Object.keys(require(`../../utils/api-node-docs`)) - -const ALLOWED_IN = `ALLOWED_IN` -const DEPRECATED_IN = `DEPRECATED_IN` - -const set = (availableActionsByAPI, api, actionName, action) => { - availableActionsByAPI[api] = availableActionsByAPI[api] || {} - availableActionsByAPI[api][actionName] = action -} - -const mapAvailableActionsToAPIs = restrictions => { - const availableActionsByAPI = {} - - const actionNames = Object.keys(restrictions) - actionNames.forEach(actionName => { - const action = actions[actionName] - - const allowedIn = restrictions[actionName][ALLOWED_IN] || [] - allowedIn.forEach(api => - set(availableActionsByAPI, api, actionName, action) - ) - - const deprecatedIn = restrictions[actionName][DEPRECATED_IN] || [] - deprecatedIn.forEach(api => - set( - availableActionsByAPI, - api, - actionName, - withDeprecationWarning(actionName, action, api, allowedIn) - ) - ) - - const forbiddenIn = nodeAPIs.filter( - api => ![...allowedIn, ...deprecatedIn].includes(api) - ) - forbiddenIn.forEach(api => - set( - availableActionsByAPI, - api, - actionName, - withErrorMessage(actionName, api, allowedIn) - ) - ) - }) - - return availableActionsByAPI -} - -const availableActionsByAPI = mapAvailableActionsToAPIs({ - createFieldExtension: { - [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], - }, - createTypes: { - [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], - [DEPRECATED_IN]: [`onPreInit`, `onPreBootstrap`], - }, - createResolverContext: { - [ALLOWED_IN]: [`createSchemaCustomization`], - }, - addThirdPartySchema: { - [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], - [DEPRECATED_IN]: [`onPreInit`, `onPreBootstrap`], - }, - printTypeDefinitions: { - [ALLOWED_IN]: [`createSchemaCustomization`], - }, -}) - -module.exports = { actions, availableActionsByAPI } diff --git a/packages/gatsby/src/redux/actions/restricted.ts b/packages/gatsby/src/redux/actions/restricted.ts new file mode 100644 index 0000000000000..8cb4a68b89f0b --- /dev/null +++ b/packages/gatsby/src/redux/actions/restricted.ts @@ -0,0 +1,517 @@ +import { camelCase } from "lodash" +import { GraphQLSchema, GraphQLOutputType } from "graphql" +import { ActionCreator } from "redux" +import { ThunkAction } from "redux-thunk" +import report from "gatsby-cli/lib/reporter" +import { parseTypeDef } from "../../schema/types/type-defs" +import { + GraphQLFieldExtensionDefinition, + reservedExtensionNames, +} from "../../schema/extensions" +import { GatsbyGraphQLType } from "../../schema/types/type-builders" +import { + IGatsbyPlugin, + ActionsUnion, + IAddThirdPartySchema, + ICreateTypes, + IGatsbyState, + ICreateFieldExtension, + IPrintTypeDefinitions, + ICreateResolverContext, + IGatsbyPluginContext, +} from "../types" + +type RestrictionActionNames = + | "createFieldExtension" + | "createTypes" + | "createResolverContext" + | "addThirdPartySchema" + | "printTypeDefinitions" + +type SomeActionCreator = + | ActionCreator + | ActionCreator> + +export const actions = { + /** + * Add a third-party schema to be merged into main schema. Schema has to be a + * graphql-js GraphQLSchema object. + * + * This schema is going to be merged as-is. This can easily break the main + * Gatsby schema, so it's user's responsibility to make sure it doesn't happen + * (by e.g. namespacing the schema). 
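// Hedged usage sketch for the action documented above, built with plain
// graphql-js. The gatsby-node wiring is shown as a comment, and the root field
// name is deliberately prefixed so it is unlikely to collide with fields in
// the main Gatsby schema; all names here are illustrative.
import { GraphQLObjectType, GraphQLSchema, GraphQLString } from "graphql"

const externalSchema = new GraphQLSchema({
  query: new GraphQLObjectType({
    name: `ExternalQuery`,
    fields: {
      externalGreeting: {
        type: GraphQLString,
        resolve: (): string => `hello`,
      },
    },
  }),
})

// exports.createSchemaCustomization = ({ actions }) => {
//   actions.addThirdPartySchema({ schema: externalSchema })
// }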
+ * + * @availableIn [createSchemaCustomization, sourceNodes] + * + * @param {Object} $0 + * @param {GraphQLSchema} $0.schema GraphQL schema to add + */ + addThirdPartySchema: ( + { schema }: { schema: GraphQLSchema }, + plugin: IGatsbyPlugin, + traceId?: string + ): IAddThirdPartySchema => { + return { + type: `ADD_THIRD_PARTY_SCHEMA`, + plugin, + traceId, + payload: schema, + } + }, + + /** + * Add type definitions to the GraphQL schema. + * + * @availableIn [createSchemaCustomization, sourceNodes] + * + * @param {string | GraphQLOutputType | GatsbyGraphQLType | string[] | GraphQLOutputType[] | GatsbyGraphQLType[]} types Type definitions + * + * Type definitions can be provided either as + * [`graphql-js` types](https://graphql.org/graphql-js/), in + * [GraphQL schema definition language (SDL)](https://graphql.org/learn/) + * or using Gatsby Type Builders available on the `schema` API argument. + * + * Things to note: + * * type definitions targeting node types, i.e. `MarkdownRemark` and others + * added in `sourceNodes` or `onCreateNode` APIs, need to implement the + * `Node` interface. Interface fields will be added automatically, but it + * is mandatory to label those types with `implements Node`. + * * by default, explicit type definitions from `createTypes` will be merged + * with inferred field types, and default field resolvers for `Date` (which + * adds formatting options) and `File` (which resolves the field value as + * a `relativePath` foreign-key field) are added. This behavior can be + * customised with `@infer`, `@dontInfer` directives or extensions. Fields + * may be assigned resolver (and other option like args) with additional + * directives. Currently `@dateformat`, `@link`, `@fileByRelativePath` and + * `@proxy` are available. + * + * + * Schema customization controls: + * * `@infer` - run inference on the type and add fields that don't exist on the + * defined type to it. + * * `@dontInfer` - don't run any inference on the type + * + * Extensions to add resolver options: + * * `@dateformat` - add date formatting arguments. Accepts `formatString` and + * `locale` options that sets the defaults for this field + * * `@link` - connect to a different Node. Arguments `by` and `from`, which + * define which field to compare to on a remote node and which field to use on + * the source node + * * `@fileByRelativePath` - connect to a File node. Same arguments. The + * difference from link is that this normalizes the relative path to be + * relative from the path where source node is found. + * * `@proxy` - in case the underlying node data contains field names with + * characters that are invalid in GraphQL, `proxy` allows to explicitly + * proxy those properties to fields with valid field names. Takes a `from` arg. + * + * + * @example + * exports.createSchemaCustomization = ({ actions }) => { + * const { createTypes } = actions + * const typeDefs = ` + * """ + * Markdown Node + * """ + * type MarkdownRemark implements Node @infer { + * frontmatter: Frontmatter! + * } + * + * """ + * Markdown Frontmatter + * """ + * type Frontmatter @infer { + * title: String! + * author: AuthorJson! @link + * date: Date! @dateformat + * published: Boolean! + * tags: [String!]! + * } + * + * """ + * Author information + * """ + * # Does not include automatically inferred fields + * type AuthorJson implements Node @dontInfer { + * name: String! + * birthday: Date! 
@dateformat(locale: "ru") + * } + * ` + * createTypes(typeDefs) + * } + * + * // using Gatsby Type Builder API + * exports.createSchemaCustomization = ({ actions, schema }) => { + * const { createTypes } = actions + * const typeDefs = [ + * schema.buildObjectType({ + * name: 'MarkdownRemark', + * fields: { + * frontmatter: 'Frontmatter!' + * }, + * interfaces: ['Node'], + * extensions: { + * infer: true, + * }, + * }), + * schema.buildObjectType({ + * name: 'Frontmatter', + * fields: { + * title: { + * type: 'String!', + * resolve(parent) { + * return parent.title || '(Untitled)' + * } + * }, + * author: { + * type: 'AuthorJson' + * extensions: { + * link: {}, + * }, + * } + * date: { + * type: 'Date!' + * extensions: { + * dateformat: {}, + * }, + * }, + * published: 'Boolean!', + * tags: '[String!]!', + * } + * }), + * schema.buildObjectType({ + * name: 'AuthorJson', + * fields: { + * name: 'String!' + * birthday: { + * type: 'Date!' + * extensions: { + * dateformat: { + * locale: 'ru', + * }, + * }, + * }, + * }, + * interfaces: ['Node'], + * extensions: { + * infer: false, + * }, + * }), + * ] + * createTypes(typeDefs) + * } + */ + createTypes: ( + types: + | string + | GraphQLOutputType + | GatsbyGraphQLType + | Array, + plugin: IGatsbyPlugin, + traceId?: string + ): ICreateTypes => { + return { + type: `CREATE_TYPES`, + plugin, + traceId, + payload: Array.isArray(types) + ? types.map(parseTypeDef) + : parseTypeDef(types), + } + }, + + /** + * Add a field extension to the GraphQL schema. + * + * Extensions allow defining custom behavior which can be added to fields + * via directive (in SDL) or on the `extensions` prop (with Type Builders). + * + * The extension definition takes a `name`, an `extend` function, and optional + * extension `args` for options. The `extend` function has to return a (partial) + * field config, and receives the extension options and the previous field config + * as arguments. + * + * @availableIn [createSchemaCustomization, sourceNodes] + * + * @param {GraphQLFieldExtensionDefinition} extension The field extension definition + * @example + * exports.createSchemaCustomization = ({ actions }) => { + * const { createFieldExtension } = actions + * createFieldExtension({ + * name: 'motivate', + * args: { + * caffeine: 'Int' + * }, + * extend(options, prevFieldConfig) { + * return { + * type: 'String', + * args: { + * sunshine: { + * type: 'Int', + * defaultValue: 0, + * }, + * }, + * resolve(source, args, context, info) { + * const motivation = (options.caffeine || 0) - args.sunshine + * if (motivation > 5) return 'Work! Work! Work!' + * return 'Maybe tomorrow.' 
+ * }, + * } + * }, + * }) + * } + */ + createFieldExtension: ( + extension: GraphQLFieldExtensionDefinition, + plugin: IGatsbyPlugin, + traceId?: string + ): ThunkAction => ( + dispatch, + getState + ): void => { + const { name } = extension || {} + const { fieldExtensions } = getState().schemaCustomization + + if (!name) { + report.error( + `The provided field extension must have a \`name\` property.` + ) + } else if (reservedExtensionNames.includes(name)) { + report.error( + `The field extension name \`${name}\` is reserved for internal use.` + ) + } else if (fieldExtensions[name]) { + report.error( + `A field extension with the name \`${name}\` has already been registered.` + ) + } else { + dispatch({ + type: `CREATE_FIELD_EXTENSION`, + plugin, + traceId, + payload: { name, extension }, + }) + } + }, + + /** + * Write GraphQL schema to file + * + * Writes out inferred and explicitly specified type definitions. This is not + * the full GraphQL schema, but only the types necessary to recreate all type + * definitions, i.e. it does not include directives, built-ins, and derived + * types for filtering, sorting, pagination etc. Optionally, you can define a + * list of types to include/exclude. This is recommended to avoid including + * definitions for plugin-created types. + * + * @availableIn [createSchemaCustomization] + * + * @param {object} $0 + * @param {string} [$0.path] The path to the output file, defaults to `schema.gql` + * @param {object} [$0.include] Configure types to include + * @param {string[]} [$0.include.types] Only include these types + * @param {string[]} [$0.include.plugins] Only include types owned by these plugins + * @param {object} [$0.exclude] Configure types to exclude + * @param {string[]} [$0.exclude.types] Do not include these types + * @param {string[]} [$0.exclude.plugins] Do not include types owned by these plugins + * @param {boolean} [withFieldTypes] Include field types, defaults to `true` + */ + printTypeDefinitions: ( + { + path = `schema.gql`, + include, + exclude, + withFieldTypes = true, + }: { + path?: string + include?: { types?: Array; plugins?: Array } + exclude?: { types?: Array; plugins?: Array } + withFieldTypes?: boolean + }, + plugin: IGatsbyPlugin, + traceId?: string + ): IPrintTypeDefinitions => { + return { + type: `PRINT_SCHEMA_REQUESTED`, + plugin, + traceId, + payload: { + path, + include, + exclude, + withFieldTypes, + }, + } + }, + + /** + * Make functionality available on field resolver `context` + * + * @availableIn [createSchemaCustomization] + * + * @param {object} context Object to make available on `context`. 
+ * When called from a plugin, the context value will be namespaced under + * the camel-cased plugin name without the "gatsby-" prefix + * @example + * const getHtml = md => remark().use(html).process(md) + * exports.createSchemaCustomization = ({ actions }) => { + * actions.createResolverContext({ getHtml }) + * } + * // The context value can then be accessed in any field resolver like this: + * exports.createSchemaCustomization = ({ actions }) => { + * actions.createTypes(schema.buildObjectType({ + * name: 'Test', + * interfaces: ['Node'], + * fields: { + * md: { + * type: 'String!', + * async resolve(source, args, context, info) { + * const processed = await context.transformerRemark.getHtml(source.internal.contents) + * return processed.contents + * } + * } + * } + * })) + * } + */ + createResolverContext: ( + context: IGatsbyPluginContext, + plugin: IGatsbyPlugin, + traceId?: string + ): ThunkAction => ( + dispatch + ): void => { + if (!context || typeof context !== `object`) { + report.error( + `Expected context value passed to \`createResolverContext\` to be an object. Received "${context}".` + ) + } else { + const { name } = plugin || {} + const payload = + !name || name === `default-site-plugin` + ? context + : { [camelCase(name.replace(/^gatsby-/, ``))]: context } + dispatch({ + type: `CREATE_RESOLVER_CONTEXT`, + plugin, + traceId, + payload, + }) + } + }, +} + +const withDeprecationWarning = ( + actionName: RestrictionActionNames, + action: SomeActionCreator, + api: API, + allowedIn: API[] +): SomeActionCreator => (...args: any[]): ReturnType> => { + report.warn( + `Calling \`${actionName}\` in the \`${api}\` API is deprecated. ` + + `Please use: ${allowedIn.map(a => `\`${a}\``).join(`, `)}.` + ) + return action(...args) +} + +const withErrorMessage = ( + actionName: RestrictionActionNames, + api: API, + allowedIn: API[] +) => () => + // return a thunk that does not dispatch anything + (): void => { + report.error( + `\`${actionName}\` is not available in the \`${api}\` API. 
` + + `Please use: ${allowedIn.map(a => `\`${a}\``).join(`, `)}.` + ) + } + +const nodeAPIs = Object.keys(require(`../../utils/api-node-docs`)) + +const ALLOWED_IN = `ALLOWED_IN` +const DEPRECATED_IN = `DEPRECATED_IN` + +type API = string + +type Restrictions = Record< + RestrictionActionNames, + Partial<{ + ALLOWED_IN: API[] + DEPRECATED_IN: API[] + }> +> + +type AvailableActionsByAPI = Record< + API, + { [K in RestrictionActionNames]: SomeActionCreator } +> + +const set = ( + availableActionsByAPI: {}, + api: API, + actionName: RestrictionActionNames, + action: SomeActionCreator +): void => { + availableActionsByAPI[api] = availableActionsByAPI[api] || {} + availableActionsByAPI[api][actionName] = action +} + +const mapAvailableActionsToAPIs = ( + restrictions: Restrictions +): AvailableActionsByAPI => { + const availableActionsByAPI: AvailableActionsByAPI = {} + + const actionNames = Object.keys(restrictions) as (keyof typeof restrictions)[] + actionNames.forEach(actionName => { + const action = actions[actionName] + + const allowedIn: API[] = restrictions[actionName][ALLOWED_IN] || [] + allowedIn.forEach(api => + set(availableActionsByAPI, api, actionName, action) + ) + + const deprecatedIn: API[] = restrictions[actionName][DEPRECATED_IN] || [] + deprecatedIn.forEach(api => + set( + availableActionsByAPI, + api, + actionName, + withDeprecationWarning(actionName, action, api, allowedIn) + ) + ) + + const forbiddenIn = nodeAPIs.filter( + api => ![...allowedIn, ...deprecatedIn].includes(api) + ) + forbiddenIn.forEach(api => + set( + availableActionsByAPI, + api, + actionName, + withErrorMessage(actionName, api, allowedIn) + ) + ) + }) + + return availableActionsByAPI +} + +export const availableActionsByAPI = mapAvailableActionsToAPIs({ + createFieldExtension: { + [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], + }, + createTypes: { + [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], + [DEPRECATED_IN]: [`onPreInit`, `onPreBootstrap`], + }, + createResolverContext: { + [ALLOWED_IN]: [`createSchemaCustomization`], + }, + addThirdPartySchema: { + [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], + [DEPRECATED_IN]: [`onPreInit`, `onPreBootstrap`], + }, + printTypeDefinitions: { + [ALLOWED_IN]: [`createSchemaCustomization`], + }, +}) diff --git a/packages/gatsby/src/redux/actions/types.js b/packages/gatsby/src/redux/actions/types.js deleted file mode 100644 index 5980f1797479d..0000000000000 --- a/packages/gatsby/src/redux/actions/types.js +++ /dev/null @@ -1,6 +0,0 @@ -// @flow -type Plugin = { - name: string, -} - -export type { Plugin } diff --git a/packages/gatsby/src/redux/nodes.ts b/packages/gatsby/src/redux/nodes.ts index 03c518d824304..5d952efe242ea 100644 --- a/packages/gatsby/src/redux/nodes.ts +++ b/packages/gatsby/src/redux/nodes.ts @@ -3,9 +3,27 @@ import { IGatsbyNode } from "./types" import { createPageDependency } from "./actions/add-page-dependency" import { IDbQueryElemMatch } from "../db/common/query" +// Only list supported ops here. 
"CacheableFilterOp" +type FilterOp = "$eq" | "$lte" +// Note: `undefined` is an encoding for a property that does not exist +type FilterValueNullable = string | number | boolean | null | undefined +// This is filter value in most cases +type FilterValue = string | number | boolean export type FilterCacheKey = string -export type FilterCache = Map> -export type FiltersCache = Map +export interface IFilterCache { + op: FilterOp + // In this set, `undefined` values represent nodes that did not have the path + byValue: Map> + meta: { + // Ordered set of all values found by this filter. No null / undefs. + valuesAsc?: Array + // Flat set of nodes, ordered by valueAsc, but not ordered per value group + nodesByValueAsc?: Array + // Ranges of nodes per value, maps to the nodesByValueAsc array + valueRanges?: Map + } +} +export type FiltersCache = Map /** * Get all nodes from redux store. @@ -152,24 +170,68 @@ export const addResolvedNodes = ( return resolvedNodes } +export const postIndexingMetaSetup = (filterCache: IFilterCache): void => { + // Create an ordered array of individual nodes, ordered (grouped) by the + // value to which the filter resolves. Nodes are not ordered per value. + // This way non-eq ops can simply slice the array to get a range. + + const entriesNullable: Array<[FilterValueNullable, Set]> = [ + ...filterCache.byValue.entries(), + ] + + // These range checks never return `null` or `undefined` so filter those out + // By filtering them out early, the sort should be faster. Could be ... + const entries: Array<[ + FilterValue, + Set + ]> = entriesNullable.filter(([v]) => v != null) as Array< + [FilterValue, Set] + > + + // Sort all sets by its value, asc. Ignore/allow potential type casting. + entries.sort(([a], [b]) => (a < b ? -1 : a > b ? 1 : 0)) + + const orderedNodes: Array = [] + const orderedValues: Array = [] + const offsets: Map = new Map() + entries.forEach(([v, bucket]: [FilterValue, Set]) => { + // Record the range containing all nodes with as filter value v + // The last value of the range should be the offset of the next value + // (So you should be able to do `nodes.slice(start, stop)` to get them) + offsets.set(v, [orderedNodes.length, orderedNodes.length + bucket.size]) + // We could do `arr.push(...bucket)` here but that's not safe with very + // large sets, so we use a regular loop + bucket.forEach(node => orderedNodes.push(node)) + orderedValues.push(v) + }) + + filterCache.meta.valuesAsc = orderedValues + filterCache.meta.nodesByValueAsc = orderedNodes + // The nodesByValueAsc is ordered by value, but multiple nodes per value are + // not ordered. To make lt as fast as lte, we must know the start and stop + // index for each value. Similarly useful for for `ne`. + filterCache.meta.valueRanges = offsets +} + /** - * Given a ("flat") filter path leading up to "eq", a set of node types, and a + * Given a single non-elemMatch filter path, a set of node types, and a * cache, create a cache that for each resulting value of the filter contains - * all the Nodes in a Set (or, if the property is `id`, just the Nodes). + * all the Nodes in a Set. * This cache is used for applying the filter and is a massive improvement over - * looping over all the nodes, when the number of pages (/nodes) scale up. + * looping over all the nodes, when the number of pages (/nodes) scales up. 
*/ -export const ensureIndexByTypedChain = ( - cacheKey: FilterCacheKey, - chain: string[], +export const ensureIndexByQuery = ( + op: FilterOp, + filterCacheKey: FilterCacheKey, + filterPath: string[], nodeTypeNames: string[], filtersCache: FiltersCache ): void => { const state = store.getState() const resolvedNodesCache = state.resolvedNodesCache - const filterCache: FilterCache = new Map() - filtersCache.set(cacheKey, filterCache) + const filterCache: IFilterCache = { op, byValue: new Map(), meta: {} } + filtersCache.set(filterCacheKey, filterCache) // We cache the subsets of nodes by type, but only one type. So if searching // through one node type we can prevent a search through all nodes, otherwise @@ -177,7 +239,7 @@ export const ensureIndexByTypedChain = ( if (nodeTypeNames.length === 1) { getNodesByType(nodeTypeNames[0]).forEach(node => { - addNodeToFilterCache(node, chain, filterCache, resolvedNodesCache) + addNodeToFilterCache(node, filterPath, filterCache, resolvedNodesCache) }) } else { // Here we must first filter for the node type @@ -187,15 +249,19 @@ export const ensureIndexByTypedChain = ( return } - addNodeToFilterCache(node, chain, filterCache, resolvedNodesCache) + addNodeToFilterCache(node, filterPath, filterCache, resolvedNodesCache) }) } + + if (op === `$lte`) { + postIndexingMetaSetup(filterCache) + } } function addNodeToFilterCache( node: IGatsbyNode, chain: Array, - filterCache: FilterCache, + filterCache: IFilterCache, resolvedNodesCache, valueOffset: any = node ): void { @@ -209,35 +275,44 @@ function addNodeToFilterCache( // - for plain query, valueOffset === node // - for elemMatch, valueOffset is sub-tree of the node to continue matching let v = valueOffset as any + let prev = v let i = 0 while (i < chain.length && v) { const nextProp = chain[i++] + prev = v v = v[nextProp] } if ( (typeof v !== `string` && typeof v !== `number` && - typeof v !== `boolean`) || + typeof v !== `boolean` && + v !== null) || i !== chain.length ) { - // Not sure whether this is supposed to happen, but this means that either - // - The node chain ended with `undefined`, or - // - The node chain ended in something other than a primitive, or - // - A part in the chain in the object was not an object - return + if (chain[i - 1] in prev) { + // This means that either + // - The filter resolved to `undefined`, or + // - The filter resolved to something other than a primitive + return + } + // The filter path did not fully exist in node. Encode this as `undefined`. + // The edge case is that `eq` will return these for `null` checks while + // range checks like `lte` do not return these, so we make a distinction. 
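// Reduced sketch of the missing-path encoding used by addNodeToFilterCache
// here: when the filter path does not fully exist on a node, the node is
// bucketed under `undefined`, so an `$eq: null` query can still pick it up
// while range ops such as `$lte` ignore it. Shapes and the helper name are
// illustrative.
function valueForBucket(
  node: Record<string, any>,
  filterPath: string[]
): string | number | boolean | null | undefined {
  let value: any = node
  for (const step of filterPath) {
    if (value == null || typeof value !== `object`) return undefined
    value = value[step]
  }
  if (
    typeof value === `string` ||
    typeof value === `number` ||
    typeof value === `boolean` ||
    value === null
  ) {
    return value
  }
  return undefined
}

// valueForBucket({ fields: { draft: null } }, [`fields`, `draft`]) -> null
// valueForBucket({ fields: {} }, [`fields`, `draft`])              -> undefined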
+ v = undefined } - let set = filterCache.get(v) + let set = filterCache.byValue.get(v) if (!set) { set = new Set() - filterCache.set(v, set) + filterCache.byValue.set(v, set) } set.add(node) } export const ensureIndexByElemMatch = ( - cacheKey: FilterCacheKey, + op: FilterOp, + filterCacheKey: FilterCacheKey, filter: IDbQueryElemMatch, nodeTypeNames: Array, filtersCache: FiltersCache @@ -248,8 +323,8 @@ export const ensureIndexByElemMatch = ( const state = store.getState() const { resolvedNodesCache } = state - const filterCache: FilterCache = new Map() - filtersCache.set(cacheKey, filterCache) + const filterCache: IFilterCache = { op, byValue: new Map(), meta: {} } + filtersCache.set(filterCacheKey, filterCache) if (nodeTypeNames.length === 1) { getNodesByType(nodeTypeNames[0]).forEach(node => { @@ -277,13 +352,17 @@ export const ensureIndexByElemMatch = ( ) }) } + + if (op === `$lte`) { + postIndexingMetaSetup(filterCache) + } } function addNodeToBucketWithElemMatch( node: IGatsbyNode, valueAtCurrentStep: any, // Arbitrary step on the path inside the node filter: IDbQueryElemMatch, - filterCache: FilterCache, + filterCache: IFilterCache, resolvedNodesCache ): void { // There can be a filter that targets `__gatsby_resolved` so fix that first @@ -337,24 +416,127 @@ function addNodeToBucketWithElemMatch( } } +const binarySearch = ( + values: Array, + needle: FilterValue +): [number, number] | undefined => { + let min = 0 + let max = values.length - 1 + let pivot = Math.floor(values.length / 2) + while (min <= max) { + const value = values[pivot] + if (needle < value) { + // Move pivot to middle of nodes left of current pivot + // assert pivot < max + max = pivot + } else if (needle > value) { + // Move pivot to middle of nodes right of current pivot + // assert pivot > min + min = pivot + } else { + // This means needle === value + // TODO: except for NaN ... and potentially certain type casting cases + return [pivot, pivot] + } + + if (max - min <= 1) { + // End of search. Needle not found (as expected). Use pivot as index. + // If the needle was not found, max-min==1 and max is returned. + return [min, max] + } + + pivot = Math.floor((max - min) / 2) + } + + // Shouldn't be reachable, but just in case, fall back to Sift if so. + return undefined +} + /** - * Given a ("flat") filter path leading up to "eq", a target value to filter - * for, a set of node types, and a pre-generated lookup cache, return the set - * of Nodes (or, if the property is `id` just the Node) which pass the filter. - * This returns `undefined` if there is Node that passes the filter. + * Given the cache key for a filter and a target value return the set of nodes + * that resolve to this value. + * This returns `undefined` if there is no such node * * Basically if the filter was {a: {b: {slug: {eq: "foo/bar"}}}} then it will * return all the nodes that have `node.slug === "foo/bar"`. That usually (but * not always) at most one node for slug, but this filter can apply to anything. - * - * The only exception is `id`, since internally there can be at most one node - * per `id` so there's a minor optimization for that (no need for Sets). 
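// Reduced, self-contained sketch of the range metadata that
// postIndexingMetaSetup builds and that the `$lte` branch of
// getNodesFromCacheByValue consumes. Node shape and helper name are
// illustrative.
type NodeLike = { id: string }

function buildRangeMeta(byValue: Map<number, Set<NodeLike>>): {
  valuesAsc: number[]
  nodesByValueAsc: NodeLike[]
  valueRanges: Map<number, [number, number]>
} {
  // Sort buckets by value ascending, then flatten them while remembering the
  // [start, stop) offsets of every bucket inside the flattened array.
  const entries = [...byValue.entries()].sort(([a], [b]) => a - b)
  const valuesAsc: number[] = []
  const nodesByValueAsc: NodeLike[] = []
  const valueRanges = new Map<number, [number, number]>()
  entries.forEach(([value, bucket]) => {
    valueRanges.set(value, [
      nodesByValueAsc.length,
      nodesByValueAsc.length + bucket.size,
    ])
    bucket.forEach(node => nodesByValueAsc.push(node))
    valuesAsc.push(value)
  })
  return { valuesAsc, nodesByValueAsc, valueRanges }
}

// With buckets { 1: {a}, 3: {b, c}, 5: {d} } an `lte: 3` query becomes a single
// slice: valueRanges.get(3) is [1, 3], so nodesByValueAsc.slice(0, 3) yields
// [a, b, c] without any per-node comparison. When the queried value is absent
// (say `lte: 4` over values [1, 3, 5]), the binarySearch helper above returns
// the bracketing pivots ([1, 2] here) and the caller slices up to the end of
// the range of the largest value that is still <= 4.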
*/ -export const getFilterCacheByTypedChain = ( - cacheKey: FilterCacheKey, - value: boolean | number | string, +export const getNodesFromCacheByValue = ( + filterCacheKey: FilterCacheKey, + filterValue: FilterValueNullable, filtersCache: FiltersCache ): Set | undefined => { - const byTypedKey = filtersCache?.get(cacheKey) - return byTypedKey?.get(value) + const filterCache = filtersCache?.get(filterCacheKey) + if (!filterCache) { + return undefined + } + + const op = filterCache.op + + if (op === `$eq`) { + if (filterValue == null) { + // Edge case; fetch all nodes for `null` and `undefined` because `$eq` + // also returns nodes without the path when searching for `null`. Not + // ops do so, so we map non-existing paths to `undefined`. + return new Set([ + ...(filterCache.byValue.get(null) ?? []), + ...(filterCache.byValue.get(undefined) ?? []), + ]) + } + return filterCache.byValue.get(filterValue) + } + + if (op === `$lte`) { + // First try a direct approach. If a value is queried that also exists then + // we can prevent a binary search through the whole set, O(1) vs O(log n) + + if (filterValue == null) { + // This is an edge case and this value should be directly indexed + // For `lte` this should only return nodes for `null`, not a "range" + return filterCache.byValue.get(filterValue) + } + + const ranges = filterCache.meta.valueRanges + const nodes = filterCache.meta.nodesByValueAsc + + const range = ranges!.get(filterValue) + if (range) { + return new Set(nodes!.slice(0, range[1])) + } + + // Query may ask for a value that doesn't appear in the set, like if the + // set is [1, 2, 5, 6] and the query is <= 3. In that case we have to + // apply a search (we'll do binary) to determine the offset to slice from. + + // Note: for lte, the valueAsc array must be set at this point + const values = filterCache.meta.valuesAsc as Array + // It shouldn't find the targetValue (but it might) and return the index of + // the two value between which targetValue sits, or first/last element. + const point = binarySearch(values, filterValue) + if (!point) { + return undefined + } + const [pivotMin, pivotMax] = point + // Each pivot index must have a value and a range + // The returned min/max index may include the lower/upper bound, so we still + // have to do lte checks for both values. + let pivotValue = values[pivotMax] + if (pivotValue > filterValue) { + pivotValue = values[pivotMin] + } + + // Note: the pivot value _shouldnt_ match the filter value because that + // means the value was actually found, but those should have been indexed + // so should have yielded a result in the .get() above. + + const [exclPivot, inclPivot] = ranges!.get(pivotValue) as [number, number] + + // Note: technically, `5 <= "5" === true` but `5` would not be cached. + // So we have to consider weak comparison and may have to include the pivot + const until = pivotValue <= filterValue ? 
inclPivot : exclPivot + return new Set(nodes!.slice(0, until)) + } + + // Unreachable because we checked all values of FilterOp (which op is) + return undefined } diff --git a/packages/gatsby/src/redux/persist.ts b/packages/gatsby/src/redux/persist.ts index 7bc4a0cba0c0c..68a049a0b7fdb 100644 --- a/packages/gatsby/src/redux/persist.ts +++ b/packages/gatsby/src/redux/persist.ts @@ -1,11 +1,12 @@ import path from "path" +import os from "os" import v8 from "v8" import { existsSync, mkdtempSync, + moveSync, // Note: moveSync over renameSync because /tmp may be on other mount readFileSync, removeSync, - renameSync, writeFileSync, } from "fs-extra" import { IGatsbyNode, ICachedReduxState } from "./types" @@ -130,7 +131,7 @@ function safelyRenameToBak(reduxCacheFolder: string): string { ++suffixCounter bakName = reduxCacheFolder + tmpSuffix + suffixCounter } - renameSync(reduxCacheFolder, bakName) + moveSync(reduxCacheFolder, bakName) return bakName } @@ -139,7 +140,7 @@ export function writeToCache(contents: ICachedReduxState): void { // Note: this should be a transactional operation. So work in a tmp dir and // make sure the cache cannot be left in a corruptable state due to errors. - const tmpDir = mkdtempSync(`reduxcache`) // linux / windows + const tmpDir = mkdtempSync(path.join(os.tmpdir(), `reduxcache`)) // linux / windows prepareCacheFolder(tmpDir, contents) @@ -156,7 +157,7 @@ export function writeToCache(contents: ICachedReduxState): void { } // The redux cache folder should now not exist so we can rename our tmp to it - renameSync(tmpDir, reduxCacheFolder) + moveSync(tmpDir, reduxCacheFolder) // Now try to yolorimraf the old cache folder try { diff --git a/packages/gatsby/src/redux/reducers/__tests__/redirects.js b/packages/gatsby/src/redux/reducers/__tests__/redirects.ts similarity index 86% rename from packages/gatsby/src/redux/reducers/__tests__/redirects.js rename to packages/gatsby/src/redux/reducers/__tests__/redirects.ts index 605500d55e604..76323bf7035fc 100644 --- a/packages/gatsby/src/redux/reducers/__tests__/redirects.js +++ b/packages/gatsby/src/redux/reducers/__tests__/redirects.ts @@ -1,9 +1,11 @@ +import { ICreateRedirectAction, IRedirect } from "../../types" + let reducer describe(`redirects`, () => { beforeEach(() => { jest.isolateModules(() => { - reducer = require(`../redirects`) + reducer = require(`../redirects`).redirectsReducer }) }) it(`lets you redirect to an internal url`, () => { @@ -15,7 +17,7 @@ describe(`redirects`, () => { }, } - let state = reducer(undefined, action) + const state = reducer(undefined, action) expect(state).toEqual([ { @@ -34,7 +36,7 @@ describe(`redirects`, () => { }, } - let state = reducer(undefined, action) + const state = reducer(undefined, action) expect(state).toEqual([ { @@ -73,7 +75,10 @@ describe(`redirects`, () => { }) it(`prevents duplicate redirects`, () => { - function createRedirect(fromPath, toPath) { + function createRedirect( + fromPath: string, + toPath: string + ): ICreateRedirectAction { return { type: `CREATE_REDIRECT`, payload: { fromPath, toPath }, @@ -92,7 +97,7 @@ describe(`redirects`, () => { }) it(`allows multiple redirects with same "fromPath" but different options`, () => { - function createRedirect(redirect) { + function createRedirect(redirect: IRedirect): ICreateRedirectAction { return { type: `CREATE_REDIRECT`, payload: redirect, diff --git a/packages/gatsby/src/redux/reducers/index.js b/packages/gatsby/src/redux/reducers/index.js index 67067487f1b42..86a4e2509a487 100644 --- 
a/packages/gatsby/src/redux/reducers/index.js +++ b/packages/gatsby/src/redux/reducers/index.js @@ -1,5 +1,6 @@ const reduxNodes = require(`./nodes`) const lokiNodes = require(`../../db/loki/nodes`).reducer +import { redirectsReducer } from "./redirects" const backend = process.env.GATSBY_DB_NODES || `redux` @@ -59,7 +60,7 @@ module.exports = { jobsV2: require(`./jobsv2`), webpack: require(`./webpack`), webpackCompilationHash: require(`./webpack-compilation-hash`), - redirects: require(`./redirects`), + redirects: redirectsReducer, babelrc: require(`./babelrc`), schemaCustomization: require(`./schema-customization`), themes: require(`./themes`), diff --git a/packages/gatsby/src/redux/reducers/redirects.js b/packages/gatsby/src/redux/reducers/redirects.js deleted file mode 100644 index ed8b072dc98fd..0000000000000 --- a/packages/gatsby/src/redux/reducers/redirects.js +++ /dev/null @@ -1,44 +0,0 @@ -const _ = require(`lodash`) - -const redirects = new Map() - -function exists(newRedirect) { - if (!redirects.has(newRedirect.fromPath)) { - return false - } - - return redirects - .get(newRedirect.fromPath) - .some(redirect => _.isEqual(redirect, newRedirect)) -} - -function add(redirect) { - let samePathRedirects = redirects.get(redirect.fromPath) - - if (!samePathRedirects) { - samePathRedirects = [] - redirects.set(redirect.fromPath, samePathRedirects) - } - - samePathRedirects.push(redirect) -} - -module.exports = (state = [], action) => { - switch (action.type) { - case `CREATE_REDIRECT`: { - const redirect = action.payload - - // Add redirect only if it wasn't yet added to prevent duplicates - if (!exists(redirect)) { - add(redirect) - - state.push(redirect) - } - - return state - } - - default: - return state - } -} diff --git a/packages/gatsby/src/redux/reducers/redirects.ts b/packages/gatsby/src/redux/reducers/redirects.ts new file mode 100644 index 0000000000000..716d0ebe3ed45 --- /dev/null +++ b/packages/gatsby/src/redux/reducers/redirects.ts @@ -0,0 +1,46 @@ +import _ from "lodash" +import { IGatsbyState, IRedirect, ICreateRedirectAction } from "../types" + +const redirects = new Map() + +function exists(newRedirect: IRedirect): boolean { + const fromPathRedirects = redirects.get(newRedirect.fromPath) + + if (!fromPathRedirects) return false + + return fromPathRedirects.some(redirect => _.isEqual(redirect, newRedirect)) +} + +function add(redirect: IRedirect): void { + let samePathRedirects = redirects.get(redirect.fromPath) + + if (!samePathRedirects) { + samePathRedirects = [] + redirects.set(redirect.fromPath, samePathRedirects) + } + + samePathRedirects.push(redirect) +} + +export const redirectsReducer = ( + state: IGatsbyState["redirects"] = [], + action: ICreateRedirectAction +): IGatsbyState["redirects"] => { + switch (action.type) { + case `CREATE_REDIRECT`: { + const redirect = action.payload + + // Add redirect only if it wasn't yet added to prevent duplicates + if (!exists(redirect)) { + add(redirect) + + state.push(redirect) + } + + return state + } + + default: + return state + } +} diff --git a/packages/gatsby/src/redux/run-sift.js b/packages/gatsby/src/redux/run-sift.js index bd511eaf7b2cd..0f3f1140ce963 100644 --- a/packages/gatsby/src/redux/run-sift.js +++ b/packages/gatsby/src/redux/run-sift.js @@ -14,13 +14,21 @@ const { dbQueryToSiftQuery, } = require(`../db/common/query`) const { - ensureIndexByTypedChain, + ensureIndexByQuery, ensureIndexByElemMatch, - getFilterCacheByTypedChain, + getNodesFromCacheByValue, addResolvedNodes, getNode: siftGetNode, } = 
require(`./nodes`) +const FAST_OPS = [ + `$eq`, + // "$lt", + `$lte`, + // "$gt", + // "$gte" +] + /** * Creates a key for one filterCache inside FiltersCache * @@ -28,7 +36,7 @@ const { * @param {DbQuery} filter * @returns {FilterCacheKey} (a string: `types.join()/path.join()/operator` ) */ -const createTypedFilterCacheKey = (typeNames, filter) => { +const createFilterCacheKey = (typeNames, filter) => { // Note: while `elemMatch` is a special case, in the key it's just `elemMatch` // (This function is future proof for elemMatch support, won't receive it yet) let f = filter @@ -129,13 +137,13 @@ function handleMany(siftArgs, nodes) { } /** - * Given the chain of a simple filter, return the set of nodes that pass the - * filter. The chain should be a property chain leading to the property to - * check, followed by the value to check against. Common example: - * `allThings(filter: { fields: { slug: { eq: $slug } } })` + * Given the path of a set of filters, return the sets of nodes that pass the + * filter. * Only nodes of given node types will be considered * A fast index is created if one doesn't exist yet so cold call is slower. - * The empty result value is null if firstOnly is false, or else an empty array. + * Returns undefined if an op was not supported for fast indexes or when no + * nodes were found for given (query) value. In the zero nodes case, we have to + * go through Sift to make sure we're not missing an edge case, for now. * * @param {Array} filters Resolved. (Should be checked by caller to exist) * @param {Array} nodeTypeNames @@ -143,23 +151,33 @@ function handleMany(siftArgs, nodes) { * @returns {Array | undefined} */ const runFiltersWithoutSift = (filters, nodeTypeNames, filtersCache) => { - const caches = getBucketsForFilters(filters, nodeTypeNames, filtersCache) + const nodesPerValueSets /*: Array> */ = getBucketsForFilters( + filters, + nodeTypeNames, + filtersCache + ) - if (!caches) { + if (!nodesPerValueSets) { // Let Sift take over as fallback return undefined } // Put smallest last (we'll pop it) - caches.sort((a, b) => b.length - a.length) + nodesPerValueSets.sort( + (a /*: Set */, b /*: Set */) => b.size - a.size + ) // Iterate on the set with the fewest elements and create the intersection - const needles = caches.pop() + const needles /*: Set*/ = nodesPerValueSets.pop() // Take the intersection of the retrieved caches-by-value - const result = [] + const result /*: Array */ = [] // This _can_ still be expensive but the set of nodes should be limited ... - needles.forEach(node => { - if (caches.every(cache => cache.has(node))) { + needles.forEach((node /*: IGatsbyNode */) => { + if ( + nodesPerValueSets.every((cache /*: Set */) => + cache.has(node) + ) + ) { // Every cache set contained this node so keep it result.push(node) } @@ -169,6 +187,9 @@ const runFiltersWithoutSift = (filters, nodeTypeNames, filtersCache) => { // Consider the case of {a: {eq: 5}, b: {eq: 10}}, do we cache the [5,10] // case for all value pairs? How likely is that to ever be reused? + if (result.length === 0) { + return undefined + } return result } @@ -180,36 +201,36 @@ const runFiltersWithoutSift = (filters, nodeTypeNames, filtersCache) => { * cache was not found. Must fallback to sift. 
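// Self-contained sketch of the intersection step in runFiltersWithoutSift
// above: iterate the smallest Set of candidate nodes and keep only the nodes
// that every other Set also contains. The helper name is illustrative.
function intersectSets<T>(sets: Array<Set<T>>): T[] {
  const remaining = [...sets].sort((a, b) => b.size - a.size) // smallest last
  const needles = remaining.pop()
  if (!needles) return []
  const result: T[] = []
  needles.forEach(candidate => {
    if (remaining.every(set => set.has(candidate))) {
      result.push(candidate)
    }
  })
  return result
}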
*/ const getBucketsForFilters = (filters, nodeTypeNames, filtersCache) => { - const filterCaches /*: Array*/ = [] + const nodesPerValueSets /*: Array>*/ = [] // Fail fast while trying to create and get the value-cache for each path let every = filters.every((filter /*: DbQuery*/) => { - let cacheKey = createTypedFilterCacheKey(nodeTypeNames, filter) + let filterCacheKey = createFilterCacheKey(nodeTypeNames, filter) if (filter.type === `query`) { // (Let TS warn us if a new query type gets added) const q /*: IDbQueryQuery */ = filter return getBucketsForQueryFilter( - cacheKey, + filterCacheKey, q, nodeTypeNames, filtersCache, - filterCaches + nodesPerValueSets ) } else { // (Let TS warn us if a new query type gets added) const q /*: IDbQueryElemMatch*/ = filter return collectBucketForElemMatch( - cacheKey, + filterCacheKey, q, nodeTypeNames, filtersCache, - filterCaches + nodesPerValueSets ) } }) if (every) { - return filterCaches + return nodesPerValueSets } // "failed at least one" @@ -219,62 +240,72 @@ const getBucketsForFilters = (filters, nodeTypeNames, filtersCache) => { /** * Fetch all buckets for given query filter. That means it's not elemMatch. * - * @param {FilterCacheKey} cacheKey + * @param {FilterCacheKey} filterCacheKey * @param {IDbQueryQuery} filter * @param {Array} nodeTypeNames * @param {FiltersCache} filtersCache - * @param {Array} filterCaches + * @param {Array>} nodesPerValueSets * @returns {boolean} false means soft fail, filter must go through Sift */ const getBucketsForQueryFilter = ( - cacheKey, + filterCacheKey, filter, nodeTypeNames, filtersCache, - filterCaches + nodesPerValueSets ) => { let { - path: chain, - query: { value: targetValue }, + path: filterPath, + query: { + // Note: comparator is verified to be a FilterOp in filterWithoutSift + comparator /*: as FilterOp*/, + value: filterValue, + }, } = filter - if (!filtersCache.has(cacheKey)) { - ensureIndexByTypedChain(cacheKey, chain, nodeTypeNames, filtersCache) + if (!filtersCache.has(filterCacheKey)) { + ensureIndexByQuery( + comparator, + filterCacheKey, + filterPath, + nodeTypeNames, + filtersCache + ) } - const filterCache = getFilterCacheByTypedChain( - cacheKey, - targetValue, + const nodesPerValue /*: Set | undefined */ = getNodesFromCacheByValue( + filterCacheKey, + filterValue, filtersCache ) // If we couldn't find the needle then maybe sift can, for example if the // schema contained a proxy; `slug: String @proxy(from: "slugInternal")` // There are also cases (and tests) where id exists with a different type - if (!filterCache) { + if (!nodesPerValue) { return false } // In all other cases this must be a non-empty Set because the indexing // mechanism does not create a Set unless there's a IGatsbyNode for it - filterCaches.push(filterCache) + nodesPerValueSets.push(nodesPerValue) return true } /** - * @param {string} typedKey + * @param {FilterCacheKey} filterCacheKey * @param {IDbQueryElemMatch} filter * @param {Array} nodeTypeNames * @param {FiltersCache} filtersCache - * @param {Array} filterCaches Matching node sets are put in this array + * @param {Array>} nodesPerValueSets Matching node sets are put in this array */ const collectBucketForElemMatch = ( - typedKey, + filterCacheKey, filter, nodeTypeNames, filtersCache, - filterCaches + nodesPerValueSets ) => { // Get comparator and target value for this elemMatch let comparator = `` @@ -292,22 +323,22 @@ const collectBucketForElemMatch = ( } } - if ( - ![ - `$eq`, - // "$lte", - // "$gte", - ].includes(comparator) - ) { + if 
(!FAST_OPS.includes(comparator)) { return false } - if (!filtersCache.has(typedKey)) { - ensureIndexByElemMatch(typedKey, filter, nodeTypeNames, filtersCache) + if (!filtersCache.has(filterCacheKey)) { + ensureIndexByElemMatch( + comparator, + filterCacheKey, + filter, + nodeTypeNames, + filtersCache + ) } - const nodesByKeyValue /*: Set | undefined*/ = getFilterCacheByTypedChain( - typedKey, + const nodesByValue /*: Set | undefined*/ = getNodesFromCacheByValue( + filterCacheKey, targetValue, filtersCache ) @@ -315,13 +346,13 @@ const collectBucketForElemMatch = ( // If we couldn't find the needle then maybe sift can, for example if the // schema contained a proxy; `slug: String @proxy(from: "slugInternal")` // There are also cases (and tests) where id exists with a different type - if (!nodesByKeyValue) { + if (!nodesByValue) { return false } // In all other cases this must be a non-empty Set because the indexing // mechanism does not create a Set unless there's a IGatsbyNode for it - filterCaches.push(nodesByKeyValue) + nodesPerValueSets.push(nodesByValue) return true } @@ -330,7 +361,6 @@ const collectBucketForElemMatch = ( * Filters and sorts a list of nodes using mongodb-like syntax. * * @param args raw graphql query filter/sort as an object - * @property {boolean | number | string} args.type gqlType. See build-node-types * @property {boolean} args.firstOnly true if you want to return only the first * result found. This will return a collection of size 1. Not a single element * @property {{filter?: Object, sort?: Object} | undefined} args.queryArgs @@ -444,9 +474,11 @@ const filterToStats = ( } /** - * Check if the filter is "flat" (single leaf) and an "$eq". If so, uses custom - * indexes based on filter and types and returns any result it finds. - * If conditions are not met or no nodes are found, returns undefined. + * Check if filter op is supported (not all are). If so, uses custom + * fast indexes based on filter and types and returns any result it finds. + * If conditions are not met or no nodes are found, returns undefined and + * a slow run through Sift is executed instead. + * This function is a noop if no filter cache is given to it. * * @param {Array} filters Resolved. (Should be checked by caller to exist) * @param {Array} nodeTypeNames @@ -454,8 +486,6 @@ const filterToStats = ( * @returns {Array | undefined} Collection of results */ const filterWithoutSift = (filters, nodeTypeNames, filtersCache) => { - // This can also be `$ne`, `$in` or any other grapqhl comparison op - if (!filtersCache) { // If no filter cache is passed on, explicitly don't use one return undefined @@ -463,17 +493,14 @@ const filterWithoutSift = (filters, nodeTypeNames, filtersCache) => { if (filters.length === 0) { // If no filters are given, go through Sift. This does not appear to be - // slower than s - // hortcutting it here. + // slower than shortcutting it here. return undefined } if ( filters.some( filter => - filter.type === `query` && // enabled - // filter.type === `elemMatch` || // disabled - ![`$eq`].includes(filter.query.comparator) + filter.type === `query` && !FAST_OPS.includes(filter.query.comparator) ) ) { // If there's a filter with non-supported op, stop now. 
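The run-sift changes above hinge on one idea: for each comparator in `FAST_OPS`, keep a per-filter cache that maps a filter value to the `Set` of nodes carrying that value, then intersect those Sets starting from the smallest one. Below is a minimal, self-contained sketch of just that intersection step; the helper name `intersectNodeSets`, the hand-built `Set`s, and the placeholder node objects are illustrative only, not Gatsby's actual API.

```js
// Illustrative only: mirrors the intersection logic of runFiltersWithoutSift,
// operating on plain Sets instead of Gatsby's FiltersCache.
const intersectNodeSets = nodesPerValueSets => {
  if (nodesPerValueSets.length === 0) return undefined

  // Put the smallest Set last so it can be popped and used as the needle list
  nodesPerValueSets.sort((a, b) => b.size - a.size)
  const needles = nodesPerValueSets.pop()

  const result = []
  needles.forEach(node => {
    // Keep a node only if every remaining per-filter Set also contains it
    if (nodesPerValueSets.every(cache => cache.has(node))) {
      result.push(node)
    }
  })

  // Zero matches falls back to Sift (undefined), as in the change above
  return result.length === 0 ? undefined : result
}

// Hypothetical nodes standing in for IGatsbyNode objects
const a = { id: `a` }
const b = { id: `b` }
const c = { id: `c` }

console.log(
  intersectNodeSets([new Set([a, b, c]), new Set([a, b]), new Set([b, c])])
) // -> [ { id: 'b' } ]
```

Letting the most selective filter supply the needles keeps the number of `has` checks proportional to the smallest candidate set, which is the point of the sort-then-pop step in the diff.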
diff --git a/packages/gatsby/src/redux/types.ts b/packages/gatsby/src/redux/types.ts index dce585b7f6970..b871c5bd76f0b 100644 --- a/packages/gatsby/src/redux/types.ts +++ b/packages/gatsby/src/redux/types.ts @@ -1,11 +1,21 @@ import { IProgram } from "../commands/types" -import { GraphQLSchema } from "graphql" +import { GraphQLFieldExtensionDefinition } from "../schema/extensions" +import { DocumentNode, GraphQLSchema } from "graphql" import { SchemaComposer } from "graphql-compose" type SystemPath = string type Identifier = string type StructuredLog = any // TODO this should come from structured log interface +export interface IRedirect { + fromPath: string + toPath: string + isPermanent?: boolean + redirectInBrowser?: boolean + // Users can add anything to this createRedirect API + [key: string]: any +} + export enum ProgramStatus { BOOTSTRAP_FINISHED = `BOOTSTRAP_FINISHED`, BOOTSTRAP_QUERY_RUNNING_FINISHED = `BOOTSTRAP_QUERY_RUNNING_FINISHED`, @@ -62,6 +72,15 @@ export interface IGatsbyNode { [key: string]: unknown } +export interface IGatsbyPlugin { + name: string + version: string +} + +export interface IGatsbyPluginContext { + [key: string]: (...args: any[]) => any +} + type GatsbyNodes = Map export interface IGatsbyState { @@ -140,7 +159,7 @@ export interface IGatsbyState { } webpack: any // TODO This should be the output from ./utils/webpack.config.js webpackCompilationHash: string - redirects: any[] // TODO + redirects: IRedirect[] babelrc: { stages: { develop: any // TODO @@ -214,6 +233,10 @@ export type ActionsUnion = | IQueryExtractionBabelErrorAction | ISetProgramStatusAction | IPageQueryRunAction + | IAddThirdPartySchema + | ICreateTypes + | ICreateFieldExtension + | IPrintTypeDefinitions export interface ICreatePageDependencyAction { type: `CREATE_COMPONENT_DEPENDENCY` @@ -242,7 +265,7 @@ export interface IReplaceComponentQueryAction { export interface IReplaceStaticQueryAction { type: `REPLACE_STATIC_QUERY` - plugin: Plugin | null | undefined + plugin: IGatsbyPlugin | null | undefined payload: { name: string componentPath: string @@ -254,28 +277,28 @@ export interface IReplaceStaticQueryAction { export interface IQueryExtractedAction { type: `QUERY_EXTRACTED` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: { componentPath: string; query: string } } export interface IQueryExtractionGraphQLErrorAction { type: `QUERY_EXTRACTION_GRAPHQL_ERROR` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: { componentPath: string; error: string } } export interface IQueryExtractedBabelSuccessAction { type: `QUERY_EXTRACTION_BABEL_SUCCESS` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: { componentPath: string } } export interface IQueryExtractionBabelErrorAction { type: `QUERY_EXTRACTION_BABEL_ERROR` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: { componentPath: string @@ -285,21 +308,71 @@ export interface IQueryExtractionBabelErrorAction { export interface ISetProgramStatusAction { type: `SET_PROGRAM_STATUS` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: ProgramStatus } export interface IPageQueryRunAction { type: `PAGE_QUERY_RUN` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: { path: string; componentPath: string; isPage: boolean } } export interface IRemoveStaleJobAction { type: `REMOVE_STALE_JOB_V2` - plugin: Plugin + plugin: IGatsbyPlugin traceId?: string payload: { contentDigest: string } } + +export 
interface IAddThirdPartySchema { + type: `ADD_THIRD_PARTY_SCHEMA` + plugin: IGatsbyPlugin + traceId?: string + payload: GraphQLSchema +} + +export interface ICreateTypes { + type: `CREATE_TYPES` + plugin: IGatsbyPlugin + traceId?: string + payload: DocumentNode | DocumentNode[] +} + +export interface ICreateFieldExtension { + type: `CREATE_FIELD_EXTENSION` + plugin: IGatsbyPlugin + traceId?: string + payload: { + name: string + extension: GraphQLFieldExtensionDefinition + } +} + +export interface IPrintTypeDefinitions { + type: `PRINT_SCHEMA_REQUESTED` + plugin: IGatsbyPlugin + traceId?: string + payload: { + path?: string + include?: { types?: Array; plugins?: Array } + exclude?: { types?: Array; plugins?: Array } + withFieldTypes?: boolean + } +} + +export interface ICreateResolverContext { + type: `CREATE_RESOLVER_CONTEXT` + plugin: IGatsbyPlugin + traceId?: string + payload: + | IGatsbyPluginContext + | { [camelCasedPluginNameWithoutPrefix: string]: IGatsbyPluginContext } +} + +export interface ICreateRedirectAction { + type: `CREATE_REDIRECT` + payload: IRedirect +} diff --git a/packages/gatsby/src/schema/__tests__/node-model.js b/packages/gatsby/src/schema/__tests__/node-model.js index cb736bab5680f..21222402b5bd3 100644 --- a/packages/gatsby/src/schema/__tests__/node-model.js +++ b/packages/gatsby/src/schema/__tests__/node-model.js @@ -296,7 +296,7 @@ describe(`NodeModel`, () => { filter: { frontmatter: { published: { eq: false } } }, } const firstOnly = true - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) const result = await nodeModel.runQuery({ query, firstOnly, @@ -311,7 +311,7 @@ describe(`NodeModel`, () => { filter: { frontmatter: { published: { eq: false } } }, } const firstOnly = false - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) const result = await nodeModel.runQuery({ query, firstOnly, @@ -328,7 +328,7 @@ describe(`NodeModel`, () => { filter: { frontmatter: { published: { eq: false } } }, } const firstOnly = false - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) await nodeModel.runQuery( { query, @@ -354,7 +354,7 @@ describe(`NodeModel`, () => { filter: { frontmatter: { published: { eq: false } } }, } const firstOnly = false - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) await nodeModel.withContext({ path: `/` }).runQuery({ query, firstOnly, @@ -377,7 +377,7 @@ describe(`NodeModel`, () => { filter: { frontmatter: { published: { eq: false } } }, } const firstOnly = false - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) await nodeModel.runQuery( { query, @@ -397,7 +397,7 @@ describe(`NodeModel`, () => { const type = `AllFiles` const query = {} const firstOnly = true - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) const result = nodeModel.runQuery({ query, firstOnly, @@ -412,7 +412,7 @@ describe(`NodeModel`, () => { const type = `TeamMember` const query = { name: { ne: null } } const firstOnly = true - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) const result = await nodeModel.runQuery({ query, firstOnly, @@ -429,7 +429,7 @@ describe(`NodeModel`, () => { }, } const firstOnly = false - 
nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) const result = await nodeModel.runQuery({ query, firstOnly, @@ -448,7 +448,7 @@ describe(`NodeModel`, () => { }, } const firstOnly = true - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) const result = await nodeModel.runQuery({ query, firstOnly, @@ -457,6 +457,33 @@ describe(`NodeModel`, () => { expect(result).toBeDefined() expect(result.id).toEqual(`post2`) }) + + // FIXME: Filters on date instances are not supported yet + // SIFT requires such filters to be expressed as Date instances but we + // don't know if date is stored as `Date` instance or `string` + // so can't really do that + // See https://github.com/crcn/sift.js#date-comparison + it.skip(`queries date instances in nodes`, async () => { + const type = `Post` + const query = { + filter: { + frontmatter: { + date: { lte: `2018-01-01T00:00:00Z` }, + }, + }, + } + const firstOnly = false + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + const result = await nodeModel.runQuery({ + query, + firstOnly, + type, + }) + expect(result).toBeDefined() + expect(result.length).toEqual(2) + expect(result[0].id).toEqual(`post2`) + expect(result[1].id).toEqual(`post3`) + }) }) }) @@ -555,7 +582,7 @@ describe(`NodeModel`, () => { { desc: `no cache`, cb: () => null }, // Always goes through sift ].forEach(({ desc, cb: createFiltersCache }) => { it(`[${desc}] should not resolve prepared nodes more than once`, async () => { - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) await nodeModel.runQuery( { query: { filter: { betterTitle: { eq: `foo` } } }, @@ -566,7 +593,7 @@ describe(`NodeModel`, () => { ) expect(resolveBetterTitleMock.mock.calls.length).toBe(2) expect(resolveOtherTitleMock.mock.calls.length).toBe(0) - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) await nodeModel.runQuery( { query: { filter: { betterTitle: { eq: `foo` } } }, @@ -577,7 +604,7 @@ describe(`NodeModel`, () => { ) expect(resolveBetterTitleMock.mock.calls.length).toBe(2) expect(resolveOtherTitleMock.mock.calls.length).toBe(0) - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) await nodeModel.runQuery( { query: { @@ -590,7 +617,7 @@ describe(`NodeModel`, () => { ) expect(resolveBetterTitleMock.mock.calls.length).toBe(2) expect(resolveOtherTitleMock.mock.calls.length).toBe(2) - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) await nodeModel.runQuery( { query: { @@ -603,7 +630,7 @@ describe(`NodeModel`, () => { ) expect(resolveBetterTitleMock.mock.calls.length).toBe(2) expect(resolveOtherTitleMock.mock.calls.length).toBe(2) - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) await nodeModel.runQuery( { query: { @@ -619,7 +646,7 @@ describe(`NodeModel`, () => { }) it(`[${desc}] can filter by resolved fields`, async () => { - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) const result = await nodeModel.runQuery( { query: { @@ -769,7 +796,7 @@ describe(`NodeModel`, () => { ].forEach(({ desc, cb: createFiltersCache }) => { describe(`[${desc}] Tracks nodes returned by queries`, () => { it(`Tracks objects when running 
query without filter`, async () => { - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) const result = await nodeModel.runQuery({ query: {}, type: schema.getType(`Test`), @@ -786,7 +813,7 @@ describe(`NodeModel`, () => { }) it(`Tracks objects when running query with filter`, async () => { - nodeModel.replaceTypeKeyValueCache(createFiltersCache()) + nodeModel.replaceFiltersCache(createFiltersCache()) const result = await nodeModel.runQuery({ query: { filter: { diff --git a/packages/gatsby/src/schema/__tests__/run-query.js b/packages/gatsby/src/schema/__tests__/run-query.js index cad1d109f06e7..9c2d373a94790 100644 --- a/packages/gatsby/src/schema/__tests__/run-query.js +++ b/packages/gatsby/src/schema/__tests__/run-query.js @@ -216,9 +216,11 @@ function resetDb(nodes) { ) } +let nodesAfterLastRunQuery async function runQuery(queryArgs, filtersCache) { const nodes = makeNodes() resetDb(nodes) + nodesAfterLastRunQuery = nodes const { sc, type: gqlType } = makeGqlType(nodes) const args = { gqlType, @@ -248,12 +250,17 @@ it(`should use the cache argument`, async () => { // Confirm cache is not ignored expect(filtersCache.size === 1).toBe(true) - filtersCache.forEach((filterCache, cacheKey) => { + filtersCache.forEach(( + filterCache /*: FilterCache */, + cacheKey /*: FilterCacheKey */ + ) => { // This test will change when the composition of the FilterCache changes // For now it should be a Map of values to Set of nodes - expect(filterCache instanceof Map).toBe(true) + expect(filterCache instanceof Object).toBe(true) + expect(filterCache.byValue instanceof Map).toBe(true) + expect(filterCache.meta instanceof Object).toBe(true) // There ought to be at least one value mapped (probably more, shrug) - expect(filterCache.size >= 1).toBe(true) + expect(filterCache.byValue.size >= 1).toBe(true) }) }) @@ -367,8 +374,7 @@ it(`should use the cache argument`, async () => { let result = await runFilter({ hair: { lt: 2 } }) expect(result.length).toEqual(2) - expect(result[0].hair).toEqual(1) - expect(result[1].hair).toEqual(0) + result.forEach(r => expect(r.hair <= 2).toBe(true)) }) it(`handles lt operator with null`, async () => { @@ -383,9 +389,68 @@ it(`should use the cache argument`, async () => { it(`handles lte operator with number`, async () => { let result = await runFilter({ hair: { lte: 1 } }) - expect(result.length).toEqual(2) - expect(result[0].hair).toEqual(1) - expect(result[1].hair).toEqual(0) + let actual = nodesAfterLastRunQuery.reduce( + (acc, node) => (node.hair <= 1 ? acc + 1 : acc), + 0 + ) + + expect(actual).not.toBe(0) // Test should keep this invariant! + expect(result.length).toEqual(actual) + result.forEach(r => expect(r.hair <= 1).toBe(true)) + }) + + it(`should lte when value is lower than all found values`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ float: { lte: 1 } }) + + let actual = nodesAfterLastRunQuery.reduce( + (acc, node) => (node.float <= 1 ? acc + 1 : acc), + 0 + ) + + expect(actual).toEqual(0) // Make sure test nodes keep this invariant! + expect(result).toEqual(null) // Zero results yields null + }) + + it(`should lte when value is in the middle of all found values`, async () => { + let result = await runFilter({ float: { lte: 2 } }) + + let actual = nodesAfterLastRunQuery.reduce( + (acc, node) => (node.float <= 2 ? 
acc + 1 : acc), + 0 + ) + + expect(result.length).toEqual(actual) + result.forEach(r => expect(r.float <= 2).toBe(true)) + }) + + it(`should lte when value is higher than all found values`, async () => { + let result = await runFilter({ float: { lte: 5 } }) + + let actual = nodesAfterLastRunQuery.reduce( + (acc, node) => (node.float <= 5 ? acc + 1 : acc), + 0 + ) + + expect(result.length).toEqual(actual) + }) + + it.skip(`should lte when type coercion fails direct value lookup`, async () => { + // Here 1.5 exists but only as number. However, `1.5 <= '1.5' === true` + // This test checks whether we don't incorrectly assume that if the + // value wasn't mapped, that it can't be found. + let result = await runFilter({ float: { lte: `1.5` } }) + + let actual = nodesAfterLastRunQuery.reduce( + (acc, node) => (node.float <= 1.5 ? acc + 1 : acc), + 0 + ) + + expect(result).not.toBe(undefined) + expect(result).not.toBe(null) + expect(result.length).toEqual(actual) + result.forEach(r => expect(r.float <= 2).toBe(true)) }) it(`handles lte operator with null`, async () => { @@ -393,8 +458,14 @@ it(`should use the cache argument`, async () => { let result = await runFilter({ nil: { lte: null } }) + let actual = nodesAfterLastRunQuery.reduce( + (acc, node) => (node.nil <= null ? acc + 1 : acc), + 0 + ) + // lte null matches null but no nodes without the property (NULL) - expect(result.length).toEqual(1) + expect(actual).not.toBe(0) // Test should keep this invariant! + expect(result.length).toEqual(actual) expect(result[0].name).toEqual(`The Mad Wax`) expect(result[0].nil).toEqual(null) }) @@ -419,9 +490,14 @@ it(`should use the cache argument`, async () => { it(`handles gte operator with number`, async () => { let result = await runFilter({ hair: { gte: 1 } }) - expect(result.length).toEqual(2) - expect(result[0].hair).toEqual(1) - expect(result[1].hair).toEqual(2) + let actual = nodesAfterLastRunQuery.reduce( + (acc, node) => (node.hair >= 1 ? acc + 1 : acc), + 0 + ) + + expect(actual).not.toBe(0) // Test invariant should hold + expect(result.length).toEqual(actual) + result.forEach(r => expect(r.hair >= 1).toBe(true)) }) it(`handles gte operator with null`, async () => { @@ -429,10 +505,18 @@ it(`should use the cache argument`, async () => { let result = await runFilter({ nil: { gte: null } }) - // lte null matches null but no nodes without the property (NULL) - expect(result.length).toEqual(1) - expect(result[0].name).toEqual(`The Mad Wax`) - expect(result[0].nil).toEqual(null) + let actual = nodesAfterLastRunQuery.reduce( + (acc, node) => (node.nil >= null ? acc + 1 : acc), + 0 + ) + + // gte null matches null but no nodes without the property (NULL) + expect(actual).not.toBe(0) // Test invariant should hold + expect(result.length).toEqual(actual) + result.forEach( + // Note: confirm no `null` is returned for >= null + r => expect(r.nil === null).toBe(true) + ) }) it(`handles the regex operator without flags`, async () => { @@ -590,6 +674,9 @@ it(`should use the cache argument`, async () => { }) expect(result.length).toEqual(1) + expect( + result[0]?.singleElem?.things.some(e => e?.one?.two?.three === 123) + ).toEqual(true) }) it(`handles the elemMatch operator on the second element`, async () => { @@ -803,12 +890,14 @@ it(`should use the cache argument`, async () => { // nodes that do not have the field at all (NULL). 
expect(result.length).toEqual(2) - result.forEach(edge => { - // Either does not exist or does not contain - expect(edge.anArray === undefined || !edge.anArray.includes(5)).toBe( - true - ) - }) + // Either does not exist or does not contain + result + .filter(edge => edge.anArray !== undefined) + .forEach(edge => { + // In this test, if the property exists it should be an array + expect(Array.isArray(edge.anArray)).toBe(true) + expect(edge.anArray.includes(5)).toBe(false) + }) }) it(`handles the nin operator for array [null]`, async () => { diff --git a/packages/gatsby/src/schema/node-model.js b/packages/gatsby/src/schema/node-model.js index a7c7fd8853afd..638378b0719b2 100644 --- a/packages/gatsby/src/schema/node-model.js +++ b/packages/gatsby/src/schema/node-model.js @@ -71,7 +71,7 @@ class LocalNodeModel { this._prepareNodesQueues = {} this._prepareNodesPromises = {} this._preparedNodesCache = new Map() - this.replaceTypeKeyValueCache() + this.replaceFiltersCache() } /** @@ -84,7 +84,7 @@ class LocalNodeModel { * actually queried. If the filter targets `id` directly, only one Node is * cached instead of a Set of Nodes. If null, don't create or use a cache. */ - replaceTypeKeyValueCache(map = new Map()) { + replaceFiltersCache(map = new Map()) { this._filtersCache = map // See redux/nodes.js for usage } diff --git a/packages/gatsby/src/types.ts b/packages/gatsby/src/types.ts index 1835e473fc1dc..66727d89500b5 100644 --- a/packages/gatsby/src/types.ts +++ b/packages/gatsby/src/types.ts @@ -2,7 +2,7 @@ export interface IMatch { id: string context: { sourceMessage: string - [key: string]: string + [key: string]: string | boolean } error?: Error | undefined [key: string]: unknown diff --git a/packages/gatsby/src/utils/webpack.config.js b/packages/gatsby/src/utils/webpack.config.js index a37c90f315bfc..993055a638cda 100644 --- a/packages/gatsby/src/utils/webpack.config.js +++ b/packages/gatsby/src/utils/webpack.config.js @@ -530,10 +530,13 @@ module.exports = async ( reuseExistingChunk: true, }, commons: { + // only bundle non-async modules + chunks: `initial`, name: `commons`, - // if a chunk is used on all components we put it in commons - minChunks: componentsCount, + // if a chunk is used on all components we put it in commons (we need at least 2 components) + minChunks: Math.max(componentsCount, 2), priority: 20, + reuseExistingChunk: true, }, // If a chunk is used in at least 2 components we create a separate chunk shared: { diff --git a/scripts/check-ts.js b/scripts/check-ts.js index 9aca0ca9641d9..68df1d3b4e551 100644 --- a/scripts/check-ts.js +++ b/scripts/check-ts.js @@ -15,7 +15,8 @@ const execa = require(`execa`) console.log(`TS Check: Running...`) -const PACKAGES_DIR = path.resolve(__dirname, `../packages`) +const toAbsolutePath = relativePath => path.join(__dirname, `..`, relativePath) +const PACKAGES_DIR = toAbsolutePath(`/packages`) const filterPackage = yargs.argv._[0] @@ -59,9 +60,34 @@ if (filterPackage) { } } +let totalTsFiles = 0 +let totalJsFiles = 0 + packagesWithTs.forEach(project => { + const tsFiles = glob.sync( + toAbsolutePath( + `./packages/${project.split(/.*packages[/\\]/)[1]}/src/**/*.ts` + ) + ).length + + const jsFiles = glob.sync( + toAbsolutePath( + `./packages/${project.split(/.*packages[/\\]/)[1]}/src/**/*.js` + ) + ).length + + totalTsFiles += tsFiles + totalJsFiles += jsFiles + + const percentConverted = Number( + ((tsFiles / (jsFiles + tsFiles)) * 100).toFixed(1) + ) + console.log( - `TS Check: Checking 
./packages/${project.split(/.*packages[/\\]/)[1]}` + `TS Check: Checking ./packages/${project.split(/.*packages[/\\]/)[1]}`, + `\n - TS Files: ${tsFiles}`, + `\n - JS Files: ${jsFiles}`, + `\n - Percent Converted: ${percentConverted}%` ) const args = [ @@ -84,3 +110,15 @@ packagesWithTs.forEach(project => { }) console.log(`TS Check: Success`) + +if (!filterPackage) { + const percentConverted = Number( + ((totalTsFiles / (totalJsFiles + totalTsFiles)) * 100).toFixed(1) + ) + + console.log( + ` - Total TS Files: ${totalTsFiles}`, + `\n - Total JS Files: ${totalJsFiles}`, + `\n - Percent Converted: ${percentConverted}%` + ) +} diff --git a/scripts/e2e-test.sh b/scripts/e2e-test.sh index 1532bb95a2f0c..509f900a73c54 100755 --- a/scripts/e2e-test.sh +++ b/scripts/e2e-test.sh @@ -3,7 +3,7 @@ SRC_PATH=$1 CUSTOM_COMMAND="${2:-yarn test}" GATSBY_PATH="${CIRCLE_WORKING_DIRECTORY:-../../}" -# cypress docker does not support sudo and do not need it but the default node executor does +# cypress docker does not support sudo and does not need it, but the default node executor does command -v sudo && sudo npm install -g gatsby-dev-cli || npm install -g gatsby-dev-cli && # setting up child integration test link to gatsby packages diff --git a/scripts/i18n/README.md b/scripts/i18n/README.md index 6b7654ccce566..8211f63214653 100644 --- a/scripts/i18n/README.md +++ b/scripts/i18n/README.md @@ -70,3 +70,13 @@ When run, the script will: - Pulls the latest version of `gatsby-i18n-source`. - Creates a "sync" pull request that updates all files that do not contain conflicts from the merge. - Creates a "conflicts" pull request that contains all merge conflicts, with instructions on how to resolve them. + +### `run-all` + +Usage: + +```shell
yarn run-all [script name]
``` + +The `run-all` script runs the script provided in the argument across all languages for which there are translations of gatsbyjs.org, listed in /www/i18n.json.
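For example, given the `sync` script declared in `scripts/i18n/package.json` below, running `yarn run-all sync` would invoke `yarn sync <language code>` once for every code listed in that JSON file.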
diff --git a/scripts/i18n/package.json b/scripts/i18n/package.json index b8795edc8fef0..1c60f22081e4f 100644 --- a/scripts/i18n/package.json +++ b/scripts/i18n/package.json @@ -4,6 +4,7 @@ "description": "Scripts for gatsby internationalization", "scripts": { "create": "node ./create.js", + "run-all": "node ./run-all.js", "sync": "node ./sync.js", "update-source": "node ./update-source.js" }, diff --git a/scripts/i18n/run-all.js b/scripts/i18n/run-all.js new file mode 100644 index 0000000000000..3cac7e235ac4a --- /dev/null +++ b/scripts/i18n/run-all.js @@ -0,0 +1,21 @@ +// Run the provided script on all valid repos +const fs = require(`fs`) +const log4js = require(`log4js`) +const shell = require(`shelljs`) +let logger = log4js.getLogger(`run-all`) + +require(`dotenv`).config() + +function runAll(script) { + if (!script) { + logger.error(`Usage: yarn run-all <script name>`) + process.exit(1) + } + const langs = JSON.parse(fs.readFileSync(`../../www/i18n.json`)) + for (const { code } of langs) { + shell.exec(`yarn ${script} ${code}`) + } +} + +const [script] = process.argv.slice(2) +runAll(script) diff --git a/scripts/i18n/sync.js b/scripts/i18n/sync.js index fed6e12634fc8..a59690c70fda6 100644 --- a/scripts/i18n/sync.js +++ b/scripts/i18n/sync.js @@ -1,11 +1,12 @@ const log4js = require(`log4js`) const shell = require(`shelljs`) -const { graphql } = require(`@octokit/graphql`) +const { graphql: baseGraphql } = require(`@octokit/graphql`) let logger = log4js.getLogger(`sync`) require(`dotenv`).config() -const host = `https://github.com` +const token = process.env.GITHUB_API_TOKEN +const host = `https://${token}@github.com` const cacheDir = `.cache` const owner = `gatsbyjs` const repoBase = `gatsby` @@ -14,6 +15,7 @@ const sourceRepo = `gatsby-i18n-source` const sourceRepoUrl = `${host}/${owner}/${sourceRepo}` const sourceRepoGitUrl = `${sourceRepoUrl}.git` +const syncLabelName = `sync` // get the git short hash function getShortHash(hash) { @@ -33,25 +35,68 @@ function cloneOrUpdateRepo(repoName, repoUrl) { } } +// Run the query and exit if there are errors +async function graphql(query, params) { + const graphqlWithAuth = baseGraphql.defaults({ + headers: { + authorization: `token ${token}`, + }, + }) + try { + return await graphqlWithAuth(query, params) + } catch (error) { + logger.error(error.message) + return process.exit(1) + } +} + async function getRepository(owner, name) { const { repository } = await graphql( ` - query($owner: String!, $name: String!) { + query($owner: String!, $name: String!, $syncLabel: String!) { repository(owner: $owner, name: $name) { id + syncPullRequests: pullRequests(labels: [$syncLabel], first: 1) { + nodes { + id + } + } + syncLabel: label(name: $syncLabel) { + id + } } } `, { - headers: { - authorization: `token ${process.env.GITHUB_ADMIN_AUTH_TOKEN}`, - }, owner, name, + syncLabel: syncLabelName, } ) return repository } + +async function createLabel(input) { + const { createLabel } = await graphql( + ` + mutation($input: CreateLabelInput!) 
{ + createLabel(input: $input) { + label { + id + } + } + } + `, + { + headers: { + accept: `application/vnd.github.bane-preview+json`, + }, + input, + } + ) + return createLabel.label +} + async function createPullRequest(input) { const { createPullRequest } = await graphql( ` @@ -66,7 +111,6 @@ async function createPullRequest(input) { `, { headers: { - authorization: `token ${process.env.GITHUB_BOT_AUTH_TOKEN}`, accept: `application/vnd.github.shadow-cat-preview+json`, }, input, @@ -75,6 +119,27 @@ async function createPullRequest(input) { return createPullRequest.pullRequest } +async function addLabelToPullRequest(pullRequest, label) { + await graphql( + ` + mutation($input: AddLabelsToLabelableInput!) { + addLabelsToLabelable(input: $input) { + clientMutationId + } + } + `, + { + headers: { + accept: `application/vnd.github.bane-preview+json`, + }, + input: { + labelableId: pullRequest.id, + labelIds: [label.id], + }, + } + ) +} + function conflictPRBody(conflictFiles, comparisonUrl, prNumber) { return ` Sync conflicts with the source repo. Please update the translations based on updated source content. @@ -126,7 +191,32 @@ async function syncTranslationRepo(code) { shell.exec(`git remote add source ${sourceRepoGitUrl}`) shell.exec(`git fetch source master`) - // TODO don't run the sync script if there is a current PR from the bot + const repository = await getRepository(owner, transRepoName) + + if (repository.syncPullRequests.nodes.length > 0) { + logger.info( + `There are currently open sync pull requests. Please ask the language maintainers to merge the existing PR(s) in before opening another one. Exiting...` + ) + process.exit(0) + } + + logger.info(`No currently open sync pull requests.`) + + let syncLabel + if (!repository.syncLabel) { + logger.info( + `Repository does not have a "${syncLabelName}" label. Creating one...` + ) + syncLabel = await createLabel({ + repositoryId: repository.id, + name: syncLabelName, + description: `Sync with translation source. 
Used by @gatsbybot to track open sync pull requests.`, + color: `fbca04`, + }) + } else { + logger.info(`Repository has an existing ${syncLabelName} label.`) + syncLabel = repository.syncLabel + } // TODO exit early if this fails // Compare these changes @@ -152,15 +242,13 @@ async function syncTranslationRepo(code) { // Remove files that are deleted by upstream // https://stackoverflow.com/a/54232519 shell.exec(`git diff --name-only --diff-filter=U | xargs git rm`) - shell.exec(`git ci --no-edit`) + shell.exec(`git commit --no-edit`) shell.exec(`git push -u origin ${syncBranch}`) - const repository = await getRepository(owner, transRepoName) - logger.info(`Creating sync pull request`) // TODO if there is already an existing PR, don't create a new one and exit early - const { number: syncPRNumber } = await createPullRequest({ + const syncPR = await createPullRequest({ repositoryId: repository.id, baseRefName: `master`, headRefName: syncBranch, @@ -168,6 +256,7 @@ async function syncTranslationRepo(code) { body: syncPRBody(), maintainerCanModify: true, }) + await addLabelToPullRequest(syncPR, syncLabel) // if we successfully publish the PR, pull again and create a new PR -- shell.exec(`git checkout master`) @@ -235,15 +324,16 @@ async function syncTranslationRepo(code) { logger.info(`Creating conflicts pull request`) // TODO assign codeowners as reviewers - await createPullRequest({ + const conflictsPR = await createPullRequest({ repositoryId: repository.id, baseRefName: `master`, headRefName: conflictBranch, title: `(sync) Resolve conflicts with ${sourceRepo} @ ${shortHash}`, - body: conflictPRBody(conflictFiles, comparisonUrl, syncPRNumber), + body: conflictPRBody(conflictFiles, comparisonUrl, syncPR.number), maintainerCanModify: true, draft: true, }) + await addLabelToPullRequest(conflictsPR, syncLabel) } const [langCode] = process.argv.slice(2) diff --git a/starters/hello-world/gatsby-config.js b/starters/hello-world/gatsby-config.js index 823925db4c737..4172a129ff405 100644 --- a/starters/hello-world/gatsby-config.js +++ b/starters/hello-world/gatsby-config.js @@ -6,4 +6,5 @@ module.exports = { /* Your site config here */ + plugins: [] } diff --git a/www/gatsby-config.js b/www/gatsby-config.js index 4bed679173af9..2e4add583a1fe 100644 --- a/www/gatsby-config.js +++ b/www/gatsby-config.js @@ -2,7 +2,7 @@ const path = require(`path`) require(`dotenv`).config({ path: `.env.${process.env.NODE_ENV}`, }) -const { langCodes } = require(`./src/utils/i18n`) +const { i18nEnabled, langCodes } = require(`./src/utils/i18n`) const GA = { identifier: `UA-93349937-5`, @@ -53,12 +53,8 @@ if (process.env.AIRTABLE_API_KEY) { }) } -// true if `env.LOCALES` has a defined list of languages -if (langCodes.length > 0) { - const naughtyFiles = [ - `docs/docs/graphql-api.md`, - `docs/docs/data-fetching.md`, - ] +if (i18nEnabled) { + const naughtyFiles = [`docs/docs/data-fetching.md`] dynamicPlugins.push( ...langCodes.map(code => ({ resolve: `gatsby-source-git`, diff --git a/www/gatsby-node.js b/www/gatsby-node.js index 84f4ee66c88a5..643f31fd51806 100644 --- a/www/gatsby-node.js +++ b/www/gatsby-node.js @@ -5,6 +5,7 @@ const child_process = require(`child_process`) const startersRedirects = require(`./starter-redirects.json`) const yaml = require(`js-yaml`) const redirects = yaml.load(fs.readFileSync(`./redirects.yaml`)) +const { i18nEnabled } = require(`./src/utils/i18n`) const docs = require(`./src/utils/node/docs.js`) const showcase = require(`./src/utils/node/showcase.js`) @@ -41,7 +42,7 @@ 
exports.onCreateNode = helpers => { exports.onPostBootstrap = () => { // Compile language strings if locales are enabled - if (!!process.env.LOCALES) { + if (i18nEnabled) { child_process.execSync(`yarn lingui:build`) } } diff --git a/www/src/components/I18nContext.js b/www/src/components/I18nContext.js index 8ba94e01e938b..67a21224916e4 100644 --- a/www/src/components/I18nContext.js +++ b/www/src/components/I18nContext.js @@ -1,5 +1,5 @@ import React from "react" -import { defaultLang } from "../utils/i18n" +import { i18nEnabled, defaultLang } from "../utils/i18n" import { I18nProvider as LinguiProvider } from "@lingui/react" // Lingui doesn't give access to the locale, so we need our own provider @@ -7,7 +7,7 @@ import { I18nProvider as LinguiProvider } from "@lingui/react" const LocaleContext = React.createContext(defaultLang) export function I18nProvider({ locale = defaultLang, children }) { - const catalog = !!process.env.LOCALES + const catalog = i18nEnabled ? require(`../data/locales/${locale}/messages.js`) : {} return ( diff --git a/www/src/components/events/__tests__/event-list.js b/www/src/components/events/__tests__/event-list.js index 9ccbdbb16c15f..9c67be82a8732 100644 --- a/www/src/components/events/__tests__/event-list.js +++ b/www/src/components/events/__tests__/event-list.js @@ -27,7 +27,8 @@ describe(``, () => { expect(getByText(`No events are scheduled right now.`)).toBeVisible() }) - it(`splits upcoming and past events`, () => { + + it.skip(`splits upcoming and past events`, () => { const { getByText } = render( `, () => { dateNowSpy.mockRestore() }) - it(`display's today's events as upcoming`, () => { + it.skip(`display's today's events as upcoming`, () => { const { getByText } = render( ) diff --git a/www/src/components/events/event-list.js b/www/src/components/events/event-list.js index 09b17c4abc7a3..104c4d0b4e8d0 100644 --- a/www/src/components/events/event-list.js +++ b/www/src/components/events/event-list.js @@ -5,9 +5,9 @@ import Event from "./event" export default function EventList({ events }) { const endOfDay = date => new Date(date).setHours(23, 59, 59, 999) - const upcoming = events.nodes.filter( - event => endOfDay(event.data.date) >= Date.now() - ) + // const upcoming = events.nodes.filter( + // event => endOfDay(event.data.date) >= Date.now() + // ) const past = events.nodes .filter(event => endOfDay(event.data.date) < Date.now()) @@ -15,14 +15,15 @@ export default function EventList({ events }) { return events.nodes.length > 0 ? ( <> -

      <h2>Upcoming Events</h2>
-      <ul>
+      // Temporarily removing during COVID
+      {/*
+      <h2>Upcoming Events</h2>
+      <ul>
        {upcoming.map(event => (
          <Event key={event.id} event={event} />
        ))}
-      </ul>
+      </ul> */}
      <h2>Past Events</h2>
      <ul>
        {past.map(event => ( diff --git a/www/src/components/navigation-mobile.js b/www/src/components/navigation-mobile.js index 5887cf22178d6..b2c6121059a70 100644 --- a/www/src/components/navigation-mobile.js +++ b/www/src/components/navigation-mobile.js @@ -75,7 +75,7 @@ const MobileNavigation = ({ i18n }) => ( ))} diff --git a/www/src/components/sidebar/section-title.js b/www/src/components/sidebar/section-title.js index 47cab4712da49..e6b533594c298 100644 --- a/www/src/components/sidebar/section-title.js +++ b/www/src/components/sidebar/section-title.js @@ -138,9 +138,11 @@ const SplitButton = withI18n()(