diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index fbfad5f96..f28b0de8c 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -12,4 +12,4 @@ ## Is there a release notes update needed for this change?: -## Additional documentation: \ No newline at end of file +## Additional documentation: diff --git a/.github/SUPPORT.md b/.github/SUPPORT.md index 8d74d941f..73b3c774d 100644 --- a/.github/SUPPORT.md +++ b/.github/SUPPORT.md @@ -1,4 +1,3 @@ - # Looking for support? We want to help Please post your question to X (#) using the Timber tag. diff --git a/.storybook/main.ts b/.storybook/main.ts index f7066d01f..e0c6acb07 100644 --- a/.storybook/main.ts +++ b/.storybook/main.ts @@ -1,16 +1,16 @@ -import { dirname, join } from "path"; +import { dirname, join } from 'path' import type { StorybookConfig } from '@storybook/react-vite' const config: StorybookConfig = { stories: ['../src/**/*.mdx', '../src/**/*.stories.@(js|jsx|ts|tsx)'], addons: [ - getAbsolutePath("@storybook/addon-links"), - getAbsolutePath("@storybook/addon-essentials"), - getAbsolutePath("@storybook/addon-interactions"), - getAbsolutePath("@storybook/addon-a11y") + getAbsolutePath('@storybook/addon-links'), + getAbsolutePath('@storybook/addon-essentials'), + getAbsolutePath('@storybook/addon-interactions'), + getAbsolutePath('@storybook/addon-a11y') ], framework: { - name: getAbsolutePath("@storybook/react-vite"), + name: getAbsolutePath('@storybook/react-vite'), options: {} }, docs: { @@ -21,5 +21,5 @@ const config: StorybookConfig = { export default config function getAbsolutePath(value: string): any { - return dirname(require.resolve(join(value, "package.json"))); + return dirname(require.resolve(join(value, 'package.json'))) } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 43f6b1790..8b967beb0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -132,6 +132,7 @@ We use a [CODEOWNERS](https://github.com/iqss/dataverse-frontend/blob/master/.gi By contributing code you are granting _(`Dataverse`)_ shared ownership of your work. You still own it but _(`Dataverse`)_ will have the right to relicense your work based on our needs & treat this work as if it was developed by a _(`Dataverse`)_ engineer. ## Browser support + + For more information and a list of additional resources, please visit this [discussion](#). --- @@ -82,6 +85,7 @@ For more information and a list of additional resources, please visit this [disc
  • Running the Project Locally
  • +
  • Writing test cases
  • Deployment
  • @@ -112,7 +116,6 @@ The Dataverse Project is an open source web application to share, preserve, cite The Dataverse Frontend repository is an initiative undertaken in 2023 to modernize the UI and design of the Dataverse Project by creating a stand-alone interface to allow users and organizations to implement their own Dataverse installations and utilize the JavaScript framework of their choice. - **The goals of Dataverse Frontend:** - Modernize the application @@ -134,7 +137,7 @@ The Dataverse Frontend repository is an initiative undertaken in 2023 to moderni - Cypress testing automation - Storybook for UI Component Library -*** +--- ### Beta Testing Environment @@ -148,9 +151,8 @@ Environment updates are carried out automatically through GitHub actions, presen The environment is accessible through the following URLs: - - Dataverse Frontend SPA: [beta.dataverse.org/spa][dv_app_beta_spa_url] - - Dataverse JSF: [beta.dataverse.org][dv_app_beta_legacyjsf_url] - +- Dataverse Frontend SPA: [beta.dataverse.org/spa][dv_app_beta_spa_url] +- Dataverse JSF: [beta.dataverse.org][dv_app_beta_legacyjsf_url] ### How Existing Dataverse Installations May Be Affected @@ -161,40 +163,38 @@ The environment is accessible through the following URLs:
    Changes from the original Dataverse JSF application - - >### Changes From the Style Guide - > - >The design system and frontend in this repo are inspired by the Dataverse Project [Style Guide](https://guides.dataverse.org/en/latest/style/index.html), but the following changes have been made, especially for accessibility. - > - >#### Links - > - >We added an underline to links to make them accessible. - > - >#### File Labels - > - >Now we are using Bootstrap with a theme, so there is only one definition for the secondary color. Since Bootstrap applies - the secondary color to the labels automatically, the color of the file label is now the global secondary color which is - a lighter shade of grey than what it used to be. - > - >We changed the citation block to be white with a colored border, to make the text in the box more accessible. - > - >### Changes in Functionality & Behavior - > - >Our main goal is to replicate the behavior of the original JSF application in all its functionalities, although during development we have found opportunities to review certain behaviors and apply changes where we find appropriate. - > - >#### Dataset Files Tab Search - > - >The original Dataset JSF page uses Solr to search for files based on the available filters. Past dataset versions are not indexed in Solr, so the filter option is not available (hidden) for such versions. When a version is indexed, the search text is searched in Solr, and Solr grammar can be applied. When the version is not indexed, the search text is searched in the database. - > - >The new SPA does not use Solr as the API endpoint it uses performs all queries on the database. Filters and search options are available for all versions in the same way, homogenizing behavior, although losing the possibility of using the Solr grammar. - > - >The decision of this change is made on the assumption that Solr may not be required in the context of files tab search, whose search facets are reduced compared to other in-application searches. Therefore, if we find evidence that the assumption is incorrect, we will work on extending the search capabilities to support Solr. +> ### Changes From the Style Guide +> +> The design system and frontend in this repo are inspired by the Dataverse Project [Style Guide](https://guides.dataverse.org/en/latest/style/index.html), but the following changes have been made, especially for accessibility. +> +> #### Links +> +> We added an underline to links to make them accessible. +> +> #### File Labels +> +> Now we are using Bootstrap with a theme, so there is only one definition for the secondary color. Since Bootstrap applies +> the secondary color to the labels automatically, the color of the file label is now the global secondary color which is +> a lighter shade of grey than what it used to be. +> +> We changed the citation block to be white with a colored border, to make the text in the box more accessible. +> +> ### Changes in Functionality & Behavior +> +> Our main goal is to replicate the behavior of the original JSF application in all its functionalities, although during development we have found opportunities to review certain behaviors and apply changes where we find appropriate. +> +> #### Dataset Files Tab Search +> +> The original Dataset JSF page uses Solr to search for files based on the available filters. Past dataset versions are not indexed in Solr, so the filter option is not available (hidden) for such versions. 
When a version is indexed, the search text is queried in Solr, and Solr grammar can be applied. When the version is not indexed, the search text is queried against the database. +> +> The new SPA does not use Solr, since the API endpoint it relies on performs all queries on the database. Filters and search options are available for all versions in the same way, homogenizing behavior, although losing the possibility of using the Solr grammar. +> +> This change is based on the assumption that Solr may not be required in the context of the files tab search, whose search facets are reduced compared to other in-application searches. Therefore, if we find evidence that the assumption is incorrect, we will work on extending the search capabilities to support Solr.

    (back to top)

- ## Getting Started _To get a local copy up and running follow these simple example steps._ @@ -207,19 +207,20 @@ _To get a local copy up and running follow these simple example steps._ 2. **Docker**: We use Docker Desktop, but the Docker CLI will also work. 3. **Create a Copy of .npmrc**: In the project directory root, duplicate `.npmrc.example`, saving the copy as `.npmrc`. - > ```bash - > # root project directory - > cp .npmrc.example .npmrc - > ``` + > ```bash + > # root project directory + > cp .npmrc.example .npmrc + > ``` 4. **Create and Add Your Npm and GitHub Tokens**: In order to connect with the Dataverse API, developers will need to install [@iqss/dataverse-client-javascript][dv_repo_dvclientjs_npm_url] from the GitHub registry by following the steps outlined below. Read more about access tokens on [GitHub Docs][dv_docs_github_userauthtoken_url]. - > **Getting a GitHub Token** - > 1. Go to your GitHub [Personal Access Tokens][dv_docs_github_token_url] settings - > 2. Select `Generate new token (classic)` - > 3. Give the token a name and select scope `read:packages` - > 4. Copy the generated token and replace the string `YOUR_GITHUB_AUTH_TOKEN` in the previously created `.npmrc` file. - > Now, you should be able to install the [Dataverse JavaScript][dv_repo_dvclientjs_url] client using npm. + > **Getting a GitHub Token** + > + > 1. Go to your GitHub [Personal Access Tokens][dv_docs_github_token_url] settings + > 2. Select `Generate new token (classic)` + > 3. Give the token a name and select scope `read:packages` + > 4. Copy the generated token and replace the string `YOUR_GITHUB_AUTH_TOKEN` in the previously created `.npmrc` file. + > Now, you should be able to install the [Dataverse JavaScript][dv_repo_dvclientjs_url] client using npm. Afterwards, your `.npmrc` file should resemble the following: @@ -232,6 +233,7 @@ legacy-peer-deps=true //npm.pkg.github.com/:_authToken=YOUR_GITHUB_AUTH_TOKEN @iqss:registry=https://npm.pkg.github.com/ ``` +
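+As an optional sanity check (a suggested step, not part of the original guide; any authenticated request against the registry works), you can confirm that npm picks up the token from `.npmrc` before installing anything:
+
+```bash
+# root project directory
+
+# should print your GitHub username when the token in .npmrc is valid
+npm whoami --registry=https://npm.pkg.github.com/
+```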
### Installation & Setup @@ -243,12 +245,12 @@ legacy-peer-deps=true npm install ``` + > [!WARNING] > You may see a message about vulnerabilities after running this command. > > Please check this announcement from the Create React App repository: [facebook/create-react-app#11174][_uses_repo_cra_error_url]. These vulnerabilities will not be included in the production build since they come from libraries only used during development. - 2. Build the UI Library, which is needed to run the application. ```bash # root project directory cd packages/design-system && npm run build ``` - - **Running & Building the App:** Run the app in development mode. Open [http://localhost:5173][dv_app_localhost_build_url] to view it in your browser. @@ -270,7 +270,6 @@ npm start The application will actively watch the directory for changes and reload when changes are saved. You may also see any existing linting errors in the console. - ```bash # root project directory @@ -281,6 +280,7 @@ npm run build npm run preview ``` +
**Storybook:** @@ -288,6 +288,7 @@ npm run preview Runs Storybook in development mode. There are two Storybook instances, one for the general Design System and one for the Dataverse Frontend component specifications. Both should be started automatically and available at: + - Dataverse Frontend Storybook: [http://localhost:6006/][dv_app_localhost_storybook_url] - Dataverse Design System Storybook: [http://localhost:6007/][dv_app_localhost_designsystem_url] @@ -303,6 +304,7 @@ npm run build-storybook ``` Note that both Storybook instances are also published to Chromatic as part of the build and merge processes, located at: + - [DataverseFrontend-Chromatic](https://www.chromatic.com/builds?appId=646f68aa9beb01b35c599acd) - [DataverseDesignSystem-Chromatic](https://www.chromatic.com/builds?appId=646fbe232a8d3b501a1943f3) @@ -325,44 +327,43 @@ npm run cy:open-e2e [cy:open-unit] npm run test:coverage ``` +
-Using `grep` with Cypress +Running specific tests ->The project includes [@cypress/grep][_uses_lib_grep_url] for running specific tests. -> ->Some examples used by the grep library are below for reference: +> The project includes [@cypress/grep][_uses_lib_grep_url] for running specific tests. > ->```bash -># root project directory +> Some examples used by the grep library are below for reference: > -># run only the tests with "auth user" in the title ->$ npx cypress run --env grep="auth user" +> ```bash +> # root project directory > -># run tests with "hello" or "auth user" in their titles by separating them with ";" character ->$ npx cypress run --env grep="hello; auth user" ->``` +> # run only the tests with "auth user" in the title +> $ npx cypress run --env grep="auth user" > ->To target specific tests, add `--spec` argument +> # run tests with "hello" or "auth user" in their titles by separating them with ";" character +> $ npx cypress run --env grep="hello; auth user" +> ``` > ->```bash -># root project directory +> To target specific tests, add the `--spec` argument > ->$ npx cypress run --env grep="loads the restricted files when the user is logged in as owner" ->\ --spec tests/e2e-integration/e2e/sections/dataset/Dataset.spec.tsx ->``` +> ```bash +> # root project directory > +> $ npx cypress run --env grep="loads the restricted files when the user is logged in as owner" +> \ --spec tests/e2e-integration/e2e/sections/dataset/Dataset.spec.tsx +> ``` > ->**Repeat and burn tests** ->You can run the selected tests multiple times by using the `burn=N` parameter. For example, run all all the tests in the spec A five times using: +> **Repeat and burn tests** +> You can run the selected tests multiple times by using the `burn=N` parameter. For example, run all the tests in spec A five times using: > ->```bash -># root project directory -> ->$ npx cypress run --env burn=5 --spec tests/e2e-integration/e2e/sections/dataset/Dataset.spec.tsx ->``` +> ```bash
+> # root project directory > +> $ npx cypress run --env burn=5 --spec tests/e2e-integration/e2e/sections/dataset/Dataset.spec.tsx +> ```
@@ -387,7 +388,6 @@ Launch the prettier formatter. We recommend you to configure your IDE to run prettier on save. See the official IDE setups used by the IQSS team at [vscode-settings][dv_repo_vscode_url] on GitHub. - ```bash # root project directory @@ -421,11 +421,13 @@ $ ./run-env.sh # Removes the project and its dependencies $ ./rm-env.sh ``` + Note that the image tag in the docker registry must be pre-pushed, otherwise the script will fail. You can find the existing tags on DockerHub: [@gdcc/dataverse][dv_app_docker_image_url]. If you are running the script for the first time, it may take a while, since npm has to install all project dependencies. This can also happen if you added new dependencies to `package.json`, or used the _uninstall_ script to remove current project files and shut down any running containers. Once the script has finished, you will be able to access Dataverse via: + - Dataverse SPA Frontend: [http://localhost:8000/spa][dv_app_localhost_spa_url] - Dataverse JSF Application: [http://localhost:8000][dv_app_localhost_legacy_url] @@ -442,10 +444,324 @@ If you want to add test data (collections and datasets) to the Dataverse instanc $ ./add-env-data.sh ``` + Note: The above command uses the [dataverse-sample-data][dv_repo_dvsampledata_url] repository whose scripts occasionally fail, so some of the test data may not be added.
+## Writing test cases + +Testing is a crucial part of SPA development. Our React application employs three main types of tests: unit tests, +integration tests, and end-to-end (e2e) tests. In this section, we'll describe each type of test and how to implement them. + +### 1. **Unit Tests or Component Tests:** + +Unit tests are designed to test individual React components in isolation. In our approach, we focus on testing components +from the user's perspective, following the principles of the [React Testing Library](https://testing-library.com/docs/react-testing-library/intro/). +This means: + +- **Test What Users See:** Focus on the output of the components, such as text, interactions, and the DOM, rather than + internal implementation details like classes or functions. +- **Exceptions to the Rule:** In complex scenarios or where performance is a critical factor, we might test implementation + details to ensure a repository is correctly called, for example. However, these cases are exceptions, and the primary + focus remains user-centric testing. +- **Avoid Testing Implementation Details:** Avoid testing implementation details like internal state or methods, as these + tests are more brittle and less valuable than user-centric tests. +- **Mocking:** We use mocks to isolate components from their dependencies, ensuring that tests are focused on the component + itself and not on its dependencies. +- **Tools and Frameworks:** We use [Cypress Component testing](https://docs.cypress.io/guides/component-testing/overview) + alongside React Testing Library to render components in isolation and test their behavior. + 
+Example of a Unit Test + +```javascript +//tests/component/sections/home/Home.spec.tsx + +import { Home } from '../../../../src/sections/home/Home' +import { DatasetRepository } from '../../../../src/dataset/domain/repositories/DatasetRepository' +import { DatasetPreviewMother } from '../../dataset/domain/models/DatasetPreviewMother' + +const datasetRepository: DatasetRepository = {} as DatasetRepository +const totalDatasetsCount = 10 +const datasets = DatasetPreviewMother.createMany(totalDatasetsCount) + +describe('Home page', () => { + beforeEach(() => { + // stub the repository so the component renders against controlled data + datasetRepository.getAll = cy.stub().resolves(datasets) + datasetRepository.getTotalDatasetsCount = cy.stub().resolves(totalDatasetsCount) + }) + + it('renders Root title', () => { + cy.customMount(<Home datasetRepository={datasetRepository} />) + cy.findByRole('heading').should('contain.text', 'Root') + }) +}) +``` + 
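+The `cy.customMount()` command used above is a custom Cypress command defined in the project's test support files. As a rough illustration of the idea, here is a hypothetical sketch of how such a command can be registered (not the repository's actual implementation; the real command may wrap additional providers, and the `ThemeProvider` import is an assumption):
+
+```javascript
+// Hypothetical support-file sketch: register a mount command that wraps
+// the component with the design system's theme provider before mounting it.
+import { mount } from 'cypress/react'
+import { ThemeProvider } from '@iqss/dataverse-design-system'
+
+Cypress.Commands.add('customMount', (component) => {
+  return mount(<ThemeProvider>{component}</ThemeProvider>)
+})
+```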
+ +### 2. **Integration Tests:** + +Integration tests verify the integration of the SPA with external systems, such as APIs, third-party +libraries (like js-dataverse), or databases, ensuring that the application communicates correctly with outside +resources and services. + +- **External Integrations:** Test the integration of the SPA with external systems, such as APIs, third-party + libraries (like [js-dataverse](https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript/97068340)), or databases. +- **Tools and Frameworks:** We use Cypress for integration tests to test the repository implementations. + 
+Example of an Integration Test + +```javascript +//tests/e2e-integration/integration/datasets/DatasetJSDataverseRepository.spec.ts + +// Imports omitted in this excerpt; datasetRepository is the repository +// implementation under test, e.g. new DatasetJSDataverseRepository() + +describe('Dataset JSDataverse Repository', () => { + before(() => TestsUtils.setup()) + beforeEach(() => { + TestsUtils.login() + }) + it('gets the dataset by persistentId', async () => { + const datasetResponse = await DatasetHelper.create() + + await datasetRepository.getByPersistentId(datasetResponse.persistentId).then((dataset) => { + if (!dataset) { + throw new Error('Dataset not found') + } + // datasetData builds the expected fixture elsewhere in the spec + const datasetExpected = datasetData(dataset.persistentId, dataset.version.id) + + expect(dataset.license).to.deep.equal(datasetExpected.license) + expect(dataset.metadataBlocks).to.deep.equal(datasetExpected.metadataBlocks) + expect(dataset.summaryFields).to.deep.equal(datasetExpected.summaryFields) + expect(dataset.version).to.deep.equal(datasetExpected.version) + expect(dataset.metadataBlocks[0].fields.publicationDate).not.to.exist + expect(dataset.metadataBlocks[0].fields.citationDate).not.to.exist + expect(dataset.permissions).to.deep.equal(datasetExpected.permissions) + expect(dataset.locks).to.deep.equal(datasetExpected.locks) + expect(dataset.downloadUrls).to.deep.equal(datasetExpected.downloadUrls) + expect(dataset.fileDownloadSizes).to.deep.equal(datasetExpected.fileDownloadSizes) + }) + }) +}) +``` + 
    + +
+Wait for no locks + +Some integration tests require waiting until no locks are present in the dataset. This is done using the `waitForNoLocks` +method from the `TestsUtils` class. + +```javascript +it('waits for no locks', async () => { + const datasetResponse = await DatasetHelper.create() + + await DatasetHelper.publish(datasetResponse.persistentId) + await TestsUtils.waitForNoLocks(datasetResponse.persistentId) + + await datasetRepository.getByPersistentId(datasetResponse.persistentId).then((dataset) => { + if (!dataset) { + throw new Error('Dataset not found') + } + expect(dataset.locks).to.deep.equal([]) + }) +}) +``` + 
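+Under the hood, a helper like this typically polls the dataset's locks until they clear. The following is a hypothetical sketch of that polling pattern (not the actual `TestsUtils` code; `DatasetHelper.getLocks` is an assumed helper used only for illustration):
+
+```javascript
+// Hypothetical polling sketch; the real TestsUtils implementation may differ.
+export class TestsUtils {
+  static async waitForNoLocks(persistentId: string, retries = 20, delayMs = 1000): Promise<void> {
+    for (let attempt = 0; attempt < retries; attempt++) {
+      const locks = await DatasetHelper.getLocks(persistentId) // assumed helper
+      if (locks.length === 0) return // no locks left, the dataset is ready
+      await new Promise((resolve) => setTimeout(resolve, delayMs)) // wait before retrying
+    }
+    throw new Error(`Dataset ${persistentId} still has locks after ${retries} attempts`)
+  }
+}
+```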
+ +### 3. **End-to-End (e2e) Tests:** + +End-to-end tests simulate real user scenarios, covering the complete flow of the application: + +- **User Workflows:** Focus on common user paths, like searching for a file, logging in, or creating a Dataset. Ensure + these workflows work from start to finish. +- **Avoid Redundancy:** Do not replicate tests covered by unit tests. E2E tests should cover broader user experiences. + It is important to note that e2e tests are slower and more brittle than unit tests, so we use them sparingly. +- **Tools and Frameworks:** We use Cypress for e2e tests to test the application's behavior from the user's perspective. + It allows us to simulate user interactions and verify the application's behavior in real-world scenarios. + 
+Example of an E2E Test + +```javascript +//tests/e2e-integration/e2e/sections/create-dataset/CreateDatasetForm.spec.tsx + +describe('Create Dataset', () => { + before(() => { + TestsUtils.setup() + }) + beforeEach(() => { + TestsUtils.login() + }) + + it('navigates to the new dataset after submitting a valid form', () => { + cy.visit('/spa/datasets/create') + + cy.findByLabelText(/Title/i).type('Test Dataset Title') + cy.findByLabelText(/Author Name/i).type('Test author name', { force: true }) + cy.findByLabelText(/Point of Contact E-mail/i).type('email@test.com') + cy.findByLabelText(/Description Text/i).type('Test description text') + cy.findByLabelText(/Arts and Humanities/i).check() + + cy.findByText(/Save Dataset/i).click() + + cy.findByRole('heading', { name: 'Test Dataset Title' }).should('exist') + cy.findByText(DatasetLabelValue.DRAFT).should('exist') + cy.findByText(DatasetLabelValue.UNPUBLISHED).should('exist') + }) +}) +``` + +> **Note:** The current e2e tests are failing for the following reason: +> +> - **Dataset JSDataverse Repository -> gets the total dataset count**: Calling `destroyAll()` before the "gets the total +> dataset count" test in `DatasetJSDataverseRepository.spec.ts` causes an optimistic lock exception in Dataverse. +> +> **Solution:** We need to either reset the database after each test or find an alternative method to avoid the optimistic +> lock exception. Check the issue [here](https://github.com/IQSS/dataverse-frontend/issues/294). + +### Patterns and Conventions + +#### **Folder Structure** + +We have a `tests` folder in the root of the project, with subfolders for each type of test (component, +integration, e2e). The folders for integration and e2e are together in the `e2e-integration` folder. We follow the same +structure that we use for the application. + +#### **Test Naming** + +We use the same naming conventions as the application, with the suffix `.spec`. + +#### **Test Data for unit tests** + +We use [faker](https://www.npmjs.com/package/@faker-js/faker) to create test data for unit tests. This library allows us +to generate realistic and varied test data with minimal effort. We use it to create random strings, numbers, and other +values, ensuring that tests cover a wide range of cases without introducing unpredictable failures. + +**[Object Mother Pattern](https://martinfowler.com/bliki/ObjectMother.html)** + +We use the Object Mother pattern to create test data for unit tests. These classes are located in the `tests/component` +folder. + +The primary goal of the Object Mother pattern is to simplify the creation and management of test objects. It serves as a +centralized place where test objects are defined, making the testing process more streamlined and less error-prone. In this +pattern, we create a class or a set of functions dedicated solely to constructing and configuring objects needed for tests. + +Some benefits of this pattern are: + +- **Consistency:** It ensures consistent object setup across different tests, improving test reliability. +- **Readability:** It makes tests more readable and understandable by hiding complex object creation details. +- **Flexibility:** It allows us to create test data with different values and configurations. +- **Reusability:** It allows for the reuse of object configurations, reducing code duplication and saving time. +- **Maintainability:** It centralizes object creation, making tests easier to maintain and update when object structures change.
+- **Simplicity:** It simplifies test setup, allowing testers to focus more on the test logic than on object configuration. +- **Controlled Randomness:** It allows creating realistic and varied test scenarios while maintaining control over the randomness. + This ensures tests cover a wide range of cases without introducing unpredictable failures. + +
+Example of an Object Mother class + +```javascript +//tests/component/dataset/domain/models/DatasetMother.ts + +import { faker } from '@faker-js/faker' +// (import of the Dataset domain model omitted in this excerpt) + +export class DatasetMother { + static create(props?: Partial<Dataset>): Dataset { + return { + id: faker.datatype.uuid(), + title: faker.lorem.words(3), + ...props + } + } +} +``` + 
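+Because `create()` accepts a `Partial<Dataset>`, tests can pin only the fields they need and let faker randomize the rest. Illustrative usage of the class above:
+
+```javascript
+// fully random dataset
+const randomDataset = DatasetMother.create()
+
+// random dataset with a fixed title, so the test can assert on it
+const titledDataset = DatasetMother.create({ title: 'Fixed title for this test' })
+```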
+ +#### **Test Data for integration and e2e tests** + +We use some helper classes to create test data for the integration and e2e tests. These classes are located in the +`tests/e2e-integration/shared` folder. + +The test data is created using [axios](https://www.npmjs.com/package/axios), which allows us to make requests to the +Dataverse API to set up the data these tests need. + 
+Example of a Helper class to create test data + +```javascript +//tests/e2e-integration/shared/datasets/DatasetHelper.ts +export class DatasetHelper extends DataverseApiHelper { + static async create(): Promise<DatasetResponse> { + return this.request<DatasetResponse>(`/dataverses/root/datasets`, 'POST', newDatasetData) + } +} +``` + 
+ +#### **Test Organization** + +We organize tests into suites, grouping them by feature or user workflow. This makes it easier to find and run tests. + +#### **Test Isolation** + +We aim to keep tests as isolated as possible, avoiding dependencies between tests and ensuring that each test can run +independently. + +### Continuous Integration (CI) + +We run tests on every pull request and merge to ensure that the application is always stable and functional. You can +find the CI workflow in the `.github/workflows/test.yml` file. + +CI checks include: + +- **Unit Tests:** We run all unit tests to ensure that the application's components work as expected. +- **Integration Tests:** We run integration tests to ensure that the application communicates correctly with external + systems. +- **E2E Tests:** We run e2e tests to ensure that the application's behavior is correct from the user's perspective. +- **Accessibility Tests:** We run checks to ensure that the application is accessible and meets accessibility + compliance standards. +- **Code Coverage:** We check the test coverage to ensure that the application is well-tested and that new code is + covered by tests. + +### Test Coverage + +We aim for high test coverage, especially in critical areas of the application, like user workflows or complex components. +However, we prioritize user-centric testing over coverage numbers. + +- **Coverage Threshold:** We aim for a test coverage of 95% for the unit tests. This threshold is set in the `.nycrc.json` file. +- **Coverage Reports:** We use [nyc](https://www.npmjs.com/package/nyc) to generate coverage reports, which are available + in the `coverage` folder after running the tests. These reports are also published to [Coveralls](https://coveralls.io/github/IQSS/dataverse-frontend?branch=develop) + with every pull request and merge. The coverage badge is displayed at the top of the README. +- **Tests included in the coverage:** We include all unit tests in the coverage report. +- **Pre-commit hook:** We use [pre-commit](https://www.npmjs.com/package/pre-commit) to run the unit tests before every commit, + ensuring that no code is committed without passing the tests. It also runs the coverage checks to ensure that the coverage + threshold is met. + +#### How to run the code coverage + +To generate the code coverage, you first need to run the tests with the `test:unit` script. After running the tests, you +can check the coverage with the `test:coverage` script. + +To view the coverage report in the browser, open the `coverage/lcov-report/index.html` file. + +```bash +# root project directory + +# Run the unit tests and generate the coverage report + +npm run test:unit + +# Check the coverage + +npm run test:coverage +``` + 
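+For reference, a threshold like the one described above is expressed with standard nyc options roughly as follows (a minimal sketch; the repository's actual `.nycrc.json` may set additional fields):
+
+```json
+{
+  "check-coverage": true,
+  "lines": 95,
+  "statements": 95,
+  "functions": 95,
+  "branches": 95
+}
+```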

    (back to top)

    +
+ ### Deployment Once the site is built through the `npm run build` command, it can be deployed in different ways to different types of infrastructure, depending on the needs of the installation. We are working to provide different preconfigured automated deployment options, seeking to support common use cases today for installing applications of this type. The current automated deployment options are available within the GitHub `deploy` workflow, which is designed to be run manually from GitHub Actions. Both the deployment option and the target environment are selected via dropdown menus. -
Examples for AWS and Payara @@ -464,37 +779,33 @@ This option will build and deploy the application to a remote S3 bucket. For this workflow to work, a GitHub environment must be configured with the following environment secrets: - - `AWS_ACCESS_KEY_ID` - - `AWS_SECRET_ACCESS_KEY` - - `AWS_S3_BUCKET_NAME` - - `AWS_DEFAULT_REGION` +- `AWS_ACCESS_KEY_ID` +- `AWS_SECRET_ACCESS_KEY` +- `AWS_S3_BUCKET_NAME` +- `AWS_DEFAULT_REGION` Note that for the deployment to the S3 bucket to succeed, you must make the following changes to the bucket via the S3 web interface (or equivalent changes using aws-cli or similar tools): - - Under _`Permissions`_ ⏵ _`Permissions Overview`_ ⏵ _`Block public access (bucket settings)`_ ⏵ click _`Edit`_, then __uncheck__ _`Block all public access`_ and save. - - Under _`Properties`_ ⏵ _`Static website hosting`_ ⏵ click _`Edit`_ and enable it. Change _`Index document`_ and _`Error document`_ to `index.html`. - - Under _`Bucket Policy`_, click _`Edit`_ and paste the following policy (changing `<BUCKET_NAME>` to your bucket name) and save. +- Under _`Permissions`_ ⏵ _`Permissions Overview`_ ⏵ _`Block public access (bucket settings)`_ ⏵ click _`Edit`_, then **uncheck** _`Block all public access`_ and save. +- Under _`Properties`_ ⏵ _`Static website hosting`_ ⏵ click _`Edit`_ and enable it. Change _`Index document`_ and _`Error document`_ to `index.html`. +- Under _`Bucket Policy`_, click _`Edit`_ and paste the following policy (changing `<BUCKET_NAME>` to your bucket name) and save. ```json { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": "PublicReadGetObject", - "Principal": "*", - "Effect": "Allow", - "Action": [ - "s3:GetObject" - ], - "Resource": [ - "arn:aws:s3:::<BUCKET_NAME>/*" - ] - } - ] + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "PublicReadGetObject", + "Principal": "*", + "Effect": "Allow", + "Action": ["s3:GetObject"], + "Resource": ["arn:aws:s3:::<BUCKET_NAME>/*"] + } + ] } ``` -You should see the deployed app at `http://[BUCKET-NAME].s3-website-[REGION].amazonaws.com`, for example; `http://my-dataverse-bucket.s3-website-us-east-1.amazonaws.com/` +You should see the deployed app at `http://[BUCKET-NAME].s3-website-[REGION].amazonaws.com`, for example: `http://my-dataverse-bucket.s3-website-us-east-1.amazonaws.com/` #### Deployment with Payara @@ -504,9 +815,9 @@ This option is intended for an "all-in-one" solution, where the Dataverse backen For this workflow to work, a GitHub environment must be configured with the following environment secrets: - - `PAYARA_INSTANCE_HOST` - - `PAYARA_INSTANCE_USERNAME` - - `PAYARA_INSTANCE_SSH_PRIVATE_KEY` +- `PAYARA_INSTANCE_HOST` +- `PAYARA_INSTANCE_USERNAME` +- `PAYARA_INSTANCE_SSH_PRIVATE_KEY` It is important that the remote instance is correctly pre-configured, with the Payara server running, and a service account for Dataverse with the corresponding SSH key pair established. @@ -517,18 +828,16 @@ A base path for the frontend application can be established on the remote server

    (back to top)


- ## Usage Use this space to show useful examples of how a project can be used. Additional screenshots, code examples and demos work well in this space. You may also link to more resources. _For more examples, please refer to the [Documentation][dv_docs_devs_url]_ - - This project uses Atomic Design for its components. Here are several resources for Atomic Design if you are unfamiliar: + @@ -564,6 +873,7 @@ If you have a suggestion that would make this better, please fork the repo and c Got Questions? Join the conversation on [Zulip][dv_community_zulip_url], or our Google Groups for [Developers][dv_community_google_devs_url] and [Users][dv_community_google_users_url]. Or attend community meetings, hosted by the Global Dataverse Community Consortium, to collaborate with the interest groups for [Frontend Development][dv_community_gdcc_ui_url] and [Containerization][dv_community_gdcc_containers_url], and learn and share with communities around the world! + Find videos and release overviews on our YouTube Channel, and check out the resources on our [website][dv_docs_dataverse_url].

    (back to top)

    @@ -572,7 +882,6 @@ Find videos and release overviews on our YouTube Channel, and check out the reso - ## Contact Email us with software installation questions. Please note our response time is generally 24 business hours. @@ -588,12 +897,9 @@ Report bugs and add feature requests in GitHub Issues or use GitHub pull request Ask a question or start a discussion: A great place to publicly share feedback, ideas, feature requests, and troubleshoot any issues is in the dataverse-community mailing list. Additional mailing lists are available, including language-specific ones. - -

    (back to top)


    - ## Acknowledgments Chromatic @@ -603,7 +909,7 @@ Thanks to [Chromatic][_uses_chromatic_url] for providing the visual testing plat

    (back to top)


    -*** +--- ### Built With @@ -612,7 +918,7 @@ This section should list any major frameworks/libraries used to bootstrap your p

    (back to top)


    -*** +--- ## License @@ -621,14 +927,13 @@ Distributed under the Apache License, Version 2.0. See `LICENSE` for more inform

    (back to top)


    - - + [dv_repo_url]: https://github.com/IQSS/dataverse-frontend [dv_repo_issues_url]: https://github.com/IQSS/dataverse-frontend/issues [dv_repo_sprint_url]: https://github.com/orgs/IQSS/projects/34/views/23 @@ -643,6 +948,7 @@ Distributed under the Apache License, Version 2.0. See `LICENSE` for more inform + [dv_repo_dvclientjs_url]: https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript [dv_repo_dvclientjs_npm_url]: https://www.npmjs.com/package/js-dataverse [dv_repo_dvsampledata_url]: https://github.com/IQSS/dataverse-sample-data @@ -653,6 +959,7 @@ Distributed under the Apache License, Version 2.0. See `LICENSE` for more inform + [dv_app_beta_spa_url]: https://beta.dataverse.org/spa [dv_app_beta_legacyjsf_url]: https://beta.dataverse.org [dv_app_legacyjsf_demo_url]: https://demo.dataverse.org/ @@ -661,11 +968,14 @@ Distributed under the Apache License, Version 2.0. See `LICENSE` for more inform [dv_app_localhost_designsystem_url]: http://localhost:6007/ [dv_app_localhost_spa_url]: http://localhost:8000/spa [dv_app_localhost_legacy_url]: http://localhost:8000/ + + [dv_app_docker_image_url]: https://hub.docker.com/r/gdcc/dataverse/tags + [dv_community_gdcc_url]: https://www.gdcc.io/ [dv_community_gdcc_ui_url]: https://ui.gdcc.io/ [dv_community_gdcc_containers_url]: https://ct.gdcc.io/ @@ -675,12 +985,14 @@ Distributed under the Apache License, Version 2.0. See `LICENSE` for more inform + [hvd_iqss_url]: https://www.iq.harvard.edu/ [hvd_iqss_roadmap_url]: https://www.iq.harvard.edu/roadmap-dataverse-project [hvd_legacyjsf_url]: https://dataverse.harvard.edu/ + [dv_docs_dataverse_url]: https://dataverse.org/ [dv_docs_about_url]: https://dataverse.org/about [dv_docs_styleguide_url]: https://guides.dataverse.org/en/latest/style/index.html @@ -691,6 +1003,7 @@ Distributed under the Apache License, Version 2.0. See `LICENSE` for more inform + [_uses_reactjs_url]: https://reactjs.org/ [_uses_nodejs_url]: https://nodejs.org/ [_uses_typescript_url]: https://typescriptlang.org/ @@ -704,12 +1017,16 @@ Distributed under the Apache License, Version 2.0. See `LICENSE` for more inform [_uses_chromatic_url]: https://www.chromatic.com/ [_uses_repo_cra_error_url]: https://github.com/facebook/create-react-app/issues/11174 [_uses_tool_chromatic_url]: https://www.chromatic.com/builds?appId=646f68aa9beb01b35c599acd + + [_uses_lib_grep_url]: https://www.npmjs.com/package/@cypress/grep + + [_shield_reactjs]: https://img.shields.io/badge/React-20232A?style=for-the-badge&logo=react&logoColor=61DAFB [_shield_nodejs]: https://img.shields.io/badge/node.js-000000?style=for-the-badge&logo=nodedotjs&logoColor=white [_shield_typescript]: https://img.shields.io/badge/TypeScript-3178C6?style=for-the-badge&logo=typescript&logoColor=white @@ -722,12 +1039,16 @@ Distributed under the Apache License, Version 2.0. 
See `LICENSE` for more inform [_shield_zulip]: https://img.shields.io/badge/zulip-chat?style=for-the-badge&logo=zulip&logoColor=%236492FE [_shield_googledevs]: https://img.shields.io/badge/Developer_Group-white?style=for-the-badge&logo=google [_shield_googleusers]: https://img.shields.io/badge/User_Group-white?style=for-the-badge&logo=google + + [_shield_projectstatus]: https://img.shields.io/badge/repo_status-WIP-yellow?style=for-the-badge [_shield_contributors]: https://img.shields.io/github/contributors/IQSS/dataverse-frontend?branch=develop&style=for-the-badge [_shield_stargazers]: https://img.shields.io/github/stars/iqss/dataverse-frontend?style=for-the-badge [_shield_coveralls]: https://img.shields.io/coverallsCoverage/github/IQSS/dataverse-frontend?branch=develop&style=for-the-badge + + [_shield_workflow]: https://img.shields.io/github/actions/workflow/status/IQSS/dataverse-frontend/test.yml?branch=develop&style=for-the-badge [_shield_issues]: https://img.shields.io/github/issues/IQSS/dataverse-frontend?style=for-the-badge [_shield_forks]: https://img.shields.io/github/forks/IQSS/dataverse-frontend?style=for-the-badge @@ -735,18 +1056,24 @@ Distributed under the Apache License, Version 2.0. See `LICENSE` for more inform + [_img_dv_logo_withbackground]: https://github.com/IQSS/dataverse-frontend/assets/7512607/6986476f-39ba-46a4-9be0-f05cd8e92244 [_img_dv_logo_nobackground]: https://github.com/IQSS/dataverse-frontend/assets/7512607/6c4d79e4-7be5-4102-88bd-dfa167dc79d3 [_img_screenshot]: images/screenshot.png + [_video_demo_datasetpage_url]: https://groups.google.com/g/dataverse-community/c/cxZ3Bal_-uo/m/h3kh3iVNCwAJ + + [_video_demo_filetable_url]: https://groups.google.com/g/dataverse-community/c/w_rEMddESYc/m/6F7QC1p-AgAJ + [_social_twitter]: https://twitter.com/your_username +