diff --git a/.gitignore b/.gitignore index ae9a44f32..c62ea1ff8 100644 --- a/.gitignore +++ b/.gitignore @@ -27,3 +27,6 @@ www/ .DS_Store yarn-error.log coverage/ + +## Database Cache +standards_cache.sqlite diff --git a/Makefile b/Makefile index 1620fe3de..db2f2c27f 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,7 @@ dev-run: e2e: yarn build [ -d "./venv" ] && . ./venv/bin/activate - export FLASK_APP=$(CURDIR)/cre.py + export FLASK_APP=cre export FLASK_CONFIG=development fFLASK_CONFIG=development flask run& @@ -20,7 +20,7 @@ e2e: killall flask test: [ -d "./venv" ] && . ./venv/bin/activate - export FLASK_APP=$(CURDIR)/cre.py + export FLASK_APP=cre flask routes flask test @@ -57,12 +57,12 @@ clean: migrate-upgrade: if ! [ -f "standards_cache.sqlite" ]; then cp cres/db.sqlite standards_cache.sqlite; fi [ -d "./venv" ] && . ./venv/bin/activate - export FLASK_APP=$(CURDIR)/cre.py + export FLASK_APP=cre flask db upgrade migrate-downgrade: [ -d "./venv" ] && . ./venv/bin/activate - export FLASK_APP=$(CURDIR)/cre.py + export FLASK_APP=cre flask db downgrade import-all: diff --git a/README.md b/README.md index 92b1fb273..7bf2fe639 100644 --- a/README.md +++ b/README.md @@ -1,23 +1,20 @@ - - [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![GitHub Super-Linter](https://github.com/OWASP/common-requirement-enumeration/workflows/Lint%20Code%20Base/badge.svg)](https://github.com/marketplace/actions/super-linter) [![GitHub CodeQL](https://github.com/OWASP/common-requirement-enumeration/workflows/CodeQL/badge.svg)](https://github.com/marketplace/actions/codeql-analysis) [![Main Branch Build](https://github.com/OWASP/common-requirement-enumeration/workflows/Test/badge.svg?branch=main)](https://github.com/OWASP/OWASP/common-requirement-enumeration/workflows/Test) -Common Requirements Enumeration Application -=============================== +# Common Requirements Enumeration Application + This is work in progress. 
See the application working at https://www.opencre.org CRE is an interactive content linking platform for uniting security standards and guidelines. It offers easy and robust access to relevant information when designing, developing, testing and procuring secure software. This python web and cli application handles adding and presenting CREs. -WHY? -========== +# WHY? Independent software security professionals got together to find a solution for the complexity and fragmentation in today’s landscape of security standards and guidelines. These people are Spyros Gasteratos, Elie Saad, Rob van der Veer and friends, in close collaboration with the SKF, OpenSSF and Owasp Top 10 project. -HOW? -====== +# HOW? + The CRE links each section of a standard to a shared topic (a Common Requirement), causing that section to also link with all other resources that map to the same topic. This 1) enables users to find all combined information from relevant sources, 2) it facilitates a shared and better understanding of cyber security, and 3) it allows standard makers to have links that keep working and offer all the information that readers need, so they don’t have to cover it all themselves. The CRE maintains itself: topic links in the standard text are scanned automatically. Furthermore, topics are linked with related other topics, creating a semantic web for security. Example: the session time-out topic will take the user to relevant criteria in several standards, and to testing guides, development tips, more technical detail, threat descriptions, articles etc. From there, the user can navigate to resources about session management in general. @@ -27,95 +24,108 @@ CRE is currently in beta and has linked OWASP standards (Top 10, ASVS, Proactive Data has been kindly contributed by the SKF and ASVS projects -Installing ---- +## Installing To install this application you need python3, yarn and virtualenv. -Clone the repository: -
git clone https://github.com/OWASP/common-requirement-enumeration 
-Copy sqlite database to required location -
cp cres/db.sqlite standards_cache.sqlite
+```bash +git clone https://github.com/OWASP/common-requirement-enumeration + +# Copy the sqlite database to the required location +cp cres/db.sqlite standards_cache.sqlite -Install dependencies -
 make install 
+# Install the dependencies +make install +``` +On the first run, you'll also need to apply any pending database migrations: -Running -------- +```bash +make migrate-upgrade +``` + +## Running To run the CLI application, you can run +
python cre.py --help
To download a remote cre spreadsheet locally you can run +
python cre.py --review --from_spreadsheet < google sheets url>
To add a remote spreadsheet to your local database you can run +
python cre.py --add --from_spreadsheet < google sheets url>
To run the web application for development you can run +
make dev-run
Alternatively, you can use the dockerfile with +
make docker && make docker-run
To run the web application for production you need gunicorn and you can run from within the cre_sync dir +
make prod-run
-Developing ---- +## Developing + You can run backend tests with +
make test
-You can run get a coverage report with + +You can run get a coverage report with +
make cover
+ Try to keep the coverage above 70% Repo Moved here from https://github.com/northdpole/www-project-integration-standards -Contributing ---- -Please see [Contributing](CONTRIBUTING.md) for contributing instructions +## Contributing +Please see [Contributing](CONTRIBUTING.md) for contributing instructions -Development Notes ---- +## Development Notes - [ ] add tests -- [x] defs -- [x] db -- [x] parsers -- [ ] mapping_add ( done for important methods ) argparse logic only remains +- [x] defs +- [x] db +- [x] parsers +- [ ] mapping_add ( done for important methods ) argparse logic only remains - [x] spreadsheet_utils -- [ ] frontend - -- [x] add parse from export format -- [x] add parse from export format where the root doc is a standard and it links to cres or groups -- [x] add parse from spreadsheet with unknown standards (for key,val in items add_standard) -- [x] merge spreadsheet to yaml and mapping add, they do the same thing -- [x] add the ability for standards to link other standards, then you can handle assigning CREs yourself -- [x] support importing yaml export files of more than 1 levels deep -- [x] add export for Standards unmapped to CREs as lone standards (useful for visibility) -- [x] add sparse_spreadsheet_export functionality one level of mapping per row, either everything that maps to standard X or everything that maps to CRE x -- [x] add parse from export format -- [x] add github actions ci -- [x] make into flask rest api -- [x] > refer use case (search by cre) -- [x] > search by standard -- [x] add the ability for a mapping document to have multiple yamls in it -- [x] add db integration of tags -- [x] add tags in db (search by tag, export with tags etc) -- [x] add parser integration of tags (parse the new new new spreadsheet template which incorporates tags) -- [x] add search by tag in rest -- [x] add dockerfile -- [x] add conditional export (select the standards you want exported get mappings between them) (gap analysis use case) ~ -- Done +- [ ] 
frontend + +- [x] add parse from export format +- [x] add parse from export format where the root doc is a standard and it links to cres or groups +- [x] add parse from spreadsheet with unknown standards (for key,val in items add_standard) +- [x] merge spreadsheet to yaml and mapping add, they do the same thing +- [x] add the ability for standards to link other standards, then you can handle assigning CREs yourself +- [x] support importing yaml export files of more than 1 levels deep +- [x] add export for Standards unmapped to CREs as lone standards (useful for visibility) +- [x] add sparse_spreadsheet_export functionality one level of mapping per row, either everything that maps to standard X or everything that maps to CRE x +- [x] add parse from export format +- [x] add github actions ci +- [x] make into flask rest api +- [x] > refer use case (search by cre) +- [x] > search by standard +- [x] add the ability for a mapping document to have multiple yamls in it +- [x] add db integration of tags +- [x] add tags in db (search by tag, export with tags etc) +- [x] add parser integration of tags (parse the new new new spreadsheet template which incorporates tags) +- [x] add search by tag in rest +- [x] add dockerfile +- [x] add conditional export (select the standards you want exported get mappings between them) (gap analysis use case) ~ -- Done - [x] add flask cover command from here https://github.com/miguelgrinberg/flasky/blob/master/flasky.py#L33 - [x] Make Standards versioned ~ -- Done -- [x] write frontend -- [x] make results per page a config item from env -- [x] migrate to new repo -- [x] add black autoformater -- [x] merge frontend changes to master -- [x] Typed Python? +- [x] write frontend +- [x] make results per page a config item from env +- [x] migrate to new repo +- [x] add black autoformater +- [x] merge frontend changes to master +- [x] Typed Python? 
= Future Considerations = diff --git a/application/frontend/src/components/ExportButton/export-button.scss b/application/frontend/src/components/ExportButton/export-button.scss new file mode 100644 index 000000000..cec0a1faa --- /dev/null +++ b/application/frontend/src/components/ExportButton/export-button.scss @@ -0,0 +1,5 @@ +a.export-button { + font-size: 1.2rem; + margin-left: 1rem; + cursor: pointer; +} diff --git a/application/frontend/src/components/ExportButton/export-button.tsx b/application/frontend/src/components/ExportButton/export-button.tsx new file mode 100644 index 000000000..fe4f4e020 --- /dev/null +++ b/application/frontend/src/components/ExportButton/export-button.tsx @@ -0,0 +1,74 @@ +import './export-button.scss'; + +import React, { useState } from 'react'; +import { Loader } from 'semantic-ui-react'; + +interface IExportButton { + fetchURL: string; + fetchParams?: any; +} + +const openURLInNewTab = (url: string): void => { + const newWindow = window.open(url, '_blank', 'noopener,noreferrer'); + if (newWindow) newWindow.opener = null; +}; + +/** + * Returns the export URL for a given API endpoint. + * Handles the CRE, search and standard endpoints. + * + * Also handles query parameters as part of the `url` or as part of `params`. + * @param url original fetch URL + * @param params (optional) parameters that were passed to Axios + * @returns computed request url to get an export of the endpoint + */ +const getExportURL = (url: string, params?: string[][]): string => { + const EXPORT_STRING = '/export'; + if (url.includes('?')) { + const [prefix, queryParams] = url.split('?'); + return prefix + EXPORT_STRING + '?' + queryParams; + } + + if (params) { + return url + '/export?' 
+ new URLSearchParams(params['params']).toString(); + } + + return url + EXPORT_STRING; +}; + +const ExportButton = ({ fetchURL, fetchParams }: IExportButton) => { + const [isLoading, setLoading] = useState(false); + + const fetchSpreadsheetURLAndOpen = () => { + setLoading(true); + + fetch(getExportURL(fetchURL, fetchParams)) + .then((response) => response.json()) + .then((data) => { + if (!data || !data.status || data.status !== 'ok') { + window.alert('Failed to export CRE data'); + } + + openURLInNewTab(data.spreadsheetURL); + + // Timeout is added so we don't get a flashing effect + setTimeout(() => { + setLoading(false); + }, 500); + }); + }; + + return ( + fetchSpreadsheetURLAndOpen()}> + 🔗 Export + {isLoading && ( + <> + {' '} + + + )} + + ); +}; + +export default ExportButton; diff --git a/application/frontend/src/pages/CommonRequirementEnumeration/CommonRequirementEnumeration.tsx b/application/frontend/src/pages/CommonRequirementEnumeration/CommonRequirementEnumeration.tsx index d91ae2b2e..7bc661d31 100644 --- a/application/frontend/src/pages/CommonRequirementEnumeration/CommonRequirementEnumeration.tsx +++ b/application/frontend/src/pages/CommonRequirementEnumeration/CommonRequirementEnumeration.tsx @@ -1,27 +1,28 @@ import './commonRequirementEnumeration.scss'; -import React, { useEffect, useMemo, useState, useContext } from 'react'; +import React, { useEffect, useMemo, useState } from 'react'; import { useQuery } from 'react-query'; import { useParams } from 'react-router-dom'; import { DocumentNode } from '../../components/DocumentNode'; +import ExportButton from '../../components/ExportButton/export-button'; +import { ClearFilterButton, FilterButton } from '../../components/FilterButton/FilterButton'; import { LoadingAndErrorIndicator } from '../../components/LoadingAndErrorIndicator'; import { DOCUMENT_TYPE_NAMES } from '../../const'; import { useEnvironment } from '../../hooks'; +import { applyFilters } from '../../hooks/applyFilters'; import { 
Document } from '../../types'; import { groupLinksByType } from '../../utils'; -import { applyFilters, filterContext } from '../../hooks/applyFilters'; -import { ClearFilterButton, FilterButton } from '../../components/FilterButton/FilterButton'; export const CommonRequirementEnumeration = () => { const { id } = useParams(); const { apiUrl } = useEnvironment(); const [loading, setLoading] = useState(false); - const globalState = useContext(filterContext) - const { error, data, refetch } = useQuery<{ data: Document; }, string>( + const FETCH_URL = `${apiUrl}/id/${id}`; + const { error, data, refetch } = useQuery<{ data: Document }, string>( 'cre', - () => fetch(`${apiUrl}/id/${id}`).then((res) => res.json()), + () => fetch(FETCH_URL).then((res) => res.json()), { retry: false, enabled: false, @@ -38,13 +39,13 @@ export const CommonRequirementEnumeration = () => { }, [id]); const cre = data?.data; - let filteredCRE - if(cre != undefined){ - filteredCRE = applyFilters(JSON.parse(JSON.stringify(cre))) // dirty deepcopy + let filteredCRE; + if (cre != undefined) { + filteredCRE = applyFilters(JSON.parse(JSON.stringify(cre))); // dirty deepcopy } let currentUrlParams = new URLSearchParams(window.location.search); - let display:Document - display = currentUrlParams.get("applyFilters") === "true"? filteredCRE:cre + let display: Document; + display = currentUrlParams.get('applyFilters') === 'true' ? filteredCRE : cre; const linksByType = useMemo(() => (display ? groupLinksByType(display) : {}), [display]); @@ -53,37 +54,54 @@ export const CommonRequirementEnumeration = () => { {!loading && !error && display && ( <> -

{display.name}

+

+ {display.name} + +

{display.id}
{display.description}
- { display && display.hyperlink && + {display && display.hyperlink && ( <> Reference: - { display.hyperlink } + + {' '} + {display.hyperlink} + - } - {display.tags? -
Tags:{display.tags.map((tag) => ( {tag} ))}
:""} - - {currentUrlParams.get("applyFilters")==="true"? -
- Filtering on: - {currentUrlParams.getAll("filters").map((filter)=>( - {filter.replace("s:","").replace("c:","")}, ))} + )} + {display.tags ? ( +
+ Tags:{' '} + {display.tags.map((tag) => ( + {tag} + ))} +
+ ) : ( + '' + )} - -
:""} + {currentUrlParams.get('applyFilters') === 'true' ? ( +
+ Filtering on: + {currentUrlParams.getAll('filters').map((filter) => ( + {filter.replace('s:', '').replace('c:', '')}, + ))} + +
+ ) : ( + '' + )}
{Object.keys(linksByType).length > 0 && Object.entries(linksByType).map(([type, links]) => (
- {display.doctype}:{display.id}: {display.name} {DOCUMENT_TYPE_NAMES[type]}: + {display.doctype}:{display.id}: {display.name} {DOCUMENT_TYPE_NAMES[type]}:
{links.map((link, i) => (
- +
))}
diff --git a/application/frontend/src/pages/Search/SearchName.tsx b/application/frontend/src/pages/Search/SearchName.tsx index 0866b78a6..de1b87be7 100644 --- a/application/frontend/src/pages/Search/SearchName.tsx +++ b/application/frontend/src/pages/Search/SearchName.tsx @@ -1,12 +1,12 @@ +import axios from 'axios'; import React, { useEffect, useMemo, useState } from 'react'; import { useParams } from 'react-router-dom'; -import axios from 'axios'; -import { useEnvironment } from '../../hooks'; +import ExportButton from '../../components/ExportButton/export-button'; import { LoadingAndErrorIndicator } from '../../components/LoadingAndErrorIndicator'; -import { groupBy } from '../../utils/document'; +import { useEnvironment } from '../../hooks'; import { Document } from '../../types'; - +import { groupBy } from '../../utils/document'; import { SearchResults } from './components/SearchResults'; const CRE = "CRE"; @@ -19,22 +19,27 @@ export const SearchName = () => { const [documents, setDocuments] = useState([]); const [error, setError] = useState(null); + const FETCH_URL = `${apiUrl}/text_search`; + const FETCH_PARAMS = { params: { text: searchTerm } }; + useEffect(() => { setLoading(true); - axios.get(`${apiUrl}/text_search`, {params: {text: searchTerm}}) - .then(function (response) { - setError(null); - setDocuments(response.data); - }) - .catch(function (axiosError) { - // TODO: backend errors if no matches, shoudl return - // proper error instead. - setError(axiosError); - }).finally( () => { - setLoading(false); - }); + axios + .get(FETCH_URL, FETCH_PARAMS) + .then(function (response) { + setError(null); + setDocuments(response.data); + }) + .catch(function (axiosError) { + // TODO: backend errors if no matches, shoudl return + // proper error instead. 
+ setError(axiosError); + }) + .finally(() => { + setLoading(false); + }); }, [searchTerm]); - + const groupedByType = groupBy(documents, doc => doc.doctype); const cres = groupedByType[CRE] @@ -54,8 +59,10 @@ export const SearchName = () => { {!loading && !error &&
-

Related CRE's

- {cres && } +

Related CRE's + +

+ {groupedByType[CRE] && }

Related Documents

@@ -63,6 +70,7 @@ export const SearchName = () => {
} +
); }; diff --git a/application/frontend/src/pages/Search/components/BodyText.tsx b/application/frontend/src/pages/Search/components/BodyText.tsx index e1167db79..d0f20bf00 100644 --- a/application/frontend/src/pages/Search/components/BodyText.tsx +++ b/application/frontend/src/pages/Search/components/BodyText.tsx @@ -1,72 +1,81 @@ -import React, { useState } from 'react'; import './BodyText.scss'; -export const SearchBody = () => { +import React, { useState } from 'react'; +export const SearchBody = () => { return (
-

- OPEN CRE -

+

OPEN CRE

- CRE is an interactive content linking platform for uniting security standards and guidelines. It offers easy and robust access to relevant information when designing, developing, testing and procuring secure software. + CRE is an interactive content linking platform for uniting security standards and guidelines. It + offers easy and robust access to relevant information when designing, developing, testing and + procuring secure software.

-

- WHY? -

+

WHY?

- Independent software security professionals got together to find a solution for the complexity and fragmentation in today’s landscape of security standards and guidelines. These people are Spyros Gasteratos, Elie Saad, Rob van der Veer and friends, in close collaboration with the SKF, OpenSSF and Owasp Top 10 project. + Independent software security professionals got together to find a solution for the complexity and + fragmentation in today’s landscape of security standards and guidelines. These people are Spyros + Gasteratos, Elie Saad, Rob van der Veer and friends, in close collaboration with the SKF, OpenSSF and + Owasp Top 10 project.

-

- HOW? -

+

HOW?

- The CRE links each section of a standard to a shared topic (a Common Requirement), causing that section to also link with all other resources that map to the same topic. This 1) enables users to find all combined information from relevant sources, 2) it facilitates a shared and better understanding of cyber security, and 3) it allows standard makers to have links that keep working and offer all the information that readers need, so they don’t have to cover it all themselves. The CRE maintains itself: topic links in the standard text are scanned automatically. Furthermore, topics are linked with related other topics, creating a semantic web for security. + The CRE links each section of a standard to a shared topic (a Common Requirement), causing that + section to also link with all other resources that map to the same topic. This 1) enables users to + find all combined information from relevant sources, 2) it facilitates a shared and better + understanding of cyber security, and 3) it allows standard makers to have links that keep working and + offer all the information that readers need, so they don’t have to cover it all themselves. The CRE + maintains itself: topic links in the standard text are scanned automatically. Furthermore, topics are + linked with related other topics, creating a semantic web for security.

- Example: the session time-out topic will take the user to relevant criteria in several standards - , and to testing guides, development tips, more technical detail, threat descriptions, articles etc. - From there, the user can navigate to resources about session management in general. + Example: the session time-out topic will take the user to relevant criteria in several + standards , and to testing guides, development tips, more technical detail, threat descriptions, + articles etc. From there, the user can navigate to resources about session management in general.

+ Moreover, standards can use the CRE project to maintain permanent links to other standards. For example ASVS Deeplink could be maintained by ASVS and always redirect to an ASVS entry on github while the following will redirect to the specific section ASVS v9.2.5

-

- WHEN? -

+

WHEN?

- CRE is currently in beta and has linked OWASP standards (Top 10, ASVS, Proactive Controls, Cheat sheets, Testing guide), plus several other sources (CWE, NIST-800 53, NIST-800 63b), as part of the + CRE is currently in beta and has linked OWASP standards (Top 10, ASVS, Proactive Controls, Cheat + sheets, Testing guide), plus several other sources (CWE, NIST-800 53, NIST-800 63b), as part of the OWASP Integration standard project .

-

- Join us -

+

Join us

- Contact us (rob.vanderveer [at] owasp.org) to join the movement. Currently, a stakeholder group is being formed. + Contact us (rob.vanderveer [at] owasp.org) to join the movement. Currently, a stakeholder group is + being formed.

- For more details, see this - presentation video + For more details, see this + + {' '} + presentation video + , or read the - CRE explanation document + + {' '} + CRE explanation document + .

-

- TRY -

+

TRY

See the CRE search bar (beta version). Try searching for Top10 2017 as standard and click around, or 482-866 + as CRE-ID, to get an idea, or search for "Session", or an overview of all top-level topics.

diff --git a/application/frontend/src/pages/Standard/Standard.tsx b/application/frontend/src/pages/Standard/Standard.tsx index eb9033a28..9cf7abc8b 100644 --- a/application/frontend/src/pages/Standard/Standard.tsx +++ b/application/frontend/src/pages/Standard/Standard.tsx @@ -2,10 +2,11 @@ import './standard.scss'; import React, { useEffect, useState } from 'react'; import { useQuery } from 'react-query'; -import { useParams, useLocation } from 'react-router-dom'; +import { useLocation, useParams } from 'react-router-dom'; import { Pagination } from 'semantic-ui-react'; import { DocumentNode } from '../../components/DocumentNode'; +import ExportButton from '../../components/ExportButton/export-button'; import { LoadingAndErrorIndicator } from '../../components/LoadingAndErrorIndicator'; import { useEnvironment } from '../../hooks'; import { Document } from '../../types'; @@ -15,18 +16,20 @@ export const Standard = () => { const { apiUrl } = useEnvironment(); const [page, setPage] = useState(1); const [loading, setLoading] = useState(false); - if (!type) { type = "standard" } - const { error, data, refetch } = useQuery<{ standards: Document[]; total_pages: number; page: number }, string>( - 'standard', - () => fetch(`${apiUrl}/${type}/${id}?page=${page}`).then((res) => res.json()), - { - retry: false, - enabled: false, - onSettled: () => { - setLoading(false); - }, - } - ); + if (!type) { + type = 'standard'; + } + const FETCH_URL = `${apiUrl}/${type}/${id}?page=${page}`; + const { error, data, refetch } = useQuery< + { standards: Document[]; total_pages: number; page: number }, + string + >('standard', () => fetch(FETCH_URL).then((res) => res.json()), { + retry: false, + enabled: false, + onSettled: () => { + setLoading(false); + }, + }); useEffect(() => { window.scrollTo(0, 0); @@ -36,17 +39,19 @@ export const Standard = () => { const documents = data?.standards || []; - return ( <>
-

{id}

+

+ {id} + +

{!loading && !error && documents.map((standard, i) => (
- +
))} {data && data.total_pages > 0 && ( diff --git a/application/frontend/src/pages/Standard/StandardSection.tsx b/application/frontend/src/pages/Standard/StandardSection.tsx index 3f4adb203..83979385d 100644 --- a/application/frontend/src/pages/Standard/StandardSection.tsx +++ b/application/frontend/src/pages/Standard/StandardSection.tsx @@ -1,39 +1,39 @@ import './standard.scss'; -import React, { useEffect, useState, useMemo } from 'react'; +import React, { useEffect, useMemo, useState } from 'react'; import { useQuery } from 'react-query'; import { useParams } from 'react-router-dom'; import { Pagination } from 'semantic-ui-react'; import { DocumentNode } from '../../components/DocumentNode'; +import ExportButton from '../../components/ExportButton/export-button'; import { LoadingAndErrorIndicator } from '../../components/LoadingAndErrorIndicator'; +import { DOCUMENT_TYPE_NAMES } from '../../const'; import { useEnvironment } from '../../hooks'; import { Document } from '../../types'; import { groupLinksByType } from '../../utils'; -import { DOCUMENT_TYPE_NAMES } from '../../const'; - export const StandardSection = () => { const { id, section } = useParams(); const { apiUrl } = useEnvironment(); const [page, setPage] = useState(1); const [loading, setLoading] = useState(false); - + const getSectionParameter = (): string => { return section ? 
`§ion=${encodeURIComponent(section)}` : ''; - } + }; - const { error, data, refetch } = useQuery<{ standards: Document[]; total_pages: number; page: number }, string>( - 'standard section', - () => fetch(`${apiUrl}/standard/${id}?page=${page}${getSectionParameter()}`).then((res) => res.json()), - { - retry: false, - enabled: false, - onSettled: () => { - setLoading(false); - }, - } - ); + const FETCH_URL = `${apiUrl}/standard/${id}?page=${page}${getSectionParameter()}`; + const { error, data, refetch } = useQuery< + { standards: Document[]; total_pages: number; page: number }, + string + >('standard section', () => fetch(FETCH_URL).then((res) => res.json()), { + retry: false, + enabled: false, + onSettled: () => { + setLoading(false); + }, + }); useEffect(() => { window.scrollTo(0, 0); @@ -41,30 +41,36 @@ export const StandardSection = () => { refetch(); }, [page, id]); - const documents = data ?.standards || []; + const documents = data?.standards || []; const document = documents[0]; const linksByType = useMemo(() => (document ? groupLinksByType(document) : {}), [document]); return ( <>
-

{id}

+

+ {id} + +

Section: {document?.section}
- { document && document.hyperlink && - <> - Reference: - { document.hyperlink } - - } + {document && document.hyperlink && ( + <> + Reference: + + {' '} + {document.hyperlink} + + + )} - {!loading && - !error && + {!loading && !error && (
{Object.keys(linksByType).length > 0 && Object.entries(linksByType).map(([type, links]) => (
- {document.doctype}: {document.name} - {document.section} {DOCUMENT_TYPE_NAMES[type]}: + {document.doctype}: {document.name} - {document.section}{' '} + {DOCUMENT_TYPE_NAMES[type]}:
{links.map((link, i) => (
@@ -74,7 +80,7 @@ export const StandardSection = () => {
))}
- } + )} {data && data.total_pages > 0 && (
diff --git a/application/web/web_main.py b/application/web/web_main.py index b656f349e..c52ba8cd1 100644 --- a/application/web/web_main.py +++ b/application/web/web_main.py @@ -3,7 +3,8 @@ import logging import os import urllib.parse -from typing import Any +from typing import Any, List +import logging from application import cache from application.database import db @@ -20,6 +21,12 @@ send_from_directory, ) +from application import cache +from application.database import db +from application.defs import cre_defs as defs +from application.defs import osib_defs as odefs +from application.utils.spreadsheet import prepare_spreadsheet, write_spreadsheet + ITEMS_PER_PAGE = 20 app = Blueprint("web", __name__, static_folder="../frontend/www") @@ -43,6 +50,16 @@ def extend_cre_with_tag_links( return cre +def export_results_as_spreadsheet( + collection: db.Node_collection, docs: List[defs.Document] +): + prepared_spreadsheet_docs = prepare_spreadsheet(collection=collection, docs=docs) + spreadsheet_url = write_spreadsheet( + title="Export from CRE", docs=prepared_spreadsheet_docs, emails=[] + ) + return {"spreadsheetURL": spreadsheet_url, "status": "ok"} + + @app.route("/rest/v1/id/", methods=["GET"]) @app.route("/rest/v1/name/", methods=["GET"]) @cache.cached(timeout=50) @@ -51,14 +68,20 @@ def find_cre(creid: str = None, crename: str = None) -> Any: # refer include_only = request.args.getlist("include_only") opt_osib = request.args.get("osib") opt_md = request.args.get("format_md") + opt_export = request.args.get("export") cres = database.get_CREs(external_id=creid, name=crename, include_only=include_only) if cres: if len(cres) > 1: logger.error("get by id returned more than one results? 
This looks buggy") cre = cres[0] result = {"data": cre.todict()} + + if opt_export: + return export_results_as_spreadsheet(collection=database, docs=[cre]) + # disable until we have a consensus on tag behaviour # cre = extend_cre_with_tag_links(cre=cre, collection=database) + if opt_osib: result["osib"] = odefs.cre2osib([cre]).todict() if opt_md: @@ -76,6 +99,7 @@ def find_node_by_name(name: str, ntype: str = defs.Credoctypes.Standard.value) - opt_osib = request.args.get("osib") opt_version = request.args.get("version") opt_mdformat = request.args.get("format_md") + opt_export = request.args.get("export") if opt_section: opt_section = urllib.parse.unquote(opt_section) opt_subsection = request.args.get("subsection") @@ -115,6 +139,10 @@ def find_node_by_name(name: str, ntype: str = defs.Credoctypes.Standard.value) - result["osib"] = odefs.cre2osib(nodes).todict() res = [node.todict() for node in nodes] result["standards"] = res + + if opt_export: + return export_results_as_spreadsheet(collection=database, docs=nodes) + return jsonify(result) else: abort(404) @@ -167,11 +195,14 @@ def text_search() -> Any: database = db.Node_collection() text = request.args.get("text") -opt_md = reques.args.get("format_md") +opt_md = request.args.get("format_md") + opt_export = request.args.get("export") documents = database.text_search(text) if documents: if opt_md: return mdutils.cre_to_md(documents) res = [doc.todict() for doc in documents] + if opt_export: + return export_results_as_spreadsheet(collection=database, docs=documents) return jsonify(res) else: abort(404)