This commit is contained in:
1018
node_modules/decap-cms-backend-gitlab/src/API.ts
generated
vendored
Normal file
1018
node_modules/decap-cms-backend-gitlab/src/API.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
122
node_modules/decap-cms-backend-gitlab/src/AuthenticationPage.js
generated
vendored
Normal file
122
node_modules/decap-cms-backend-gitlab/src/AuthenticationPage.js
generated
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import styled from '@emotion/styled';
|
||||
import {
|
||||
NetlifyAuthenticator,
|
||||
ImplicitAuthenticator,
|
||||
PkceAuthenticator,
|
||||
} from 'decap-cms-lib-auth';
|
||||
import { AuthenticationPage, Icon } from 'decap-cms-ui-default';
|
||||
|
||||
// Icon inside the login button, spaced away from the button label.
const LoginButtonIcon = styled(Icon)`
  margin-right: 18px;
`;

/**
 * Factories for the client-side OAuth flows, keyed by the backend
 * config's `auth_type` value. Each returns an authenticator instance
 * configured from the given backend settings.
 */
const clientSideAuthenticators = {
  pkce: config =>
    new PkceAuthenticator({
      base_url: config.base_url,
      auth_endpoint: config.auth_endpoint,
      app_id: config.app_id,
      auth_token_endpoint: config.auth_token_endpoint,
      auth_token_endpoint_content_type: 'application/json; charset=utf-8',
    }),

  implicit: config =>
    new ImplicitAuthenticator({
      base_url: config.base_url,
      auth_endpoint: config.auth_endpoint,
      app_id: config.app_id,
      clearHash: config.clearHash,
    }),
};
|
||||
|
||||
/**
 * Login screen for the GitLab backend.
 *
 * Picks an authenticator based on the backend config's `auth_type`
 * (`pkce` or `implicit` via `clientSideAuthenticators`); any other
 * value falls back to the Netlify authenticator.
 */
export default class GitLabAuthenticationPage extends React.Component {
  static propTypes = {
    onLogin: PropTypes.func.isRequired,
    inProgress: PropTypes.bool,
    base_url: PropTypes.string,
    siteId: PropTypes.string,
    authEndpoint: PropTypes.string,
    config: PropTypes.object.isRequired,
    clearHash: PropTypes.func,
    t: PropTypes.func.isRequired,
  };

  state = {};

  componentDidMount() {
    // Defaults target gitlab.com's standard OAuth endpoints.
    const {
      auth_type: authType = '',
      base_url = 'https://gitlab.com',
      auth_endpoint = 'oauth/authorize',
      app_id = '',
    } = this.props.config.backend;

    if (clientSideAuthenticators[authType]) {
      this.auth = clientSideAuthenticators[authType]({
        base_url,
        auth_endpoint,
        app_id,
        auth_token_endpoint: 'oauth/token',
        clearHash: this.props.clearHash,
      });
      // Complete implicit authentication if we were redirected back to from the provider.
      this.auth.completeAuth((err, data) => {
        if (err) {
          this.setState({ loginError: err.toString() });
          return;
        }
        this.props.onLogin(data);
      });
    } else {
      this.auth = new NetlifyAuthenticator({
        base_url: this.props.base_url,
        // When running on localhost, authenticate against the demo site
        // instead of the configured site id.
        site_id:
          document.location.host.split(':')[0] === 'localhost'
            ? 'demo.decapcms.org'
            : this.props.siteId,
        auth_endpoint: this.props.authEndpoint,
      });
    }
  }

  // Starts the OAuth flow; on success the credentials are handed to
  // the `onLogin` callback, on failure the error is shown on the page.
  handleLogin = e => {
    e.preventDefault();
    this.auth.authenticate({ provider: 'gitlab', scope: 'api' }, (err, data) => {
      if (err) {
        this.setState({ loginError: err.toString() });
        return;
      }
      this.props.onLogin(data);
    });
  };

  render() {
    const { inProgress, config, t } = this.props;
    return (
      <AuthenticationPage
        onLogin={this.handleLogin}
        loginDisabled={inProgress}
        loginErrorMessage={this.state.loginError}
        logoUrl={config.logo_url}
        siteUrl={config.site_url}
        renderButtonContent={() => (
          <React.Fragment>
            <LoginButtonIcon type="gitlab" />{' '}
            {inProgress ? t('auth.loggingIn') : t('auth.loginWithGitLab')}
          </React.Fragment>
        )}
        t={t}
      />
    );
  }
}
|
||||
187
node_modules/decap-cms-backend-gitlab/src/__tests__/API.spec.js
generated
vendored
Normal file
187
node_modules/decap-cms-backend-gitlab/src/__tests__/API.spec.js
generated
vendored
Normal file
@@ -0,0 +1,187 @@
|
||||
import API, { getMaxAccess } from '../API';
|
||||
|
||||
// Fail loudly if any code path performs a real network request.
global.fetch = jest.fn().mockRejectedValue(new Error('should not call fetch inside tests'));

// Silence console.log output (tests below also assert on the spy).
jest.spyOn(console, 'log').mockImplementation(() => undefined);

describe('GitLab API', () => {
  beforeEach(() => {
    jest.resetAllMocks();
  });

  // Per these cases: an access_level >= 30 on the project or group grants
  // write access directly; shared groups at exactly 30 additionally
  // require developers_can_merge/developers_can_push on the branch.
  describe('hasWriteAccess', () => {
    test('should return true on project access_level >= 30', async () => {
      const api = new API({ repo: 'repo' });

      api.requestJSON = jest
        .fn()
        .mockResolvedValueOnce({ permissions: { project_access: { access_level: 30 } } });

      await expect(api.hasWriteAccess()).resolves.toBe(true);
    });

    test('should return false on project access_level < 30', async () => {
      const api = new API({ repo: 'repo' });

      api.requestJSON = jest
        .fn()
        .mockResolvedValueOnce({ permissions: { project_access: { access_level: 10 } } });

      await expect(api.hasWriteAccess()).resolves.toBe(false);
    });

    test('should return true on group access_level >= 30', async () => {
      const api = new API({ repo: 'repo' });

      api.requestJSON = jest
        .fn()
        .mockResolvedValueOnce({ permissions: { group_access: { access_level: 30 } } });

      await expect(api.hasWriteAccess()).resolves.toBe(true);
    });

    test('should return false on group access_level < 30', async () => {
      const api = new API({ repo: 'repo' });

      api.requestJSON = jest
        .fn()
        .mockResolvedValueOnce({ permissions: { group_access: { access_level: 10 } } });

      await expect(api.hasWriteAccess()).resolves.toBe(false);
    });

    test('should return true on shared group access_level >= 40', async () => {
      const api = new API({ repo: 'repo' });
      api.requestJSON = jest.fn().mockResolvedValueOnce({
        permissions: { project_access: null, group_access: null },
        shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 40 }],
      });

      await expect(api.hasWriteAccess()).resolves.toBe(true);

      // Level 40 is sufficient on its own — no branch lookup is made.
      expect(api.requestJSON).toHaveBeenCalledTimes(1);
    });

    test('should return true on shared group access_level >= 30, developers can merge and push', async () => {
      const api = new API({ repo: 'repo' });

      api.requestJSON = jest.fn();
      api.requestJSON.mockResolvedValueOnce({
        permissions: { project_access: null, group_access: null },
        shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
      });
      // Second call: branch info consulted for level-30 shared groups.
      api.requestJSON.mockResolvedValueOnce({
        developers_can_merge: true,
        developers_can_push: true,
      });

      await expect(api.hasWriteAccess()).resolves.toBe(true);
    });

    test('should return false on shared group access_level < 30,', async () => {
      const api = new API({ repo: 'repo' });

      api.requestJSON = jest.fn();
      api.requestJSON.mockResolvedValueOnce({
        permissions: { project_access: null, group_access: null },
        shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 20 }],
      });
      api.requestJSON.mockResolvedValueOnce({
        developers_can_merge: true,
        developers_can_push: true,
      });

      await expect(api.hasWriteAccess()).resolves.toBe(false);
    });

    test("should return false on shared group access_level >= 30, developers can't merge", async () => {
      const api = new API({ repo: 'repo' });

      api.requestJSON = jest.fn();
      api.requestJSON.mockResolvedValueOnce({
        permissions: { project_access: null, group_access: null },
        shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
      });
      api.requestJSON.mockResolvedValueOnce({
        developers_can_merge: false,
        developers_can_push: true,
      });

      await expect(api.hasWriteAccess()).resolves.toBe(false);
    });

    test("should return false on shared group access_level >= 30, developers can't push", async () => {
      const api = new API({ repo: 'repo' });

      api.requestJSON = jest.fn();
      api.requestJSON.mockResolvedValueOnce({
        permissions: { project_access: null, group_access: null },
        shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
      });
      api.requestJSON.mockResolvedValueOnce({
        developers_can_merge: true,
        developers_can_push: false,
      });

      await expect(api.hasWriteAccess()).resolves.toBe(false);
    });

    test('should return false on shared group access_level >= 30, error getting branch', async () => {
      const api = new API({ repo: 'repo' });

      api.requestJSON = jest.fn();
      api.requestJSON.mockResolvedValueOnce({
        permissions: { project_access: null, group_access: null },
        shared_with_groups: [{ group_access_level: 10 }, { group_access_level: 30 }],
      });

      // Branch lookup failure is logged and treated as "no write access".
      const error = new Error('Not Found');
      api.requestJSON.mockRejectedValue(error);

      await expect(api.hasWriteAccess()).resolves.toBe(false);

      expect(console.log).toHaveBeenCalledTimes(1);
      expect(console.log).toHaveBeenCalledWith('Failed getting default branch', error);
    });
  });

  describe('getStatuses', () => {
    test('should get preview statuses', async () => {
      const api = new API({ repo: 'repo' });

      const mr = { sha: 'sha' };
      const statuses = [
        { name: 'deploy', status: 'success', target_url: 'deploy-url' },
        { name: 'build', status: 'pending' },
      ];

      // 'Statues' (sic) mirrors the name of the API method being mocked.
      api.getBranchMergeRequest = jest.fn(() => Promise.resolve(mr));
      api.getMergeRequestStatues = jest.fn(() => Promise.resolve(statuses));

      const collectionName = 'posts';
      const slug = 'title';
      // Statuses map name→context and status→state; non-success
      // states are reported as 'other'.
      await expect(api.getStatuses(collectionName, slug)).resolves.toEqual([
        { context: 'deploy', state: 'success', target_url: 'deploy-url' },
        { context: 'build', state: 'other' },
      ]);

      expect(api.getBranchMergeRequest).toHaveBeenCalledTimes(1);
      expect(api.getBranchMergeRequest).toHaveBeenCalledWith('cms/posts/title');

      expect(api.getMergeRequestStatues).toHaveBeenCalledTimes(1);
      expect(api.getMergeRequestStatues).toHaveBeenCalledWith(mr, 'cms/posts/title');
    });
  });

  describe('getMaxAccess', () => {
    it('should return group with max access level', () => {
      const groups = [
        { group_access_level: 10 },
        { group_access_level: 5 },
        { group_access_level: 100 },
        { group_access_level: 1 },
      ];
      expect(getMaxAccess(groups)).toBe(groups[2]);
    });
  });
});
|
||||
552
node_modules/decap-cms-backend-gitlab/src/__tests__/gitlab.spec.js
generated
vendored
Normal file
552
node_modules/decap-cms-backend-gitlab/src/__tests__/gitlab.spec.js
generated
vendored
Normal file
@@ -0,0 +1,552 @@
|
||||
jest.mock('decap-cms-core/src/backend');
|
||||
import { fromJS } from 'immutable';
|
||||
import { oneLine, stripIndent } from 'common-tags';
|
||||
import nock from 'nock';
|
||||
import { Cursor } from 'decap-cms-lib-util';
|
||||
|
||||
import Gitlab from '../implementation';
|
||||
import AuthenticationPage from '../AuthenticationPage';
|
||||
|
||||
const { Backend, LocalStorageAuthStore } = jest.requireActual('decap-cms-core/src/backend');
|
||||
|
||||
/**
 * Build a fake GitLab tree listing plus matching raw markdown contents
 * for `length` entries under the folder `path`.
 *
 * Returns `{ tree, files }` where `tree` mimics the repository tree API
 * response and `files` maps each file path to its raw content.
 */
function generateEntries(path, length) {
  const entries = [];
  for (let index = 0; index < length; index += 1) {
    // Zero-pad the 1-based counter to three characters (slice keeps the
    // original truncation behavior for counters above 999).
    const id = `00${index + 1}`.slice(-3);
    const fileName = `test${id}.md`;
    entries.push({ id, fileName, filePath: `${path}/${fileName}` });
  }

  const tree = entries.map(({ id, fileName, filePath }) => ({
    id: `d8345753a1d935fa47a26317a503e73e1192d${id}`,
    name: fileName,
    type: 'blob',
    path: filePath,
    mode: '100644',
  }));

  const files = {};
  for (const { id, filePath } of entries) {
    files[filePath] = stripIndent`
      ---
      title: test ${id}
      ---
      # test ${id}
    `;
  }

  return { tree, files };
}
|
||||
|
||||
const manyEntries = generateEntries('many-entries', 500);
|
||||
|
||||
const mockRepo = {
|
||||
tree: {
|
||||
'/': [
|
||||
{
|
||||
id: '5d0620ebdbc92068a3e866866e928cc373f18429',
|
||||
name: 'content',
|
||||
type: 'tree',
|
||||
path: 'content',
|
||||
mode: '040000',
|
||||
},
|
||||
],
|
||||
content: [
|
||||
{
|
||||
id: 'b1a200e48be54fde12b636f9563d659d44c206a5',
|
||||
name: 'test1.md',
|
||||
type: 'blob',
|
||||
path: 'content/test1.md',
|
||||
mode: '100644',
|
||||
},
|
||||
{
|
||||
id: 'd8345753a1d935fa47a26317a503e73e1192d623',
|
||||
name: 'test2.md',
|
||||
type: 'blob',
|
||||
path: 'content/test2.md',
|
||||
mode: '100644',
|
||||
},
|
||||
],
|
||||
'many-entries': manyEntries.tree,
|
||||
},
|
||||
files: {
|
||||
'content/test1.md': stripIndent`
|
||||
---
|
||||
title: test
|
||||
---
|
||||
# test
|
||||
`,
|
||||
'content/test2.md': stripIndent`
|
||||
---
|
||||
title: test2
|
||||
---
|
||||
# test 2
|
||||
`,
|
||||
...manyEntries.files,
|
||||
},
|
||||
};
|
||||
|
||||
const resp = {
|
||||
user: {
|
||||
success: {
|
||||
id: 1,
|
||||
},
|
||||
},
|
||||
branch: {
|
||||
success: {
|
||||
name: 'master',
|
||||
commit: {
|
||||
id: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
project: {
|
||||
success: {
|
||||
permissions: {
|
||||
project_access: {
|
||||
access_level: 30,
|
||||
},
|
||||
},
|
||||
default_branch: 'main',
|
||||
},
|
||||
readOnly: {
|
||||
permissions: {
|
||||
project_access: {
|
||||
access_level: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
describe('gitlab backend', () => {
|
||||
let authStore;
|
||||
let backend;
|
||||
const repo = 'foo/bar';
|
||||
const defaultConfig = {
|
||||
backend: {
|
||||
name: 'gitlab',
|
||||
repo,
|
||||
},
|
||||
};
|
||||
const collectionContentConfig = {
|
||||
name: 'foo',
|
||||
folder: 'content',
|
||||
fields: [{ name: 'title' }],
|
||||
// TODO: folder_based_collection is an internal string, we should not
|
||||
// be depending on it here
|
||||
type: 'folder_based_collection',
|
||||
};
|
||||
const collectionManyEntriesConfig = {
|
||||
name: 'foo',
|
||||
folder: 'many-entries',
|
||||
fields: [{ name: 'title' }],
|
||||
// TODO: folder_based_collection is an internal string, we should not
|
||||
// be depending on it here
|
||||
type: 'folder_based_collection',
|
||||
};
|
||||
const collectionFilesConfig = {
|
||||
name: 'foo',
|
||||
files: [
|
||||
{
|
||||
label: 'foo',
|
||||
name: 'foo',
|
||||
file: 'content/test1.md',
|
||||
fields: [{ name: 'title' }],
|
||||
},
|
||||
{
|
||||
label: 'bar',
|
||||
name: 'bar',
|
||||
file: 'content/test2.md',
|
||||
fields: [{ name: 'title' }],
|
||||
},
|
||||
],
|
||||
type: 'file_based_collection',
|
||||
};
|
||||
const mockCredentials = { token: 'MOCK_TOKEN' };
|
||||
const expectedRepo = encodeURIComponent(repo);
|
||||
const expectedRepoUrl = `/projects/${expectedRepo}`;
|
||||
|
||||
// Create a Backend wired to a fresh Gitlab implementation and a fresh
// LocalStorageAuthStore (stored in the shared `authStore` variable so
// tests can inspect it).
function resolveBackend(config = {}) {
  authStore = new LocalStorageAuthStore();
  const initializer = {
    init: (...args) => new Gitlab(...args),
  };
  return new Backend(initializer, {
    backendName: 'gitlab',
    config,
    authStore,
  });
}
|
||||
|
||||
// Scope nock to the backend's configured GitLab API root.
function mockApi(backend) {
  const { apiRoot } = backend.implementation;
  return nock(apiRoot);
}
|
||||
|
||||
// Intercept the requests made during authentication: the current-user
// lookup and the project lookup.
function interceptAuth(backend, { userResponse, projectResponse } = {}) {
  const api = mockApi(backend);

  api
    .get('/user')
    .query(true)
    .reply(200, userResponse || resp.user.success);

  // The `authenticate` method of the API class hits the same project
  // endpoint twice: once through `this.api.hasWriteAccess()` and once
  // through `getDefaultBranchName` from lib-util. The response therefore
  // has to be registered twice, otherwise nock reports
  // "No match for request to https://gitlab.com/api/v4".
  api
    .get(expectedRepoUrl)
    .times(2)
    .query(true)
    .reply(200, projectResponse || resp.project.success);
}
|
||||
|
||||
// Intercept a single-branch lookup, replying with the stock branch fixture.
function interceptBranch(backend, { branch = 'master' } = {}) {
  const branchUrl = `${expectedRepoUrl}/repository/branches/${encodeURIComponent(branch)}`;
  mockApi(backend).get(branchUrl).query(true).reply(200, resp.branch.success);
}
|
||||
|
||||
// Parse the query string of a URI into a plain key→value object.
// Values are left URL-encoded; a key without '=' maps to undefined.
function parseQuery(uri) {
  const queryString = uri.split('?')[1];
  if (!queryString) {
    return {};
  }
  const params = {};
  for (const pair of queryString.split('&')) {
    const [key, value] = pair.split('=');
    params[key] = value;
  }
  return params;
}
|
||||
|
||||
// Build GitLab-style pagination response headers (X-Page, X-Total-Pages,
// X-Per-Page, X-Total) plus an RFC 5988 `Link` header for a tree listing.
function createHeaders(backend, { basePath, path, page, perPage, pageCount, totalCount }) {
  const pageNum = parseInt(page, 10);
  const pageCountNum = parseInt(pageCount, 10);
  const url = `${backend.implementation.apiRoot}${basePath}`;

  // Reconstruct the request URL for a given page, with a fixed
  // query-parameter order matching what GitLab emits.
  function link(linkPage) {
    return `<${url}?id=${expectedRepo}&page=${linkPage}&path=${path}&per_page=${perPage}&recursive=false>`;
  }

  // first/last are always present; prev is omitted on the first page and
  // next on the last. The trailing `.slice(0, -1)` removes the dangling
  // comma left by the final entry.
  const linkHeader = oneLine`
    ${link(1)}; rel="first",
    ${link(pageCount)}; rel="last",
    ${pageNum === 1 ? '' : `${link(pageNum - 1)}; rel="prev",`}
    ${pageNum === pageCountNum ? '' : `${link(pageNum + 1)}; rel="next",`}
  `.slice(0, -1);

  return {
    'X-Page': page,
    'X-Total-Pages': pageCount,
    'X-Per-Page': perPage,
    'X-Total': totalCount,
    Link: linkHeader,
  };
}
|
||||
|
||||
// Intercept GitLab tree listings for a collection folder, serving
// `mockRepo.tree[folder]` one page at a time with pagination headers.
// `repeat` registers the intercept for that many requests; `page`
// restricts matching to one expected page number; `verb` may be 'head'
// to answer header-only probes.
function interceptCollection(
  backend,
  collection,
  { verb = 'get', repeat = 1, page: expectedPage } = {},
) {
  const api = mockApi(backend);
  const url = `${expectedRepoUrl}/repository/tree`;
  const { folder } = collection;
  const tree = mockRepo.tree[folder];
  api[verb](url)
    .query(({ path, page }) => {
      // Only match listings for this collection's folder…
      if (path !== folder) {
        return false;
      }
      // …and, when an expected page was given, only that page.
      if (expectedPage && page && parseInt(page, 10) !== parseInt(expectedPage, 10)) {
        return false;
      }
      return true;
    })
    .times(repeat)
    .reply(uri => {
      const { page = 1, per_page = 20 } = parseQuery(uri);
      // NOTE(review): Math.round can drop a small final partial page;
      // harmless here since the fixtures divide evenly (500 % 20 === 0).
      const pageCount = tree.length <= per_page ? 1 : Math.round(tree.length / per_page);
      const pageLastIndex = page * per_page;
      const pageFirstIndex = pageLastIndex - per_page;
      const resp = tree.slice(pageFirstIndex, pageLastIndex);
      return [
        200,
        // HEAD replies carry only the pagination headers.
        verb === 'head' ? null : resp,
        createHeaders(backend, {
          basePath: url,
          path: folder,
          page,
          perPage: per_page,
          pageCount,
          totalCount: tree.length,
        }),
      ];
    });
}
|
||||
|
||||
/**
 * Intercept the raw-file download and the commit-log request for a
 * single repository path, replying with the fixture content and a
 * canned commit entry.
 */
function interceptFiles(backend, path) {
  const api = mockApi(backend);
  const url = `${expectedRepoUrl}/repository/files/${encodeURIComponent(path)}/raw`;
  api.get(url).query(true).reply(200, mockRepo.files[path]);

  api
    .get(`${expectedRepoUrl}/repository/commits`)
    // Bug fix: the previous matcher `({ path }) => path === path`
    // destructured `path` from the query, shadowing the function's
    // `path` argument, so the comparison was always true and the
    // intercept matched any commits request. Compare the query's
    // path parameter against this file's path instead.
    .query(query => query.path === path)
    .reply(200, [
      {
        author_name: 'author_name',
        author_email: 'author_email',
        authored_date: 'authored_date',
      },
    ]);
}
|
||||
|
||||
// Shared beforeEach for suites that need an authenticated backend:
// creates the backend, intercepts auth, logs in, and registers HEAD
// intercepts for both collection folders (presumably used by the
// implementation to probe the folders — confirm against implementation.ts).
function sharedSetup() {
  beforeEach(async () => {
    backend = resolveBackend(defaultConfig);
    interceptAuth(backend);
    await backend.authenticate(mockCredentials);
    interceptCollection(backend, collectionManyEntriesConfig, { verb: 'head' });
    interceptCollection(backend, collectionContentConfig, { verb: 'head' });
  });
}
|
||||
|
||||
it('throws if configuration does not include repo', () => {
|
||||
expect(() => resolveBackend({ backend: {} })).toThrowErrorMatchingInlineSnapshot(
|
||||
`"The GitLab backend needs a \\"repo\\" in the backend configuration."`,
|
||||
);
|
||||
});
|
||||
|
||||
describe('authComponent', () => {
|
||||
it('returns authentication page component', () => {
|
||||
backend = resolveBackend(defaultConfig);
|
||||
expect(backend.authComponent()).toEqual(AuthenticationPage);
|
||||
});
|
||||
});
|
||||
|
||||
describe('authenticate', () => {
|
||||
it('throws if user does not have access to project', async () => {
|
||||
backend = resolveBackend(defaultConfig);
|
||||
interceptAuth(backend, { projectResponse: resp.project.readOnly });
|
||||
await expect(
|
||||
backend.authenticate(mockCredentials),
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"Your GitLab user account does not have access to this repo."`,
|
||||
);
|
||||
});
|
||||
|
||||
it('stores and returns user object on success', async () => {
|
||||
const backendName = defaultConfig.backend.name;
|
||||
backend = resolveBackend(defaultConfig);
|
||||
interceptAuth(backend);
|
||||
const user = await backend.authenticate(mockCredentials);
|
||||
expect(authStore.retrieve()).toEqual(user);
|
||||
expect(user).toEqual({ ...resp.user.success, ...mockCredentials, backendName });
|
||||
});
|
||||
});
|
||||
|
||||
describe('currentUser', () => {
|
||||
it('returns null if no user', async () => {
|
||||
backend = resolveBackend(defaultConfig);
|
||||
const user = await backend.currentUser();
|
||||
expect(user).toEqual(null);
|
||||
});
|
||||
|
||||
it('returns the stored user if exists', async () => {
|
||||
const backendName = defaultConfig.backend.name;
|
||||
backend = resolveBackend(defaultConfig);
|
||||
interceptAuth(backend);
|
||||
await backend.authenticate(mockCredentials);
|
||||
const user = await backend.currentUser();
|
||||
expect(user).toEqual({ ...resp.user.success, ...mockCredentials, backendName });
|
||||
});
|
||||
});
|
||||
|
||||
describe('getToken', () => {
|
||||
it('returns the token for the current user', async () => {
|
||||
backend = resolveBackend(defaultConfig);
|
||||
interceptAuth(backend);
|
||||
await backend.authenticate(mockCredentials);
|
||||
const token = await backend.getToken();
|
||||
expect(token).toEqual(mockCredentials.token);
|
||||
});
|
||||
});
|
||||
|
||||
describe('logout', () => {
|
||||
it('sets token to null', async () => {
|
||||
backend = resolveBackend(defaultConfig);
|
||||
interceptAuth(backend);
|
||||
await backend.authenticate(mockCredentials);
|
||||
await backend.logout();
|
||||
const token = await backend.getToken();
|
||||
expect(token).toEqual(null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEntry', () => {
|
||||
sharedSetup();
|
||||
|
||||
it('returns an entry from folder collection', async () => {
|
||||
const entryTree = mockRepo.tree[collectionContentConfig.folder][0];
|
||||
const slug = entryTree.path.split('/').pop().replace('.md', '');
|
||||
|
||||
interceptFiles(backend, entryTree.path);
|
||||
interceptCollection(backend, collectionContentConfig);
|
||||
|
||||
const entry = await backend.getEntry(
|
||||
{
|
||||
config: {},
|
||||
integrations: fromJS([]),
|
||||
entryDraft: fromJS({}),
|
||||
mediaLibrary: fromJS({}),
|
||||
},
|
||||
fromJS(collectionContentConfig),
|
||||
slug,
|
||||
);
|
||||
|
||||
expect(entry).toEqual(expect.objectContaining({ path: entryTree.path }));
|
||||
});
|
||||
});
|
||||
|
||||
describe('listEntries', () => {
|
||||
sharedSetup();
|
||||
|
||||
it('returns entries from folder collection', async () => {
|
||||
const tree = mockRepo.tree[collectionContentConfig.folder];
|
||||
tree.forEach(file => interceptFiles(backend, file.path));
|
||||
|
||||
interceptCollection(backend, collectionContentConfig);
|
||||
const entries = await backend.listEntries(fromJS(collectionContentConfig));
|
||||
|
||||
expect(entries).toEqual({
|
||||
cursor: expect.any(Cursor),
|
||||
pagination: 1,
|
||||
entries: expect.arrayContaining(
|
||||
tree.map(file => expect.objectContaining({ path: file.path })),
|
||||
),
|
||||
});
|
||||
expect(entries.entries).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('returns all entries from folder collection', async () => {
|
||||
const tree = mockRepo.tree[collectionManyEntriesConfig.folder];
|
||||
interceptBranch(backend);
|
||||
tree.forEach(file => interceptFiles(backend, file.path));
|
||||
|
||||
interceptCollection(backend, collectionManyEntriesConfig, { repeat: 5 });
|
||||
const entries = await backend.listAllEntries(fromJS(collectionManyEntriesConfig));
|
||||
|
||||
expect(entries).toEqual(
|
||||
expect.arrayContaining(tree.map(file => expect.objectContaining({ path: file.path }))),
|
||||
);
|
||||
expect(entries).toHaveLength(500);
|
||||
}, 7000);
|
||||
|
||||
it('returns entries from file collection', async () => {
|
||||
const { files } = collectionFilesConfig;
|
||||
files.forEach(file => interceptFiles(backend, file.file));
|
||||
const entries = await backend.listEntries(fromJS(collectionFilesConfig));
|
||||
|
||||
expect(entries).toEqual({
|
||||
cursor: expect.any(Cursor),
|
||||
entries: expect.arrayContaining(
|
||||
files.map(file => expect.objectContaining({ path: file.file })),
|
||||
),
|
||||
});
|
||||
expect(entries.entries).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('returns first page from paginated folder collection tree', async () => {
|
||||
const tree = mockRepo.tree[collectionManyEntriesConfig.folder];
|
||||
const pageTree = tree.slice(0, 20);
|
||||
pageTree.forEach(file => interceptFiles(backend, file.path));
|
||||
interceptCollection(backend, collectionManyEntriesConfig, { page: 1 });
|
||||
const entries = await backend.listEntries(fromJS(collectionManyEntriesConfig));
|
||||
|
||||
expect(entries.entries).toEqual(
|
||||
expect.arrayContaining(pageTree.map(file => expect.objectContaining({ path: file.path }))),
|
||||
);
|
||||
expect(entries.entries).toHaveLength(20);
|
||||
});
|
||||
});
|
||||
|
||||
describe('traverseCursor', () => {
|
||||
sharedSetup();
|
||||
|
||||
it('returns complete last page of paginated tree', async () => {
|
||||
const tree = mockRepo.tree[collectionManyEntriesConfig.folder];
|
||||
tree.slice(0, 20).forEach(file => interceptFiles(backend, file.path));
|
||||
interceptCollection(backend, collectionManyEntriesConfig, { page: 1 });
|
||||
const entries = await backend.listEntries(fromJS(collectionManyEntriesConfig));
|
||||
|
||||
const nextPageTree = tree.slice(20, 40);
|
||||
nextPageTree.forEach(file => interceptFiles(backend, file.path));
|
||||
interceptCollection(backend, collectionManyEntriesConfig, { page: 2 });
|
||||
const nextPage = await backend.traverseCursor(entries.cursor, 'next');
|
||||
|
||||
expect(nextPage.entries).toEqual(
|
||||
expect.arrayContaining(
|
||||
nextPageTree.map(file => expect.objectContaining({ path: file.path })),
|
||||
),
|
||||
);
|
||||
expect(nextPage.entries).toHaveLength(20);
|
||||
|
||||
const lastPageTree = tree.slice(-20);
|
||||
lastPageTree.forEach(file => interceptFiles(backend, file.path));
|
||||
interceptCollection(backend, collectionManyEntriesConfig, { page: 25 });
|
||||
const lastPage = await backend.traverseCursor(nextPage.cursor, 'last');
|
||||
expect(lastPage.entries).toEqual(
|
||||
expect.arrayContaining(
|
||||
lastPageTree.map(file => expect.objectContaining({ path: file.path })),
|
||||
),
|
||||
);
|
||||
expect(lastPage.entries).toHaveLength(20);
|
||||
});
|
||||
});
|
||||
|
||||
describe('filterFile', () => {
|
||||
it('should return true for nested file with matching depth', () => {
|
||||
backend = resolveBackend(defaultConfig);
|
||||
|
||||
expect(
|
||||
backend.implementation.filterFile(
|
||||
'content/posts',
|
||||
{ name: 'index.md', path: 'content/posts/dir1/dir2/index.md' },
|
||||
'md',
|
||||
3,
|
||||
),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for nested file with non matching depth', () => {
|
||||
backend = resolveBackend(defaultConfig);
|
||||
|
||||
expect(
|
||||
backend.implementation.filterFile(
|
||||
'content/posts',
|
||||
{ name: 'index.md', path: 'content/posts/dir1/dir2/index.md' },
|
||||
'md',
|
||||
2,
|
||||
),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll();
|
||||
authStore.logout();
|
||||
backend = null;
|
||||
expect(authStore.retrieve()).toEqual(null);
|
||||
});
|
||||
});
|
||||
470
node_modules/decap-cms-backend-gitlab/src/implementation.ts
generated
vendored
Normal file
470
node_modules/decap-cms-backend-gitlab/src/implementation.ts
generated
vendored
Normal file
@@ -0,0 +1,470 @@
|
||||
import trimStart from 'lodash/trimStart';
|
||||
import semaphore from 'semaphore';
|
||||
import { trim } from 'lodash';
|
||||
import { stripIndent } from 'common-tags';
|
||||
import {
|
||||
CURSOR_COMPATIBILITY_SYMBOL,
|
||||
basename,
|
||||
entriesByFolder,
|
||||
entriesByFiles,
|
||||
getMediaDisplayURL,
|
||||
getMediaAsBlob,
|
||||
unpublishedEntries,
|
||||
getPreviewStatus,
|
||||
asyncLock,
|
||||
runWithLock,
|
||||
getBlobSHA,
|
||||
blobToFileObj,
|
||||
contentKeyFromBranch,
|
||||
generateContentKey,
|
||||
localForage,
|
||||
allEntriesByFolder,
|
||||
filterByExtension,
|
||||
branchFromContentKey,
|
||||
getDefaultBranchName,
|
||||
} from 'decap-cms-lib-util';
|
||||
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
import API, { API_NAME } from './API';
|
||||
|
||||
import type {
|
||||
Entry,
|
||||
AssetProxy,
|
||||
PersistOptions,
|
||||
Cursor,
|
||||
Implementation,
|
||||
DisplayURL,
|
||||
User,
|
||||
Credentials,
|
||||
Config,
|
||||
ImplementationFile,
|
||||
UnpublishedEntryMediaFile,
|
||||
AsyncLock,
|
||||
} from 'decap-cms-lib-util';
|
||||
import type { Semaphore } from 'semaphore';
|
||||
|
||||
const MAX_CONCURRENT_DOWNLOADS = 10;
|
||||
|
||||
export default class GitLab implements Implementation {
|
||||
// Async lock instance (created in the constructor via asyncLock()).
lock: AsyncLock;
// API client; null until `authenticate` runs, unless injected via options.
api: API | null;
options: {
  proxied: boolean;
  API: API | null;
  initialWorkflowStatus: string;
};
// 'owner/name' project path from the backend config.
repo: string;
// True when the branch was set explicitly in config (if false,
// `authenticate` resolves the repo's default branch).
isBranchConfigured: boolean;
branch: string;
apiRoot: string;
token: string | null;
squashMerges: boolean;
cmsLabelPrefix: string;
mediaFolder: string;
previewContext: string;
useGraphQL: boolean;
graphQLAPIRoot: string;

// Semaphore limiting concurrent media display-URL fetches — created lazily.
_mediaDisplayURLSem?: Semaphore;
|
||||
|
||||
/**
 * Configure the GitLab backend from the CMS config.
 *
 * @param config  Parsed CMS configuration; `config.backend.repo` is
 *                required unless running in proxied mode.
 * @param options May inject a prebuilt API client and an initial
 *                editorial-workflow status.
 * @throws Error when `repo` is missing and the backend is not proxied.
 */
constructor(config: Config, options = {}) {
  this.options = {
    proxied: false,
    API: null,
    initialWorkflowStatus: '',
    ...options,
  };

  // `== null` deliberately matches both null and undefined.
  if (!this.options.proxied && config.backend.repo == null) {
    throw new Error('The GitLab backend needs a "repo" in the backend configuration.');
  }

  this.api = this.options.API || null;

  this.repo = config.backend.repo || '';
  this.branch = config.backend.branch || 'master';
  // Remember whether the branch came from config; when it did not,
  // `authenticate` looks up the repo's default branch instead.
  this.isBranchConfigured = Boolean(config.backend.branch);
  this.apiRoot = config.backend.api_root || 'https://gitlab.com/api/v4';
  this.token = '';
  this.squashMerges = config.backend.squash_merges || false;
  this.cmsLabelPrefix = config.backend.cms_label_prefix || '';
  this.mediaFolder = config.media_folder;
  this.previewContext = config.backend.preview_context || '';
  this.useGraphQL = config.backend.use_graphql || false;
  this.graphQLAPIRoot = config.backend.graphql_api_root || 'https://gitlab.com/api/graphql';
  this.lock = asyncLock();
}
|
||||
|
||||
// Identifies this implementation as git-based.
isGitBackend() {
  return true;
}
|
||||
|
||||
async status() {
|
||||
const auth =
|
||||
(await this.api
|
||||
?.user()
|
||||
.then(user => !!user)
|
||||
.catch(e => {
|
||||
console.warn('Failed getting GitLab user', e);
|
||||
return false;
|
||||
})) || false;
|
||||
|
||||
return { auth: { status: auth }, api: { status: true, statusPage: '' } };
|
||||
}
|
||||
|
||||
authComponent() {
|
||||
return AuthenticationPage;
|
||||
}
|
||||
|
||||
restoreUser(user: User) {
|
||||
return this.authenticate(user);
|
||||
}
|
||||
|
||||
async authenticate(state: Credentials) {
|
||||
this.token = state.token as string;
|
||||
this.api = new API({
|
||||
token: this.token,
|
||||
branch: this.branch,
|
||||
repo: this.repo,
|
||||
apiRoot: this.apiRoot,
|
||||
squashMerges: this.squashMerges,
|
||||
cmsLabelPrefix: this.cmsLabelPrefix,
|
||||
initialWorkflowStatus: this.options.initialWorkflowStatus,
|
||||
useGraphQL: this.useGraphQL,
|
||||
graphQLAPIRoot: this.graphQLAPIRoot,
|
||||
});
|
||||
const user = await this.api.user();
|
||||
const isCollab = await this.api.hasWriteAccess().catch((error: Error) => {
|
||||
error.message = stripIndent`
|
||||
Repo "${this.repo}" not found.
|
||||
|
||||
Please ensure the repo information is spelled correctly.
|
||||
|
||||
If the repo is private, make sure you're logged into a GitLab account with access.
|
||||
`;
|
||||
throw error;
|
||||
});
|
||||
|
||||
// Unauthorized user
|
||||
if (!isCollab) {
|
||||
throw new Error('Your GitLab user account does not have access to this repo.');
|
||||
}
|
||||
|
||||
if (!this.isBranchConfigured) {
|
||||
const defaultBranchName = await getDefaultBranchName({
|
||||
backend: 'gitlab',
|
||||
repo: this.repo,
|
||||
token: this.token,
|
||||
apiRoot: this.apiRoot,
|
||||
});
|
||||
if (defaultBranchName) {
|
||||
this.branch = defaultBranchName;
|
||||
}
|
||||
}
|
||||
// Authorized user
|
||||
return { ...user, login: user.username, token: state.token as string };
|
||||
}
|
||||
|
||||
async logout() {
|
||||
this.token = null;
|
||||
return;
|
||||
}
|
||||
|
||||
getToken() {
|
||||
return Promise.resolve(this.token);
|
||||
}
|
||||
|
||||
filterFile(
|
||||
folder: string,
|
||||
file: { path: string; name: string },
|
||||
extension: string,
|
||||
depth: number,
|
||||
) {
|
||||
// gitlab paths include the root folder
|
||||
const fileFolder = trim(file.path.split(folder)[1] || '/', '/');
|
||||
return filterByExtension(file, extension) && fileFolder.split('/').length <= depth;
|
||||
}
|
||||
|
||||
async entriesByFolder(folder: string, extension: string, depth: number) {
|
||||
let cursor: Cursor;
|
||||
|
||||
const listFiles = () =>
|
||||
this.api!.listFiles(folder, depth > 1).then(({ files, cursor: c }) => {
|
||||
cursor = c.mergeMeta({ folder, extension, depth });
|
||||
return files.filter(file => this.filterFile(folder, file, extension, depth));
|
||||
});
|
||||
|
||||
const files = await entriesByFolder(
|
||||
listFiles,
|
||||
this.api!.readFile.bind(this.api!),
|
||||
this.api!.readFileMetadata.bind(this.api),
|
||||
API_NAME,
|
||||
);
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
files[CURSOR_COMPATIBILITY_SYMBOL] = cursor;
|
||||
return files;
|
||||
}
|
||||
|
||||
async listAllFiles(folder: string, extension: string, depth: number) {
|
||||
const files = await this.api!.listAllFiles(folder, depth > 1);
|
||||
const filtered = files.filter(file => this.filterFile(folder, file, extension, depth));
|
||||
return filtered;
|
||||
}
|
||||
|
||||
async allEntriesByFolder(folder: string, extension: string, depth: number) {
|
||||
const files = await allEntriesByFolder({
|
||||
listAllFiles: () => this.listAllFiles(folder, extension, depth),
|
||||
readFile: this.api!.readFile.bind(this.api!),
|
||||
readFileMetadata: this.api!.readFileMetadata.bind(this.api),
|
||||
apiName: API_NAME,
|
||||
branch: this.branch,
|
||||
localForage,
|
||||
folder,
|
||||
extension,
|
||||
depth,
|
||||
getDefaultBranch: () =>
|
||||
this.api!.getDefaultBranch().then(b => ({ name: b.name, sha: b.commit.id })),
|
||||
isShaExistsInBranch: this.api!.isShaExistsInBranch.bind(this.api!),
|
||||
getDifferences: (to, from) => this.api!.getDifferences(to, from),
|
||||
getFileId: path => this.api!.getFileId(path, this.branch),
|
||||
filterFile: file => this.filterFile(folder, file, extension, depth),
|
||||
customFetch: this.useGraphQL ? files => this.api!.readFilesGraphQL(files) : undefined,
|
||||
});
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
entriesByFiles(files: ImplementationFile[]) {
|
||||
return entriesByFiles(
|
||||
files,
|
||||
this.api!.readFile.bind(this.api!),
|
||||
this.api!.readFileMetadata.bind(this.api),
|
||||
API_NAME,
|
||||
);
|
||||
}
|
||||
|
||||
// Fetches a single entry.
|
||||
getEntry(path: string) {
|
||||
return this.api!.readFile(path).then(data => ({
|
||||
file: { path, id: null },
|
||||
data: data as string,
|
||||
}));
|
||||
}
|
||||
|
||||
getMedia(mediaFolder = this.mediaFolder) {
|
||||
return this.api!.listAllFiles(mediaFolder).then(files =>
|
||||
files.map(({ id, name, path }) => {
|
||||
return { id, name, path, displayURL: { id, name, path } };
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
getMediaDisplayURL(displayURL: DisplayURL) {
|
||||
this._mediaDisplayURLSem = this._mediaDisplayURLSem || semaphore(MAX_CONCURRENT_DOWNLOADS);
|
||||
return getMediaDisplayURL(
|
||||
displayURL,
|
||||
this.api!.readFile.bind(this.api!),
|
||||
this._mediaDisplayURLSem,
|
||||
);
|
||||
}
|
||||
|
||||
async getMediaFile(path: string) {
|
||||
const name = basename(path);
|
||||
const blob = await getMediaAsBlob(path, null, this.api!.readFile.bind(this.api!));
|
||||
const fileObj = blobToFileObj(name, blob);
|
||||
const url = URL.createObjectURL(fileObj);
|
||||
const id = await getBlobSHA(blob);
|
||||
|
||||
return {
|
||||
id,
|
||||
displayURL: url,
|
||||
path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
url,
|
||||
};
|
||||
}
|
||||
|
||||
async persistEntry(entry: Entry, options: PersistOptions) {
|
||||
// persistEntry is a transactional operation
|
||||
return runWithLock(
|
||||
this.lock,
|
||||
() => this.api!.persistFiles(entry.dataFiles, entry.assets, options),
|
||||
'Failed to acquire persist entry lock',
|
||||
);
|
||||
}
|
||||
|
||||
async persistMedia(mediaFile: AssetProxy, options: PersistOptions) {
|
||||
const fileObj = mediaFile.fileObj as File;
|
||||
|
||||
const [id] = await Promise.all([
|
||||
getBlobSHA(fileObj),
|
||||
this.api!.persistFiles([], [mediaFile], options),
|
||||
]);
|
||||
|
||||
const { path } = mediaFile;
|
||||
const url = URL.createObjectURL(fileObj);
|
||||
|
||||
return {
|
||||
displayURL: url,
|
||||
path: trimStart(path, '/'),
|
||||
name: fileObj!.name,
|
||||
size: fileObj!.size,
|
||||
file: fileObj,
|
||||
url,
|
||||
id,
|
||||
};
|
||||
}
|
||||
|
||||
deleteFiles(paths: string[], commitMessage: string) {
|
||||
return this.api!.deleteFiles(paths, commitMessage);
|
||||
}
|
||||
|
||||
traverseCursor(cursor: Cursor, action: string) {
|
||||
return this.api!.traverseCursor(cursor, action).then(async ({ entries, cursor: newCursor }) => {
|
||||
const [folder, depth, extension] = [
|
||||
cursor.meta?.get('folder') as string,
|
||||
cursor.meta?.get('depth') as number,
|
||||
cursor.meta?.get('extension') as string,
|
||||
];
|
||||
if (folder && depth && extension) {
|
||||
entries = entries.filter(f => this.filterFile(folder, f, extension, depth));
|
||||
newCursor = newCursor.mergeMeta({ folder, extension, depth });
|
||||
}
|
||||
const entriesWithData = await entriesByFiles(
|
||||
entries,
|
||||
this.api!.readFile.bind(this.api!),
|
||||
this.api!.readFileMetadata.bind(this.api)!,
|
||||
API_NAME,
|
||||
);
|
||||
return {
|
||||
entries: entriesWithData,
|
||||
cursor: newCursor,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
loadMediaFile(branch: string, file: UnpublishedEntryMediaFile) {
|
||||
const readFile = (
|
||||
path: string,
|
||||
id: string | null | undefined,
|
||||
{ parseText }: { parseText: boolean },
|
||||
) => this.api!.readFile(path, id, { branch, parseText });
|
||||
|
||||
return getMediaAsBlob(file.path, null, readFile).then(blob => {
|
||||
const name = basename(file.path);
|
||||
const fileObj = blobToFileObj(name, blob);
|
||||
return {
|
||||
id: file.path,
|
||||
displayURL: URL.createObjectURL(fileObj),
|
||||
path: file.path,
|
||||
name,
|
||||
size: fileObj.size,
|
||||
file: fileObj,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
async loadEntryMediaFiles(branch: string, files: UnpublishedEntryMediaFile[]) {
|
||||
const mediaFiles = await Promise.all(files.map(file => this.loadMediaFile(branch, file)));
|
||||
|
||||
return mediaFiles;
|
||||
}
|
||||
|
||||
async unpublishedEntries() {
|
||||
const listEntriesKeys = () =>
|
||||
this.api!.listUnpublishedBranches().then(branches =>
|
||||
branches.map(branch => contentKeyFromBranch(branch)),
|
||||
);
|
||||
|
||||
const ids = await unpublishedEntries(listEntriesKeys);
|
||||
return ids;
|
||||
}
|
||||
|
||||
async unpublishedEntry({
|
||||
id,
|
||||
collection,
|
||||
slug,
|
||||
}: {
|
||||
id?: string;
|
||||
collection?: string;
|
||||
slug?: string;
|
||||
}) {
|
||||
if (id) {
|
||||
const data = await this.api!.retrieveUnpublishedEntryData(id);
|
||||
return data;
|
||||
} else if (collection && slug) {
|
||||
const entryId = generateContentKey(collection, slug);
|
||||
const data = await this.api!.retrieveUnpublishedEntryData(entryId);
|
||||
return data;
|
||||
} else {
|
||||
throw new Error('Missing unpublished entry id or collection and slug');
|
||||
}
|
||||
}
|
||||
|
||||
getBranch(collection: string, slug: string) {
|
||||
const contentKey = generateContentKey(collection, slug);
|
||||
const branch = branchFromContentKey(contentKey);
|
||||
return branch;
|
||||
}
|
||||
|
||||
async unpublishedEntryDataFile(collection: string, slug: string, path: string, id: string) {
|
||||
const branch = this.getBranch(collection, slug);
|
||||
const data = (await this.api!.readFile(path, id, { branch })) as string;
|
||||
return data;
|
||||
}
|
||||
|
||||
async unpublishedEntryMediaFile(collection: string, slug: string, path: string, id: string) {
|
||||
const branch = this.getBranch(collection, slug);
|
||||
const mediaFile = await this.loadMediaFile(branch, { path, id });
|
||||
return mediaFile;
|
||||
}
|
||||
|
||||
async updateUnpublishedEntryStatus(collection: string, slug: string, newStatus: string) {
|
||||
// updateUnpublishedEntryStatus is a transactional operation
|
||||
return runWithLock(
|
||||
this.lock,
|
||||
() => this.api!.updateUnpublishedEntryStatus(collection, slug, newStatus),
|
||||
'Failed to acquire update entry status lock',
|
||||
);
|
||||
}
|
||||
|
||||
async deleteUnpublishedEntry(collection: string, slug: string) {
|
||||
// deleteUnpublishedEntry is a transactional operation
|
||||
return runWithLock(
|
||||
this.lock,
|
||||
() => this.api!.deleteUnpublishedEntry(collection, slug),
|
||||
'Failed to acquire delete entry lock',
|
||||
);
|
||||
}
|
||||
|
||||
async publishUnpublishedEntry(collection: string, slug: string) {
|
||||
// publishUnpublishedEntry is a transactional operation
|
||||
return runWithLock(
|
||||
this.lock,
|
||||
() => this.api!.publishUnpublishedEntry(collection, slug),
|
||||
'Failed to acquire publish entry lock',
|
||||
);
|
||||
}
|
||||
|
||||
async getDeployPreview(collection: string, slug: string) {
|
||||
try {
|
||||
const statuses = await this.api!.getStatuses(collection, slug);
|
||||
const deployStatus = getPreviewStatus(statuses, this.previewContext);
|
||||
|
||||
if (deployStatus) {
|
||||
const { target_url: url, state } = deployStatus;
|
||||
return { url, status: state };
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
} catch (e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
// ===== node_modules/decap-cms-backend-gitlab/src/index.ts =====
import GitLabBackend from './implementation';
|
||||
import API from './API';
|
||||
import AuthenticationPage from './AuthenticationPage';
|
||||
|
||||
export const DecapCmsBackendGitlab = {
|
||||
GitLabBackend,
|
||||
API,
|
||||
AuthenticationPage,
|
||||
};
|
||||
export { GitLabBackend, API, AuthenticationPage };
// ===== node_modules/decap-cms-backend-gitlab/src/queries.ts =====
import { gql } from 'graphql-tag';
import { oneLine } from 'common-tags';
export const files = gql`
|
||||
query files($repo: ID!, $branch: String!, $path: String!, $recursive: Boolean!, $cursor: String) {
|
||||
project(fullPath: $repo) {
|
||||
repository {
|
||||
tree(ref: $branch, path: $path, recursive: $recursive) {
|
||||
blobs(after: $cursor) {
|
||||
nodes {
|
||||
type
|
||||
id: sha
|
||||
path
|
||||
name
|
||||
}
|
||||
pageInfo {
|
||||
endCursor
|
||||
hasNextPage
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export const blobs = gql`
|
||||
query blobs($repo: ID!, $branch: String!, $paths: [String!]!) {
|
||||
project(fullPath: $repo) {
|
||||
repository {
|
||||
blobs(ref: $branch, paths: $paths) {
|
||||
nodes {
|
||||
id
|
||||
data: rawBlob
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
export function lastCommits(paths: string[]) {
|
||||
const tree = paths
|
||||
.map(
|
||||
(path, index) => oneLine`
|
||||
tree${index}: tree(ref: $branch, path: "${path}") {
|
||||
lastCommit {
|
||||
authorName
|
||||
authoredDate
|
||||
author {
|
||||
id
|
||||
username
|
||||
name
|
||||
publicEmail
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
)
|
||||
.join('\n');
|
||||
|
||||
const query = gql`
|
||||
query lastCommits($repo: ID!, $branch: String!) {
|
||||
project(fullPath: $repo) {
|
||||
repository {
|
||||
${tree}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
return query;
|
||||
}