Skip to content

Commit

Permalink
Merge pull request #1640 from serlo/staging
Browse files Browse the repository at this point in the history
Deployment
  • Loading branch information
hugotiburtino authored Jul 5, 2024
2 parents dc9bf3d + 5973328 commit b3db9b1
Show file tree
Hide file tree
Showing 25 changed files with 407 additions and 68 deletions.
1 change: 1 addition & 0 deletions .env
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ SERVER_KRATOS_DB_URI=postgres://serlo:secret@localhost:5432/kratos?sslmode=disab
SERVER_SERLO_CLOUDFLARE_WORKER_SECRET=api.serlo.org-playground-secret
SERVER_SERLO_CACHE_WORKER_SECRET=api.serlo.org-cache-worker-secret
SERVER_SERLO_NOTIFICATION_EMAIL_SERVICE_SECRET=api.serlo.org-notification-email-service-secret
SERVER_SERLO_EDITOR_TESTING_SECRET=api.serlo.org-serlo-editor-testing-secret
SERVER_SWR_QUEUE_DASHBOARD_PASSWORD=secret
SERVER_SWR_QUEUE_DASHBOARD_USERNAME=secret

Expand Down
17 changes: 17 additions & 0 deletions .github/workflows/push-migration-image.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Builds the DB migration Docker image and pushes it to the container registry.
name: Push DB migration image
on:
  # not a problem if we do it at every push because it will check if the image already exists
  push:

jobs:
  docker-image:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: serlo/configure-repositories/actions/setup-node@main
      # Authenticate to Google Cloud so docker can push to the registry.
      - uses: google-github-actions/auth@v2
        with:
          credentials_json: '${{ secrets.GCP_KEY_CONTAINER_REGISTRY }}'
      - run: gcloud auth configure-docker
      - uses: google-github-actions/setup-gcloud@v2
      # Delegates build/tag/push logic to the migrate:push-image script.
      - run: yarn migrate:push-image
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
67 changes: 42 additions & 25 deletions __tests__/schema/media.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,35 +2,52 @@ import gql from 'graphql-tag'

import { user } from '../../__fixtures__'
import { Client } from '../__utils__'
import { Service } from '~/context/service'

const query = new Client({ userId: user.id }).prepareQuery({
query: gql`
query {
media {
newUpload(mediaType: IMAGE_PNG) {
uploadUrl
urlAfterUpload
}
}
}
`,
test('returns url for uploading media file', async () => {
const query = setupQuery()
await query.shouldReturnData({
media: {
newUpload: {
uploadUrl: 'http://google.com/upload',
urlAfterUpload: expect.stringMatching(
/https:\/\/assets.serlo.org\/[\d\-a-f]+\/image.png/,
) as unknown,
},
},
})
})

describe('media.upload', () => {
test('returns url for uploading media file', async () => {
await query.shouldReturnData({
media: {
newUpload: {
uploadUrl: 'http://google.com/upload',
urlAfterUpload: expect.stringMatching(
/https:\/\/assets.serlo.org\/[\d\-a-f]+\/image.png/,
) as unknown,
},
test('returns url for uploading media file for Serlo Editor testing', async () => {
const query = setupQuery({ service: Service.SerloEditorTesting })
await query.shouldReturnData({
media: {
newUpload: {
uploadUrl: 'http://google.com/upload',
urlAfterUpload: expect.stringMatching(
/https:\/\/storage.googleapis.com\/serlo-editor-testing\/[\d\-a-f]+.png/,
) as unknown,
},
})
},
})
})

test('fails for unauthenticated user', async () => {
await query.forUnauthenticatedUser().shouldFailWithError('UNAUTHENTICATED')
})
test('fails for unauthenticated user', async () => {
const query = setupQuery()
await query.forUnauthenticatedUser().shouldFailWithError('UNAUTHENTICATED')
})

function setupQuery(options: { service?: Service } = {}) {
return new Client({ userId: user.id, ...options }).prepareQuery({
query: gql`
query {
media {
newUpload(mediaType: IMAGE_PNG) {
uploadUrl
urlAfterUpload
}
}
}
`,
})
}
2 changes: 2 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,8 @@
"migrate:run": "packages/db-migrations/scripts/run_migrations.sh",
"migrate:all": "lerna run --stream migrate:all",
"migrate:up": "lerna run --stream migrate:up",
"migrate:build": "lerna run --stream build:all",
"migrate:push-image": "lerna run --stream push-image",
"mysql": "docker compose exec mysql serlo-mysql",
"mysql:delete-last-migration": "yarn mysql --execute 'DELETE FROM migrations ORDER BY id DESC LIMIT 1'",
"mysql:delete-all-migrations": "yarn mysql --execute 'DELETE FROM migrations'",
Expand Down
21 changes: 21 additions & 0 deletions packages/db-migrations/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Shared base layer: Node 20 on alpine with yarn 3 enabled via corepack.
FROM node:20-alpine AS base_image
WORKDIR /app
COPY package.json .
RUN corepack enable
RUN yarn set version 3.x

# Build stage: installs all deps and compiles the migration sources.
# NOTE(review): /app/migrations is copied out of this stage below — presumably
# `yarn build:all` emits it; confirm against the package's build scripts.
FROM base_image AS build_migrations
COPY scripts scripts
COPY src src
RUN yarn
RUN yarn build:all

# Runtime stage: production-only dependencies plus the compiled migrations.
FROM base_image AS runner
RUN yarn plugin import workspace-tools
RUN yarn workspaces focus --production
COPY --from=build_migrations /app/migrations migrations
COPY migrations/package.json migrations/package.json
COPY database.json .

# Default action is "up" (apply pending migrations); override CMD for others.
ENTRYPOINT ["yarn", "db-migrate"]
CMD ["up"]
2 changes: 2 additions & 0 deletions packages/db-migrations/package.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
{
"name": "@serlo/db-migrations",
"version": "1.1.0-staging.4",
"private": true,
"license": "Apache-2.0",
"type": "module",
Expand All @@ -12,6 +13,7 @@
"migrate:run": "scripts/run_migrations.sh",
"migrate:all": "npm-run-all build:all migrate:up",
"migrate:up": "yarn db-migrate -m migrations up",
"push-image": "tsx scripts/push-image.ts",
"new": "./scripts/touch_file.sh",
"test": "node --experimental-vm-modules node_modules/jest/bin/jest.js"
},
Expand Down
128 changes: 128 additions & 0 deletions packages/db-migrations/scripts/push-image.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
import { spawnSync } from 'child_process'
import * as fs from 'fs'
import * as path from 'path'
import * as R from 'ramda'
import * as semver from 'semver'
import { fileURLToPath } from 'url'
import * as util from 'util'

// ESM has no __dirname, so derive this script's directory from import.meta.url.
const __dirname = path.dirname(fileURLToPath(import.meta.url))
// Package root (one level above scripts/), where package.json and Dockerfile live.
const root = path.join(__dirname, '..')
const packageJsonPath = path.join(root, 'package.json')

// Read files as UTF-8 strings rather than Buffers.
const fsOptions: { encoding: BufferEncoding } = { encoding: 'utf-8' }

const readFile = util.promisify(fs.readFile)

// Entry point; `void` marks the promise as intentionally fire-and-forget.
void run()

/** Reads the package version and builds/pushes the migration image for it. */
async function run() {
  const { version } = await fetchPackageJSON()

  const options = {
    name: 'api-db-migration',
    version,
    Dockerfile: path.join(root, 'Dockerfile'),
    context: '.',
  }

  buildDockerImage(options)
}

/**
 * Reads and parses the package.json of this package.
 *
 * Validates that a string `version` field is present instead of blindly
 * trusting `JSON.parse` (the previous version needed an eslint-disable for
 * the unsafe return).
 *
 * @returns the parsed package.json, narrowed to the `version` field
 * @throws when the file is not valid JSON or `version` is missing/not a string
 */
async function fetchPackageJSON(): Promise<{ version: string }> {
  const raw = await readFile(packageJsonPath, fsOptions)
  const parsed: unknown = JSON.parse(raw)

  if (
    typeof parsed !== 'object' ||
    parsed === null ||
    typeof (parsed as { version?: unknown }).version !== 'string'
  ) {
    throw new Error(`"version" missing or not a string in ${packageJsonPath}`)
  }

  return parsed as { version: string }
}

/**
 * Builds the Docker image for the DB migrations and pushes it to the
 * serlo-shared registry, tagged with every applicable version (see
 * getTargetVersions). Skips everything when an image with this exact version
 * tag already exists in the registry.
 *
 * @throws when `version` is not valid semver, when the registry query fails,
 *   or when `docker build` / `docker push` exits non-zero
 */
export function buildDockerImage({
  name,
  version,
  Dockerfile,
  context,
}: {
  name: string
  version: string
  Dockerfile: string
  context: string
}) {
  const semanticVersion = semver.parse(version)

  if (!semanticVersion) {
    throw new Error(`illegal version number ${version}`)
  }

  const remoteName = `eu.gcr.io/serlo-shared/${name}`

  if (!shouldBuild()) {
    // eslint-disable-next-line no-console
    console.log(
      `Skipping deployment: ${remoteName}:${version} already in registry`,
    )
    return
  }

  const versions = getTargetVersions(semanticVersion).map((t) => t.toString())

  runBuild(versions)
  pushTags(versions)

  // Queries the registry; true when no image with this exact tag exists yet.
  function shouldBuild() {
    const args = [
      'container',
      'images',
      'list-tags',
      remoteName,
      '--filter',
      `tags=${version}`,
      '--format',
      'json',
    ]

    const result = spawnSync('gcloud', args, { stdio: 'pipe' })

    // Fail loudly when gcloud itself failed — otherwise JSON.parse below
    // would throw a confusing SyntaxError on empty or garbage stdout.
    if (result.error) throw result.error
    if (result.status !== 0) {
      throw new Error(
        `gcloud exited with status ${String(result.status)}: ${String(
          result.stderr,
        )}`,
      )
    }

    const images: unknown = JSON.parse(String(result.stdout))

    if (!Array.isArray(images))
      throw new Error('Wrong response from google cloud')

    return images.length === 0
  }

  // Builds the image once, tagging it with all local and remote tags.
  function runBuild(versions: string[]) {
    const tags = [...toTags(name, versions), ...toTags(remoteName, versions)]
    const args = [
      'build',
      '-f',
      Dockerfile,
      ...tags.flatMap((tag) => ['-t', tag]),
      context,
    ]
    const result = spawnSync('docker', args, { stdio: 'inherit' })

    if (result.status !== 0) throw new Error(`Error while building ${name}`)
  }

  // Pushes each remote tag individually so a failure names the exact tag.
  function pushTags(versions: string[]) {
    toTags(remoteName, versions).forEach((remoteTag) => {
      // eslint-disable-next-line no-console
      console.log('Pushing', remoteTag)
      const result = spawnSync('docker', ['push', remoteTag], {
        stdio: 'inherit',
      })
      if (result.status !== 0)
        throw new Error(`Error while pushing ${remoteTag}`)
    })
  }
}

/**
 * Computes all version strings an image of `version` should be tagged with.
 *
 * Stable release (no prerelease part): every precision level plus "latest",
 * e.g. 1.2.3 -> ["latest", "1", "1.2", "1.2.3"].
 * Prerelease: one tag per prerelease prefix,
 * e.g. 1.2.3-staging.4 -> ["1.2.3-staging", "1.2.3-staging.4"].
 */
function getTargetVersions(version: semver.SemVer) {
  const { major, minor, patch, prerelease } = version

  if (prerelease.length === 0) {
    return [
      'latest',
      `${major}`,
      `${major}.${minor}`,
      `${major}.${minor}.${patch}`,
    ]
  }

  const tags: string[] = []
  for (let length = 1; length <= prerelease.length; length++) {
    const suffix = prerelease.slice(0, length).join('.')
    tags.push(`${major}.${minor}.${patch}-${suffix}`)
  }
  return tags
}

/** Combines an image name with each version into "name:version" tag strings. */
function toTags(name: string, versions: string[]) {
  const tags: string[] = []
  for (const version of versions) {
    tags.push(`${name}:${version}`)
  }
  return tags
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
import * as t from 'io-ts'
import * as R from 'ramda'

import {
ApiCache,
Database,
migrateSerloEditorContent,
transformPlugins,
} from './utils'

// Runtime codec for a Serlo editor text plugin: { plugin: 'text', state: [...] }.
const TextPlugin = t.type({
  plugin: t.literal('text'),
  state: t.array(t.unknown),
})

// Row shape returned by the course-page SQL query in `up`.
interface CoursePage {
  coursePageId: number
  courseId: number
}

// Runtime codec for an anchor element inside text-plugin state.
const Link = t.type({
  type: t.literal('a'),
  href: t.string,
  children: t.array(t.unknown),
})

/**
 * Migration: rewrites editor links that point directly at course pages so
 * they point at the parent course instead (see replaceLinks).
 */
export async function up(db: Database) {
  // Collects touched API cache keys so they can be purged at the end.
  const apiCache = new ApiCache()

  // All non-trashed course pages with an accepted revision, paired with their
  // parent course id via entity_link.
  // NOTE(review): type_id = 8 presumably identifies the course-page entity
  // type — confirm against the entity type table.
  const coursePages = await db.runSql<CoursePage[]>(`
    SELECT
      entity.id AS coursePageId,
      ent2.id AS courseId
    FROM entity
    JOIN entity_link ON entity.id = entity_link.child_id
    JOIN entity ent2 ON entity_link.parent_id = ent2.id
    JOIN uuid ON entity.id = uuid.id
    WHERE entity.type_id = 8
    AND uuid.trashed = 0
    AND entity.current_revision_id IS NOT NULL
  `)

  await migrateSerloEditorContent({
    apiCache,
    db,
    migrationName: 'replace-links-to-course-pages',
    migrateState: transformPlugins({
      text: (plugin) => {
        // Returning undefined leaves the plugin unchanged.
        if (!TextPlugin.is(plugin)) return undefined

        const pluginState = plugin.state
        if (!pluginState || !pluginState.length) return undefined

        // Work on a deep copy so the original state can be diffed below.
        const clonedState = structuredClone(pluginState)

        replaceLinks(clonedState, coursePages)

        // Only emit a replacement when a link was actually rewritten.
        if (!R.equals(clonedState, pluginState)) {
          return [{ ...plugin, state: clonedState }]
        }

        return [plugin]
      },
    }),
  })

  // Purge collected cache keys and close the connection.
  await apiCache.deleteKeysAndQuit()
}

/**
 * Recursively walks `object` and rewrites hrefs that point at a course page so
 * they point at the parent course: the first page of a course becomes
 * `/<courseId>`, later pages become `/<courseId>#<coursePageId>`.
 * Mutates `object` in place.
 *
 * @param object - editor state (already cloned by the caller) to rewrite
 * @param coursePages - course pages with their parent course ids
 * @param firstPages - memoized courseId -> smallest coursePageId lookup;
 *   computed once on the outermost call and reused in recursion (the previous
 *   version re-filtered and re-sorted coursePages for every matching link)
 */
function replaceLinks(
  object: object,
  coursePages: CoursePage[],
  firstPages: Map<number, number> = computeFirstPages(coursePages),
) {
  if (Link.is(object)) {
    const startsWithSlash = object.href.at(0) === '/'
    const containsSerlo = object.href.includes('serlo')
    const isAnAttachment = object.href.startsWith('/attachment/')

    if ((startsWithSlash || containsSerlo) && !isAnAttachment) {
      for (const { coursePageId, courseId } of coursePages) {
        // Match "/<id>" only at a path-segment boundary so /123 never
        // matches inside /1234.
        const regex = new RegExp(`/${coursePageId}(?:/|$)`)

        if (regex.test(object.href)) {
          object.href =
            firstPages.get(courseId) === coursePageId
              ? `/${courseId}`
              : `/${courseId}#${coursePageId}`
        }
      }
    }
  }

  Object.values(object).forEach((value: unknown) => {
    if (typeof value === 'object' && value !== null) {
      replaceLinks(value, coursePages, firstPages)
    }
  })
}

/** Returns, per courseId, the smallest coursePageId (= the course's first page). */
function computeFirstPages(coursePages: CoursePage[]): Map<number, number> {
  const firstPages = new Map<number, number>()
  for (const { courseId, coursePageId } of coursePages) {
    const current = firstPages.get(courseId)
    if (current === undefined || coursePageId < current) {
      firstPages.set(courseId, coursePageId)
    }
  }
  return firstPages
}
2 changes: 1 addition & 1 deletion packages/db-migrations/src/utils/api-cache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ export class ApiCache {
constructor() {
this.keys = new Set()

if (typeof process.env.REDIS_URL === 'string') {
if (process.env.REDIS_URL) {
this.redis = new Redis(process.env.REDIS_URL)

this.redis.on('error', (err) => {
Expand Down
Loading

0 comments on commit b3db9b1

Please sign in to comment.