Compare commits
24 Commits
b90b0db806
...
main
Author | SHA1 | Date | |
---|---|---|---|
e4b9e91688 | |||
fa63ef58d2 | |||
8c2ed8ba7b | |||
414a53bd5d | |||
c964df3fbe | |||
c75c73995c | |||
56bdb7d6de | |||
725af11403 | |||
459f1588e0 | |||
f2afe60c6c | |||
324ee0d938 | |||
b7cd26ec27 | |||
325b62634a | |||
5c72daf88c | |||
f8a3cb48dd | |||
4cf6bf1c9b | |||
c921a9cf9f | |||
54ebd69e0b | |||
5ba3d36f0d | |||
322830ab73 | |||
d4455cdd2a | |||
d2a062c1bd | |||
addbee053e | |||
1cda00dcd3 |
86
.woodpecker/build.yml
Normal file
86
.woodpecker/build.yml
Normal file
@ -0,0 +1,86 @@
|
||||
---
|
||||
# https://woodpecker-ci.org/docs/intro
|
||||
|
||||
when:
|
||||
event: [push, manual]
|
||||
|
||||
variables:
|
||||
|
||||
images:
|
||||
- &image_node node:18.16.0
|
||||
- &image_aws_cli amazon/aws-cli:2.15.50
|
||||
- &image_drone_cache meltwater/drone-cache:v1.4.0
|
||||
|
||||
deploy_envs: &deploy_envs
|
||||
S3_BUCKET:
|
||||
from_secret: S3_BUCKET
|
||||
CLOUDFRONT_DISTRIBUTION:
|
||||
from_secret: CLOUDFRONT_DISTRIBUTION
|
||||
AWS_ACCESS_KEY_ID:
|
||||
from_secret: AWS_ACCESS_KEY_ID
|
||||
AWS_SECRET_ACCESS_KEY:
|
||||
from_secret: AWS_SECRET_ACCESS_KEY
|
||||
AWS_REGION:
|
||||
from_secret: AWS_REGION
|
||||
|
||||
cache_settings: &cache_settings
|
||||
access-key:
|
||||
from_secret: CACHE_S3_ACCESS_KEY_ID
|
||||
secret-key:
|
||||
from_secret: CACHE_S3_SECRET_ACCESS_KEY
|
||||
endpoint:
|
||||
from_secret: CACHE_S3_ENDPOINT
|
||||
region:
|
||||
from_secret: CACHE_S3_REGION
|
||||
bucket:
|
||||
from_secret: CACHE_S3_BUCKET
|
||||
path_style:
|
||||
from_secret: CACHE_S3_USE_PATH_STYLE
|
||||
cache_key: '{{ checksum "yarn.lock" }}'
|
||||
archive_format: zstd
|
||||
mount:
|
||||
- 'node_modules'
|
||||
|
||||
steps:
|
||||
|
||||
- name: cache/restore
|
||||
image: *image_drone_cache
|
||||
settings:
|
||||
<<: *cache_settings
|
||||
restore: true
|
||||
|
||||
- name: build/nodejs
|
||||
depends_on: [cache/restore]
|
||||
image: *image_node
|
||||
commands:
|
||||
- corepack enable yarn
|
||||
- yarn install --prefer-offline
|
||||
- yarn run build
|
||||
|
||||
- name: cache/rebuild
|
||||
depends_on: [build/nodejs]
|
||||
image: *image_drone_cache
|
||||
settings:
|
||||
<<: *cache_settings
|
||||
rebuild: true
|
||||
|
||||
- name: publish/s3
|
||||
depends_on: [build/nodejs]
|
||||
when:
|
||||
- branch: main
|
||||
image: *image_aws_cli
|
||||
environment:
|
||||
<<: *deploy_envs
|
||||
commands:
|
||||
- aws s3 sync --delete ./dist "s3://$${S3_BUCKET}/"
|
||||
|
||||
- name: publish/cloudfront
|
||||
depends_on: [publish/s3]
|
||||
when:
|
||||
- branch: main
|
||||
image: *image_aws_cli
|
||||
environment:
|
||||
<<: *deploy_envs
|
||||
commands:
|
||||
- aws cloudfront create-invalidation --distribution-id
|
||||
"$${CLOUDFRONT_DISTRIBUTION}" --paths '/*'
|
@ -1,38 +0,0 @@
|
||||
# https://woodpecker-ci.org/docs/intro
|
||||
|
||||
when:
|
||||
event: [push, manual]
|
||||
branch: main
|
||||
|
||||
variables:
|
||||
secrets: &secrets
|
||||
- S3_BUCKET
|
||||
- CLOUDFRONT_DISTRIBUTION
|
||||
- AWS_ACCESS_KEY_ID
|
||||
- AWS_SECRET_ACCESS_KEY
|
||||
- AWS_REGION
|
||||
|
||||
|
||||
steps:
|
||||
- name: build/yarn
|
||||
image: node:18
|
||||
# when:
|
||||
# - path:
|
||||
# exclude: ['_meta/**']
|
||||
commands:
|
||||
- yarn install
|
||||
- yarn build
|
||||
|
||||
- name: publish/s3
|
||||
image: amazon/aws-cli:2.15.50
|
||||
commands:
|
||||
- aws s3 sync --delete ./dist s3://$${S3_BUCKET}/
|
||||
secrets:
|
||||
- <<: *secrets
|
||||
|
||||
- name: publish/cloudfront
|
||||
image: amazon/aws-cli:2.15.50
|
||||
commands:
|
||||
- aws cloudfront create-invalidation --distribution-id $${CLOUDFRONT_DISTRIBUTION} --paths '/*'
|
||||
secrets:
|
||||
- <<: *secrets
|
@ -44,6 +44,12 @@ $ ls dist
|
||||
|
||||
- `/meta/iac`: terraform iac config for the backend services hosting the rendered site.
|
||||
|
||||
## deployment
|
||||
|
||||
this project deploys to s3+cloudfront with woodpecker ci.
|
||||
|
||||
backend infrastructure for the deployment is defined here: [resume-manpage-iac](https://git.bdeshi.space/bdeshi/resume-manpage-iac)
|
||||
|
||||
---
|
||||
|
||||
[pug]: https://pugjs.org
|
||||
|
@ -1,15 +0,0 @@
|
||||
export TF_WORKSPACE=production
|
||||
|
||||
export TF_TOKEN_app_terraform_io=***
|
||||
export TF_CLOUD_ORGANIZATION=***
|
||||
export TF_CLOUD_PROJECT=***
|
||||
|
||||
export AWS_PROFILE=***
|
||||
export AWS_ACCESS_KEY_ID=***
|
||||
export AWS_SECRET_ACCESS_KEY=***
|
||||
|
||||
export WOODPECKER_SERVER=***
|
||||
export WOODPECKER_TOKEN=***
|
||||
|
||||
export GITEA_BASE_URL=***
|
||||
export GITEA_TOKEN=***
|
34
_meta/iac/.gitignore
vendored
34
_meta/iac/.gitignore
vendored
@ -1,34 +0,0 @@
|
||||
# Local .terraform directories
|
||||
**/.terraform/*
|
||||
|
||||
# .tfstate files
|
||||
*.tfstate
|
||||
*.tfstate.*
|
||||
|
||||
# Crash log files
|
||||
crash.log
|
||||
crash.*.log
|
||||
|
||||
# Exclude all .tfvars files, which are likely to contain sensitive data, such as
|
||||
# password, private keys, and other secrets. These should not be part of version
|
||||
# control as they are data points which are potentially sensitive and subject
|
||||
# to change depending on the environment.
|
||||
*.tfvars
|
||||
*.tfvars.json
|
||||
|
||||
# Ignore override files as they are usually used to override resources locally and so
|
||||
# are not checked in
|
||||
override.tf
|
||||
override.tf.json
|
||||
*_override.tf
|
||||
*_override.tf.json
|
||||
|
||||
# Include override files you do wish to add to version control using negated pattern
|
||||
# !example_override.tf
|
||||
|
||||
# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
|
||||
# example: *tfplan*
|
||||
|
||||
# Ignore CLI configuration files
|
||||
.terraformrc
|
||||
terraform.rc
|
@ -1 +0,0 @@
|
||||
1.8.3
|
71
_meta/iac/.terraform.lock.hcl
generated
71
_meta/iac/.terraform.lock.hcl
generated
@ -1,71 +0,0 @@
|
||||
# This file is maintained automatically by "terraform init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/go-gitea/gitea" {
|
||||
version = "0.3.0"
|
||||
constraints = "~> 0.3.0"
|
||||
hashes = [
|
||||
"h1:9kI/rtwDrBt0Km055WJswN+PeGegoEov+1ZmyQ3QxAA=",
|
||||
"zh:37e9c35f76a5fa71b7864aa6af45c380463b5ea2afd162109f9960bf33f7b93e",
|
||||
"zh:4496717687dea48de96db815def8b2144b46c5c8a885c139dd45d5ddc6d13f4e",
|
||||
"zh:4875b3e9092d4f15678f7a605469c144bf298b05c8f8527bb27b1fdf6cb6fba0",
|
||||
"zh:51f15e0ef905619eb7236bbbdebd81f70f5e024c025a347b0224ed95c5103668",
|
||||
"zh:5779e9276a20c294710ec57397c06fb3afd9bffd28a5de8189fd7af1ed925ea9",
|
||||
"zh:63c2ec086260a2e15c9e77ca49344a56e4b86d52b3f502941c9562aa12345887",
|
||||
"zh:728fd15b2f3ec1c60ad45a996bac98022198078d0368507516f3a0526fd6c503",
|
||||
"zh:7951a3bf904f836c73b00263d2f057f5ffc123c2946508a57ca2d2a1dc3874ab",
|
||||
"zh:8495b9e6f6ae9f49b8e80fe3ccf47f1f942745c21fa30648e98aa6fe41a647d9",
|
||||
"zh:862888963677516379a34c4dbb2396810e1a0ac2e644704d692e4f847d487f55",
|
||||
"zh:8b1e1badf2ea6c4fcfdf71d98a68a8ba8f0850a4c5ec5f5a451a81cfd2c2b9e2",
|
||||
"zh:936671c9700a8549b9a4540ecec167415db704e97744ca1fd5e3ad9d48020693",
|
||||
"zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
|
||||
"zh:c134d5445ce56de7115ceb16d65ed2082b7987273a9d17626e9f6a4e6e8d4ce9",
|
||||
"zh:fb6fc4d41737bf2e0bd4a2e40ae2d7bddcda7361968f6b74fad00b4fd55e9506",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/aws" {
|
||||
version = "5.49.0"
|
||||
constraints = "~> 5.49.0"
|
||||
hashes = [
|
||||
"h1:Y3xvYjzBIwYSbcnZDcs6moiy30uxRoY5oT2ExQHKG5A=",
|
||||
"zh:0979b07cdeffb868ea605e4bbc008adc7cccb5f3ba1d3a0b794ea3e8fff20932",
|
||||
"zh:2121a0a048a1d9419df69f3561e524b7e8a6b74ba0f57bd8948799f12b6ad3a1",
|
||||
"zh:573362042ba0bd18e98567a4f45d91b09eb0d223513518ba04f16a646a906403",
|
||||
"zh:57be7a4d6c362be2fa586d270203f4eac1ee239816239a9503b86ebc8fa1fef0",
|
||||
"zh:5c72ed211d9234edd70eac9d77c3cafc7bbf819d1c28332a6d77acf227c9a23c",
|
||||
"zh:7786d1a9781f8e8c0079bf58f4ed4aeddec0caf54ad7ddcf43c47936d545a04f",
|
||||
"zh:82133e7d39787ee91ed41988da71beecc2ecb900b5da94b3f3d77fbc4d4dc722",
|
||||
"zh:8cdb1c154dead85be8352afd30eaf41c59249de9e7e0a8eb4ab8e625b90a4922",
|
||||
"zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
|
||||
"zh:ac215fd1c3bd647ae38868940651b97a53197688daefcd70b3595c84560e5267",
|
||||
"zh:c45db22356d20e431639061a72e07da5201f4937c1df6b9f03f32019facf3905",
|
||||
"zh:c9ba90e62db9a4708ed1a4e094849f88ce9d44c52b49f613b30bb3f7523b8d97",
|
||||
"zh:d2be3607be2209995c80dc1d66086d527de5d470f73509e813254067e8287106",
|
||||
"zh:e3fa20090f3cebf3911fc7ef122bd8c0505e3330ab7d541fa945fea861205007",
|
||||
"zh:ef1b9d5c0b6279323f2ecfc322db8083e141984cfe1bb2f33c0f4934fccb69e3",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/kichiyaki/woodpecker" {
|
||||
version = "0.3.0"
|
||||
constraints = "~> 0.3.0"
|
||||
hashes = [
|
||||
"h1:MWilItwMvIsQN88cyU8Yht23OmNv2BFlVNmh0ui8NNQ=",
|
||||
"zh:0cf8e4475f78397235bc2dda4efaccd10836b141a06413457f9aaa869638d5fb",
|
||||
"zh:280345c4d6f632559458021b9cd6ef91f66a7174e72f144d82bffe08771ec742",
|
||||
"zh:52288022d7b1e9e8f1567e17d2af7309546aff0275bcd89d474f9798fe851fea",
|
||||
"zh:72d190ae69863dc7f6474fa21f524820ae8a01be15d9373cadb2b228a54599f8",
|
||||
"zh:7901fa0ceee94026c56a786b0357f4d542304f5947b760766158e096c7361ed0",
|
||||
"zh:a41d5c895568da74325d6f8e4bd11aca7f878d8e5090967a003f8b959797fbdb",
|
||||
"zh:b7d3e9d5cfd4ed5fc2608234bcaade05b80425877b8922a7095689f1b9ebda4f",
|
||||
"zh:c45017d5fb293e71794bb7f9645aa33fe5e2b85038cf0d45ac9876a7320b95fd",
|
||||
"zh:c8955aadde1f4142b1e79505ecd79258354f73690b581f552662941f97a3795d",
|
||||
"zh:e2408e5ffb2236a4335a4d0945e3dd54cabb96afbc4c42c4489fbbd2065e157c",
|
||||
"zh:f0589fc29eb6c66640de50e07fd3ce0fac7759f7563230620ea7dcbb4de4f589",
|
||||
"zh:f28c493ae8eefc87d7ca577385a2c5bd10949d1d8aa54b04d2c82409fa654c9b",
|
||||
"zh:f56f23a13c6f1999409ea292e99b891f80e3727630222ed8ffa94b3c43725683",
|
||||
"zh:f809ab383cca0a5f83072981c64208cbd7fa67e986a86ee02dd2c82333221e32",
|
||||
"zh:f8f2a4cb994246d1270c805630ddf285156d3e9a6493ed6260b5ee2b1ad121f4",
|
||||
]
|
||||
}
|
@ -1,34 +0,0 @@
|
||||
resource "aws_acm_certificate" "created" {
|
||||
domain_name = var.domain_name
|
||||
validation_method = "DNS"
|
||||
subject_alternative_names = []
|
||||
}
|
||||
|
||||
resource "aws_acm_certificate_validation" "created" {
|
||||
certificate_arn = aws_acm_certificate.created.arn
|
||||
depends_on = [terraform_data.print_acm_validation_records]
|
||||
}
|
||||
|
||||
|
||||
# HACK: im sorry
|
||||
|
||||
resource "terraform_data" "print_acm_validation_records" {
|
||||
provisioner "local-exec" {
|
||||
command = <<-EOT
|
||||
echo -en '\n\n\n\n\n\n
|
||||
add the following records to associated DNS server:\n\n
|
||||
${local.acm_validation_records_provisioner_string}
|
||||
\n\n\n\n\n\n'
|
||||
EOT
|
||||
}
|
||||
}
|
||||
|
||||
locals {
|
||||
acm_validation_records_provisioner_string = join("\n", [
|
||||
for map in aws_acm_certificate.created.domain_validation_options :
|
||||
join("\n", [
|
||||
join(" | ", keys(map)),
|
||||
join(" | ", values(map))
|
||||
])
|
||||
])
|
||||
}
|
@ -1,50 +0,0 @@
|
||||
data "aws_cloudfront_cache_policy" "caching_optimized" {
|
||||
name = "Managed-CachingOptimized"
|
||||
}
|
||||
|
||||
locals {
|
||||
cloudfront_s3_origin_id = "S3-${aws_s3_bucket.created.id}"
|
||||
}
|
||||
|
||||
resource "aws_cloudfront_distribution" "created" {
|
||||
enabled = true
|
||||
is_ipv6_enabled = true
|
||||
aliases = [var.domain_name]
|
||||
default_root_object = var.aws_cloudfront_default_root_object
|
||||
price_class = var.aws_cloudfront_price_class
|
||||
http_version = "http2and3"
|
||||
origin {
|
||||
domain_name = aws_s3_bucket.created.bucket_regional_domain_name
|
||||
origin_id = local.cloudfront_s3_origin_id
|
||||
origin_access_control_id = aws_cloudfront_origin_access_control.s3_access.id
|
||||
}
|
||||
default_cache_behavior {
|
||||
allowed_methods = ["GET", "HEAD", "OPTIONS"]
|
||||
cached_methods = ["GET", "HEAD"]
|
||||
cache_policy_id = data.aws_cloudfront_cache_policy.caching_optimized.id
|
||||
target_origin_id = local.cloudfront_s3_origin_id
|
||||
viewer_protocol_policy = "redirect-to-https"
|
||||
compress = true
|
||||
}
|
||||
restrictions {
|
||||
geo_restriction {
|
||||
restriction_type = "none"
|
||||
locations = []
|
||||
}
|
||||
}
|
||||
viewer_certificate {
|
||||
acm_certificate_arn = aws_acm_certificate.created.arn
|
||||
minimum_protocol_version = var.aws_cloudfront_minimum_protocol_version
|
||||
ssl_support_method = "sni-only"
|
||||
}
|
||||
|
||||
depends_on = [aws_acm_certificate_validation.created]
|
||||
}
|
||||
|
||||
resource "aws_cloudfront_origin_access_control" "s3_access" {
|
||||
name = "S3_${aws_s3_bucket.created.id}"
|
||||
description = "S3:${aws_s3_bucket.created.id}"
|
||||
origin_access_control_origin_type = "s3"
|
||||
signing_behavior = "always"
|
||||
signing_protocol = "sigv4"
|
||||
}
|
@ -1,41 +0,0 @@
|
||||
data "aws_caller_identity" "current" {}
|
||||
|
||||
data "aws_kms_alias" "aws_s3" {
|
||||
name = "alias/aws/s3"
|
||||
}
|
||||
|
||||
data "aws_iam_policy_document" "s3_cloudfront_access" {
|
||||
statement {
|
||||
principals {
|
||||
type = "Service"
|
||||
identifiers = ["cloudfront.amazonaws.com"]
|
||||
}
|
||||
actions = [
|
||||
"s3:GetObject",
|
||||
"s3:ListBucket"
|
||||
]
|
||||
resources = [
|
||||
aws_s3_bucket.created.arn,
|
||||
"${aws_s3_bucket.created.arn}/*"
|
||||
]
|
||||
condition {
|
||||
test = "StringEquals"
|
||||
variable = "AWS:SourceArn"
|
||||
values = [aws_cloudfront_distribution.created.arn]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
data "aws_iam_policy_document" "pubilsher" {
|
||||
statement {
|
||||
actions = [
|
||||
"s3:*",
|
||||
"cloudfront:*"
|
||||
]
|
||||
resources = [
|
||||
aws_s3_bucket.created.arn,
|
||||
"${aws_s3_bucket.created.arn}/*",
|
||||
aws_cloudfront_distribution.created.arn
|
||||
]
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
resource "aws_iam_user" "publisher" {
|
||||
name = "${var.domain_name}_publisher"
|
||||
path = "/${var.aws_tag_iac_identifier}/${local.workspace_env}/"
|
||||
force_destroy = true
|
||||
}
|
||||
|
||||
resource "aws_iam_access_key" "publisher" {
|
||||
user = aws_iam_user.publisher.name
|
||||
}
|
||||
|
||||
resource "aws_iam_policy" "publisher" {
|
||||
name_prefix = "${var.domain_name}_publisher"
|
||||
policy = data.aws_iam_policy_document.pubilsher.json
|
||||
}
|
||||
|
||||
resource "aws_iam_user_policy_attachment" "publisher" {
|
||||
policy_arn = aws_iam_policy.publisher.arn
|
||||
user = aws_iam_user.publisher.name
|
||||
}
|
@ -1,28 +0,0 @@
|
||||
resource "aws_s3_bucket" "created" {
|
||||
bucket_prefix = var.aws_s3_use_domain_prefix ? var.domain_name : var.aws_s3_bucket_prefix
|
||||
force_destroy = var.aws_s3_force_destroy
|
||||
}
|
||||
|
||||
resource "aws_s3_bucket_public_access_block" "created" {
|
||||
bucket = aws_s3_bucket.created.id
|
||||
block_public_acls = true
|
||||
block_public_policy = true
|
||||
ignore_public_acls = true
|
||||
restrict_public_buckets = true
|
||||
}
|
||||
|
||||
resource "aws_s3_bucket_server_side_encryption_configuration" "created" {
|
||||
bucket = aws_s3_bucket.created.id
|
||||
rule {
|
||||
bucket_key_enabled = true
|
||||
apply_server_side_encryption_by_default {
|
||||
sse_algorithm = "AES256"
|
||||
# kms_master_key_id = data.aws_kms_alias.aws_s3.arn
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_s3_bucket_policy" "created" {
|
||||
bucket = aws_s3_bucket.created.id
|
||||
policy = data.aws_iam_policy_document.s3_cloudfront_access.json
|
||||
}
|
@ -1,6 +0,0 @@
|
||||
data "gitea_user" "current" {}
|
||||
|
||||
data "gitea_repo" "source" {
|
||||
name = var.gitea_repo
|
||||
username = coalesce(var.gitea_user, data.gitea_user.current.username)
|
||||
}
|
@ -1,61 +0,0 @@
|
||||
locals {
|
||||
secrets_map = {
|
||||
aws_region = { value = var.aws_region }
|
||||
aws_access_key_id = { value = aws_iam_access_key.publisher.id }
|
||||
aws_secret_access_key = { value = aws_iam_access_key.publisher.secret }
|
||||
cloudfront_distribution = { value = aws_cloudfront_distribution.created.id }
|
||||
s3_bucket = { value = aws_s3_bucket.created.id }
|
||||
}
|
||||
}
|
||||
|
||||
################################
|
||||
#### for adduc/woodpecker ####
|
||||
################################
|
||||
|
||||
# adduc/woodpecker@v0.4.0 is incompatible with latest woodpecker (eg, v2.41)
|
||||
|
||||
# data "woodpecker_self" "current" { }
|
||||
#
|
||||
# resource "woodpecker_repository" "created" {
|
||||
# name = data.gitea_repo.source.name
|
||||
# # woodpecker username can come from associated gitea username
|
||||
# owner = coalesce(var.woodpecker_user, var.gitea_user, data.woodpecker_self.current.login)
|
||||
# visibility = data.gitea_repo.source.private ? "Public" : "Private"
|
||||
# }
|
||||
|
||||
# resource "woodpecker_repository_secret" "secrets" {
|
||||
# count = length(keys(local.secrets_map))
|
||||
#
|
||||
# repo_owner = woodpecker_repository.created.owner
|
||||
# repo_name = woodpecker_repository.created.name
|
||||
# name = upper(keys(local.secrets_map)[count.index])
|
||||
# value = values(local.secrets_map)[count.index].value
|
||||
# events = try(values(local.secrets_map)[count.index].events, var.woodpecker_secrets_events, [])
|
||||
# }
|
||||
|
||||
|
||||
####################################
|
||||
#### for Kichiyaki/woodpecker ####
|
||||
####################################
|
||||
|
||||
data "woodpecker_user" "current" {
|
||||
login = ""
|
||||
}
|
||||
|
||||
resource "woodpecker_repository" "created" {
|
||||
full_name = join("/", [
|
||||
coalesce(var.woodpecker_user, var.gitea_user, data.woodpecker_user.current.login),
|
||||
data.gitea_repo.source.name
|
||||
])
|
||||
|
||||
visibility = data.gitea_repo.source.private ? "public" : "private"
|
||||
}
|
||||
|
||||
resource "woodpecker_repository_secret" "secrets" {
|
||||
count = length(keys(local.secrets_map))
|
||||
|
||||
repository_id = woodpecker_repository.created.id
|
||||
name = upper(keys(local.secrets_map)[count.index])
|
||||
value = values(local.secrets_map)[count.index].value
|
||||
events = try(values(local.secrets_map)[count.index].events, var.woodpecker_secrets_events, [])
|
||||
}
|
@ -1,2 +0,0 @@
|
||||
organization = "bdeshi-space"
|
||||
workspaces { prefix = "resume-manpage-" }
|
@ -1,40 +0,0 @@
|
||||
output "aws_account_id" {
|
||||
value = data.aws_caller_identity.current.account_id
|
||||
description = "ID of the AWS account."
|
||||
}
|
||||
|
||||
output "s3_bucket" {
|
||||
value = aws_s3_bucket.created.id
|
||||
description = "name of the created S3 bucket."
|
||||
}
|
||||
|
||||
output "cloudfront_distribution" {
|
||||
value = aws_cloudfront_distribution.created.id
|
||||
description = "ID of the created CloudFront distribution."
|
||||
}
|
||||
|
||||
output "cloudfront_domain" {
|
||||
value = aws_cloudfront_distribution.created.domain_name
|
||||
description = "domain name of the created CloudFront distribution."
|
||||
}
|
||||
|
||||
output "acm_certificate_arn" {
|
||||
value = aws_acm_certificate.created.arn
|
||||
description = "ARN of the created ACM certificate."
|
||||
}
|
||||
|
||||
output "iam_access_key_id" {
|
||||
value = aws_iam_access_key.publisher.id
|
||||
description = "access key ID of the publisher IAM user."
|
||||
}
|
||||
|
||||
output "iam_secret_access_key" {
|
||||
value = aws_iam_access_key.publisher.secret
|
||||
sensitive = true
|
||||
description = "secret access key of the publisher IAM user."
|
||||
}
|
||||
|
||||
output "domain_name" {
|
||||
value = var.domain_name
|
||||
description = "target publishing domain name."
|
||||
}
|
@ -1,55 +0,0 @@
|
||||
terraform {
|
||||
required_version = "~> 1.8.0"
|
||||
|
||||
required_providers {
|
||||
aws = {
|
||||
source = "hashicorp/aws"
|
||||
version = "~>5.49.0"
|
||||
}
|
||||
# woodpecker = {
|
||||
# source = "adduc/woodpecker"
|
||||
# version = "~> 0.4.0"
|
||||
# }
|
||||
woodpecker = {
|
||||
source = "Kichiyaki/woodpecker"
|
||||
version = "~> 0.3.0"
|
||||
}
|
||||
gitea = {
|
||||
source = "go-gitea/gitea"
|
||||
version = "~>0.3.0"
|
||||
}
|
||||
}
|
||||
|
||||
backend "remote" {}
|
||||
|
||||
# cloud {
|
||||
# organization = collected from TF_CLOUD_ORGANIZATION env
|
||||
# workspaces {
|
||||
# project = collected from TF_CLOUD_PROJECT env
|
||||
# }
|
||||
# }
|
||||
|
||||
}
|
||||
|
||||
provider "aws" {
|
||||
# profile = collected from AWS_PROFILE env
|
||||
region = var.aws_region
|
||||
default_tags {
|
||||
tags = {
|
||||
"ManagedBy" = var.aws_tag_iac_identifier
|
||||
"iac/project" = var.aws_tag_iac_project_name
|
||||
"iac/source" = "${data.gitea_repo.source.ssh_url}/${var.aws_tag_iac_project_subpath}"
|
||||
"iac/environment" = local.workspace_env
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
provider "woodpecker" {
|
||||
# server = collected from WOODPECKER_SERVER env
|
||||
# token = collected from WOODPECKER_TOKEN env
|
||||
}
|
||||
|
||||
provider "gitea" {
|
||||
# base_url = collected from GITEA_BASE_URL env
|
||||
# token = collected from GITEA_TOKEN env
|
||||
}
|
@ -1,5 +0,0 @@
|
||||
domain_name = "sammay.sarkar.website"
|
||||
aws_tag_iac_project_name = "resume-manpage"
|
||||
aws_tag_iac_project_subpath = "/iac"
|
||||
gitea_repo = "resume-manpage"
|
||||
woodpecker_secrets_events = ["push", "deployment", "manual"]
|
@ -1,103 +0,0 @@
|
||||
####################
|
||||
#### commons ####
|
||||
####################
|
||||
|
||||
variable "domain_name" {
|
||||
type = string
|
||||
description = "domain name where the built site is published."
|
||||
}
|
||||
|
||||
|
||||
################
|
||||
#### aws ####
|
||||
################
|
||||
|
||||
variable "aws_tag_iac_identifier" {
|
||||
type = string
|
||||
default = "iac/terraform"
|
||||
description = "IaC tool name added as a tag to AWS resources, also used in iam user path."
|
||||
}
|
||||
|
||||
variable "aws_tag_iac_project_name" {
|
||||
type = string
|
||||
description = "IaC project name added as a tag to AWS resources."
|
||||
}
|
||||
|
||||
variable "aws_tag_iac_project_subpath" {
|
||||
type = string
|
||||
description = "IaC project source path added as a tag to AWS resources."
|
||||
}
|
||||
|
||||
variable "aws_region" {
|
||||
type = string
|
||||
default = "us-east-1"
|
||||
description = "AWS region passed to AWS provider."
|
||||
}
|
||||
|
||||
variable "aws_s3_bucket_prefix" {
|
||||
type = string
|
||||
default = null
|
||||
description = "AWS S3 bucket name prefix."
|
||||
}
|
||||
|
||||
variable "aws_s3_use_domain_prefix" {
|
||||
type = bool
|
||||
default = true
|
||||
description = "use var.domain_name as AWS S3 bucket name prefix."
|
||||
}
|
||||
|
||||
variable "aws_s3_force_destroy" {
|
||||
type = bool
|
||||
default = true
|
||||
description = "delete all bucket objects to allow clean bucket destroy operation."
|
||||
}
|
||||
|
||||
variable "aws_cloudfront_default_root_object" {
|
||||
type = string
|
||||
default = "index.html"
|
||||
description = "default root object name for the CloudFront distribution."
|
||||
}
|
||||
|
||||
variable "aws_cloudfront_price_class" {
|
||||
type = string
|
||||
default = "PriceClass_200"
|
||||
description = "price class for the CloudFront distribution: PriceClass_All|PriceClass_200|PriceClass_100."
|
||||
}
|
||||
|
||||
variable "aws_cloudfront_minimum_protocol_version" {
|
||||
type = string
|
||||
default = "TLSv1.2_2021"
|
||||
description = "name of the minimum SSL protocol version used by CloudFront for HTTPS requests."
|
||||
}
|
||||
|
||||
################
|
||||
#### gitea ####
|
||||
################
|
||||
|
||||
variable "gitea_repo" {
|
||||
type = string
|
||||
description = "name of source Gitea repository."
|
||||
}
|
||||
|
||||
variable "gitea_user" {
|
||||
type = string
|
||||
default = null
|
||||
description = "username of Gitea repo owner."
|
||||
}
|
||||
|
||||
|
||||
################
|
||||
# woodpecker #
|
||||
################
|
||||
|
||||
variable "woodpecker_user" {
|
||||
type = string
|
||||
default = null
|
||||
description = "username of Woodpecker server."
|
||||
}
|
||||
|
||||
variable "woodpecker_secrets_events" {
|
||||
type = list(string)
|
||||
default = ["push"]
|
||||
description = "default list of allowed events for Woodpecker secrets created."
|
||||
}
|
@ -1,9 +0,0 @@
|
||||
locals {
|
||||
# terraform remote backend prefix key means local and remote
|
||||
# wokspace names can differ.
|
||||
# assuming workspace are named as `prefix+env`, this section
|
||||
# extracts the env from both local or remote workspace names.
|
||||
_workspace_name_segments = split("-", terraform.workspace)
|
||||
_workspace_name_segments_count = length(local._workspace_name_segments)
|
||||
workspace_env = local._workspace_name_segments[local._workspace_name_segments_count - 1]
|
||||
}
|
@ -1 +1,19 @@
|
||||
{"name":"","short_name":"","icons":[{"src":"./images/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"./images/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"}
|
||||
{
|
||||
"name": "",
|
||||
"short_name": "",
|
||||
"icons": [
|
||||
{
|
||||
"src": "./images/android-chrome-192x192.png",
|
||||
"sizes": "192x192",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "./images/android-chrome-512x512.png",
|
||||
"sizes": "512x512",
|
||||
"type": "image/png"
|
||||
}
|
||||
],
|
||||
"theme_color": "#ffffff",
|
||||
"background_color": "#ffffff",
|
||||
"display": "standalone"
|
||||
}
|
||||
|
@ -18,7 +18,7 @@ div.container
|
||||
include ./sections/description
|
||||
include ./sections/options
|
||||
include ./sections/history
|
||||
include ./sections/recognition
|
||||
include ./sections/achievement
|
||||
include ./sections/contact
|
||||
include ./sections/seealso
|
||||
include ./sections/copyright
|
||||
|
@ -4,18 +4,24 @@ section.achievements
|
||||
h1.target#achievements: a(href="#achievements") achievements
|
||||
.desc
|
||||
ul
|
||||
li
|
||||
b: +ext_a("https://www.credly.com/badges/40f2170e-692d-4aaa-a18e-a51d40a9d41b/public_url", "Certified Kubernetes Administrator")
|
||||
|, CNCF / Linux Foundation
|
||||
li
|
||||
b: +ext_a("https://www.credly.com/badges/231c502b-2d41-4d9f-8233-4f4e63e38caa/public_url", "Certified Kubernetes Application Developer")
|
||||
|, CNCF / Linux Foundation
|
||||
li
|
||||
b: +ext_a("https://www.credly.com/badges/07c275c9-019c-47dc-be22-98f932094362/public_url", "Terraform Associate")
|
||||
|, HashiCorp
|
||||
li
|
||||
b: +ext_a("https://coursera.org/share/90a24b5165900b3e22011c322b4013f8", "Google IT Support Professional")
|
||||
|, Google
|
||||
li
|
||||
b: +ext_a("https://coursera.org/share/53e0dc7863382d6253086b8dcac2518d", "AWS DevOps Specialization")
|
||||
|, AWS
|
||||
li
|
||||
b: +ext_a("https://coursera.org/share/5b48cc6ac827b619d69056d0d6705b5b", "Google Cloud Digital Leader")
|
||||
|, Google
|
||||
li
|
||||
b: +ext_a("https://coursera.org/share/90a24b5165900b3e22011c322b4013f8", "Google IT Support Professional")
|
||||
|, Google
|
||||
li
|
||||
b: +ext_a("https://coursera.org/share/47e5a79207cec44a45fcf2cb0ca1f815", "Python IT Automation Certificate")
|
||||
|, Google
|
||||
@ -34,7 +40,8 @@ section.achievements
|
||||
li
|
||||
b: +ext_a("https://www.hackerrank.com/certificates/dcdb6c3c4463", "Problem Solving Skill Certificate")
|
||||
|, HackerRank
|
||||
li #[b Recognized Contributor], Wikipedia
|
||||
li #[b Top Performer], FieldNation
|
||||
li #[b MVP], Pillar
|
||||
li #[b Star Performer], Digital Healthcare Solutions
|
||||
// li #[b Recognized Contributor], Wikipedia
|
||||
span.darker i use arch btw
|
@ -2,13 +2,13 @@ section.description
|
||||
h1.target#description: a(href="#description") description
|
||||
.desc
|
||||
p.
|
||||
Hello! I am a self-taught technology professional with a strong background in
|
||||
#[em system administration] and #[em software programming], carrying effective hands-on
|
||||
experience in #[em modern dev-ops], #[em cloud-ops], and #[em IaC] work domains.
|
||||
Hello! I am a self-taught technology professional with effective hands-on
|
||||
experience in #[em modern dev-ops], #[em cloud-ops], and #[em IaC] skill areas,
|
||||
from a strong background in #[em system administration] and #[em software programming].
|
||||
p.
|
||||
It's my passion to keep learning and building things that are useful,
|
||||
sustainable, minimal, and easy to use. I put a lot of care to keep my work
|
||||
output #[em clean], #[em maintainable] and #[em well-documented].
|
||||
output #[em resilient], #[em maintainable] and #[em well-documented].
|
||||
p.
|
||||
As an active supporter of the open-source culture, I'm always
|
||||
looking for opportunities to contribute and to assist the community.
|
||||
|
@ -3,25 +3,40 @@ section.history
|
||||
.desc
|
||||
// p My professional employment summary with notable duties is listed below.
|
||||
ul
|
||||
li #[b.job DevOps Consultant] - #[em.org freelance] #[span.period 2022-....]
|
||||
|
||||
li #[b.job DevOps Consultant] - #[em.org freelance]
|
||||
ul
|
||||
li adapting classic workloads to serverless architecture
|
||||
li implementing modern logging and tracing solutions
|
||||
li converting adhoc services to IaC/CaC projects
|
||||
li providing support for cloud devops processes
|
||||
li providing support for cloud platform operations
|
||||
li performing technical evaluation of bleeding-edge technologies
|
||||
|
||||
li #[b.job DevOps Engineer] - #[em.org Pillar] #[span.period 2022-....]
|
||||
li #[b.job Senior Platform Engineer] - #[em.org FieldNation] #[span.period 2024-....]
|
||||
ul
|
||||
li designing cost-effective k8s and AWS infrastructure
|
||||
li implementing monitoring and alerting mechanism
|
||||
li managing cloud-native ci/cd pipelines
|
||||
li contributing to design and implementation of internal developer platforms
|
||||
li ensuring best practices for infrastructure operations
|
||||
li serving as a mentor on cloud-native operational excellence
|
||||
li leading regional platform team and guiding self-sufficiency
|
||||
|
||||
li #[b.job DevOps Engineer] - #[em.org Digital Healthcare Solutions] #[span.period 2021-....]
|
||||
li #[b.job Lead DevOps Engineer] - #[em.org Pillar] #[span.period 2023-2024]
|
||||
ul
|
||||
li supervising kubernetes and classic server workloads
|
||||
li in charge of AWS infrastructure administration
|
||||
li maintaining total monitoring stack
|
||||
li managing ci/cd pipeline
|
||||
li provided operational expertise for critical strategic planning
|
||||
li performed extesnive RnD on technical solutions to guide purchase decisions
|
||||
li planned and managed operations projects and targets
|
||||
|
||||
li #[b.job DevOps Engineer] - #[em.org Pillar] #[span.period 2022-2023]
|
||||
ul
|
||||
li designed cost-effective k8s and AWS infrastructure
|
||||
li implemented end-to-end monitoring and alerting mechanisms
|
||||
li managed cloud-native ci/cd pipelines
|
||||
|
||||
li #[b.job DevOps Engineer] - #[em.org Digital Healthcare Solutions] #[span.period 2021-2022]
|
||||
ul
|
||||
li supervised kubernetes and classic server workloads
|
||||
li performed AWS infrastructure administration
|
||||
li maintained total monitoring stack
|
||||
li managed ci/cd pipelines
|
||||
|
||||
li #[b.job TechOps Engineer] - #[em.org Telenor Health / Digital Healthcare Solutions] #[span.period 2020-2021]
|
||||
ul
|
||||
|
@ -4,11 +4,11 @@ audio#pronounce(controls=false preload="auto")
|
||||
source(src="assets/media/spell.wav" type="audio/wav")
|
||||
|
||||
script.
|
||||
function pronounce() {
|
||||
var audio = document.getElementById('pronounce')
|
||||
audio.play()
|
||||
}
|
||||
window.pronounce = function() {
|
||||
var audio = document.getElementById('pronounce');
|
||||
audio.play();
|
||||
};
|
||||
|
||||
section.name
|
||||
h1.target#name: a(href="#name") name
|
||||
.desc #[a(href='#' onclick="pronounce()") #{name} 🔉] - #{description}
|
||||
.desc #[a(href='#' onclick="pronounce()" title="tap for name pronunciation") #{name} 🔉] - #{description}
|
||||
|
@ -3,6 +3,23 @@ section.options
|
||||
.desc
|
||||
ul
|
||||
|
||||
li
|
||||
b.target#devops: a(href="#devops") --dev-ops
|
||||
.opt_desc
|
||||
p.
|
||||
I am involved with both classic and modern DevOps practices, having current knowledge of
|
||||
dev-ops orchestration and management tools such as #[em Docker], #[em Kubernetes], #[em Terraform].
|
||||
My professional experience includes managing production k8s clusters on #[em AWS] cloud.
|
||||
I have also worked on converting legacy workloads to #[em serverless] paradigm and maintaining them.
|
||||
p.
|
||||
I can set up a working dev-ops environment from scratch, with a working CI/CD pipeline —
|
||||
doing this with both modern container-based tools such as #[em k8s], #[em Helm], #[em Argo],
|
||||
#[em Gitlab/Github CI], or alternatively with legacy tools such as #[em Ansible] and #[em Jenkins].
|
||||
p.
|
||||
My expertise on #[em observability] comprises managing monitoring and alerting stacks built
|
||||
with #[em prometheus], #[em Grafana], #[em PagerDuty], as well as in-depth observability
|
||||
toolkits such as #[em Graylog], #[em NewRelic], #[em DataDog], #[em Splunk] etc.
|
||||
|
||||
li
|
||||
b.target#sysadmin: a(href="#sysadmin") --sys-admin
|
||||
.opt_desc
|
||||
@ -17,23 +34,6 @@ section.options
|
||||
p.
|
||||
My expertise on #[em Windows] systems is above-average, with working knowledge of #[em Mac OSX].
|
||||
|
||||
li
|
||||
b.target#devops: a(href="#devops") --dev-ops
|
||||
.opt_desc
|
||||
p.
|
||||
I am involved in both classic and modern DevOps practices, having current knowledge of
|
||||
dev-ops orchestration and management tools such as #[em Docker], #[em Kubernetes], #[em Terraform].
|
||||
My professional experience includes managing production k8s clusters on #[em AWS] cloud.
|
||||
I have also worked on converting legacy workloads to #[em serverless] paradigm and maintaining them.
|
||||
p.
|
||||
I can set up a working dev-ops environment from scratch, with a working CI/CD pipeline —
|
||||
doing this with both modern container-based tools such as #[em k8s], #[em Helm], #[em Argo],
|
||||
#[em Gitlab/Github CI], or alternatively with legacy tools such as #[em Ansible] and #[em Jenkins].
|
||||
p.
|
||||
My expertise on #[em observability] comprises managing monitoring and alerting stacks built
|
||||
with #[em prometheus], #[em Grafana], #[em PagerDuty], as well as in-depth observability
|
||||
toolkits such as #[em Graylog], #[em NewRelic], #[em DataDog], #[em Splunk] etc.
|
||||
|
||||
li
|
||||
b.target#backend: a(href="#backend") --back-end
|
||||
.opt_desc
|
||||
@ -83,7 +83,9 @@ section.options
|
||||
DNS, VPN, code hosting, RSS, caldav and more.
|
||||
p.
|
||||
My other interests are the #[em decentralized web] and the #[em indie] community.
|
||||
I closely follow the fediverse and indieweb movements.
|
||||
I closely follow the fediverse and indieweb movement. I also hang around niche cozy
|
||||
communities like tilde club, 512kb club etc.
|
||||
|
||||
p.
|
||||
I am a #[strike huge] mid-level nerd, which might be apparent from the design of this page.
|
||||
My field of work is complementary to my passion. I love to tinker with technology,
|
||||
|
@ -2,11 +2,17 @@ section.seealso
|
||||
h1.target#seealso: a(href="#seealso") see also
|
||||
.desc
|
||||
p
|
||||
a(href="https://bdeshi.space" rel="external").print: b articles
|
||||
a(href=links.blog rel="external").print: b articles
|
||||
|(7),
|
||||
|
|
||||
a(href="https://github.com/bdeshi" rel="external nofollow noopener noreferrer").print: b github
|
||||
a(href=links.github rel="external nofollow noopener noreferrer").print: b github
|
||||
|(4)
|
||||
|
||||
//-
|
||||
|
|
||||
a(href=links.showcase).print: b showcase
|
||||
|(5)
|
||||
|
||||
//-
|
||||
|
|
||||
|
|
||||
|
@ -2,12 +2,12 @@ section.synopsis
|
||||
h1.target#synopsis: a(href="#synopsis") synopsis
|
||||
.desc
|
||||
p #[b #{name}]
|
||||
| --#[a(href="#sysadmin") sys-admin]
|
||||
|
|
||||
span.dimmer [#[em linux], #[em server] ...]
|
||||
| --#[a(href='#devops') dev-ops]
|
||||
|
|
||||
span.dimmer [#[em cloud], #[em k8s], #[em iac] ...]
|
||||
| --#[a(href="#sysadmin") sys-admin]
|
||||
|
|
||||
span.dimmer [#[em linux], #[em server] ...]
|
||||
p #[b #{name}]
|
||||
| --#[a(href='#backend') back-end] --#[a(href="#automation") automation]
|
||||
|
|
||||
|
22
package.json
22
package.json
@ -1,18 +1,22 @@
|
||||
{
|
||||
"name": "manpage-html",
|
||||
"name": "resume-manpage",
|
||||
"version": "0.0.1",
|
||||
"license": "CC-BY-4.0",
|
||||
"source": "index.pug",
|
||||
"dependencies": {
|
||||
"@csstools/normalize.css": "^12.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@csstools/normalize.css": "^12.0.0",
|
||||
"@parcel/packager-raw-url": "2.5.0",
|
||||
"@parcel/transformer-pug": "2.5.0",
|
||||
"@parcel/transformer-sass": "2.5.0",
|
||||
"@parcel/transformer-webmanifest": "2.5.0",
|
||||
"@parcel/packager-raw-url": "^2.5.0",
|
||||
"@parcel/transformer-pug": "^2.5.0",
|
||||
"@parcel/transformer-sass": "^2.5.0",
|
||||
"@parcel/transformer-webmanifest": "^2.5.0",
|
||||
"parcel": "^2.5.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "parcel build index.pug",
|
||||
"serve": "parcel serve index.pug",
|
||||
"watch": "parcel watch index.pug"
|
||||
"prebuild": "rm -rf dist",
|
||||
"build": "parcel build",
|
||||
"serve": "parcel serve",
|
||||
"watch": "parcel watch"
|
||||
}
|
||||
}
|
||||
|
41
vars.pug
41
vars.pug
@ -3,30 +3,45 @@
|
||||
last_name = 'Sarkar',
|
||||
name = first_name.toLowerCase() + '_' + last_name.toLowerCase(),
|
||||
site_url = 'https://sammay.sarkar.website',
|
||||
description = 'server magician',
|
||||
description = 'cloud-native magician',
|
||||
meta_description = "a self-taught tech professional with a strong background in system administration and general programming, with proven hands-on experience in modern DevOps, cloud, and IaC work areas."
|
||||
|
||||
var section = 8,
|
||||
section_name = 'System Administration Manuals'
|
||||
var version = require('child_process').execSync('git rev-parse HEAD').toString().trim(),
|
||||
short_version = version.substring(0, 7),
|
||||
date = (new Date()).toDateString()
|
||||
year = (new Date()).getFullYear()
|
||||
|
||||
var contacts = {
|
||||
'location': 'Dhaka, Bangladesh (GMT+6)',
|
||||
'email': '<a href="mailto:sammay@sarkar.website">sammay@sarkar.website</a>',
|
||||
'pubkey': '<a href="assets/pubkey.asc">0x2F66DAA6F22F092C</a>',
|
||||
'linkedin': '<a href="https://www.linkedin.com/in/sammay-sarkar/">sammay-sarkar</a>',
|
||||
'cell': '<span id="phone"><a href="tel:+8801622085295">+8801622085295</a> <span id="phone-hint">/* please consider texting first? */</span></span>'
|
||||
'cell': '<span id="phone"><a href="tel:+8801622085295">+8801622085295</a> <span id="phone-hint">/* please consider texting first? */</span></span>',
|
||||
// 'telegram': '<a href="https://t.me/bdeshi">@bdeshi</a>',
|
||||
// 'irc': 'bdeshi at libera.chat'
|
||||
}
|
||||
|
||||
var links = {
|
||||
'blog': '<a href="https://bdeshi.space">bdeshi.space</a>',
|
||||
'showcase': '<a href="/portfolio">/portfolio</a>',
|
||||
'github': '<a href="https://github.com/bdeshi">github.com/bdeshi</a>',
|
||||
// 'fediverse': '<a href="https://mastodon.social/@bdeshi">@bdeshi</a>',
|
||||
'telegram': '<a href="https://t.me/bdeshi">bdeshi</a>',
|
||||
'irc': 'bdeshi at libera.chat'
|
||||
'blog': 'https://bdeshi.space',
|
||||
'showcase': '/portfolio.pug',
|
||||
'github': 'https://github.com/bdeshi',
|
||||
// 'fediverse': 'https://mastodon.social/@bdeshi',
|
||||
}
|
||||
|
||||
var monospace_font_css = "https://fonts.googleapis.com/css2?"
|
||||
+ "family=IBM+Plex+Mono:ital,wght@0,400;0,700;1,400;1,700"
|
||||
+ "display=swap"
|
||||
var source_link = 'https://git.bdeshi.space/bdeshi/resume-manpage/commit/' + version
|
||||
|
||||
var source_link = (process.env.CI_REPO_URL ?
|
||||
process.env.CI_REPO_URL : process.env.CI_PROJECT_URL ?
|
||||
process.env.CI_PROJECT_URL : process.env.GITHUB_REPOSITORY ?
|
||||
`${process.env.GITHUB_SERVER_URL}/${process.env.GITHUB_REPOSITORY}` :
|
||||
require('child_process').execSync('git remote get-url origin').
|
||||
toString().replace(":", "/").replace("git@", "https://")
|
||||
),
|
||||
version = (process.env.CI_COMMIT_SHA ?
|
||||
process.env.CI_COMMIT_SHA : process.env.GITHUB_SHA ?
|
||||
process.env.GITHUB_SHA :
|
||||
require('child_process').execSync('git rev-parse HEAD').toString()
|
||||
),
|
||||
short_version = version.substring(0, 7),
|
||||
date = (new Date()).toDateString(),
|
||||
year = (new Date()).getFullYear()
|
||||
|
Reference in New Issue
Block a user